package com.shiwei.audiorecord
import android.Manifest
import android.app.Service
import android.content.Intent
import android.content.pm.PackageManager
import android.media.AudioAttributes
import android.media.AudioFormat
import android.media.AudioPlaybackCaptureConfiguration
import android.media.AudioRecord
import android.media.projection.MediaProjectionManager
import android.os.Binder
import android.os.IBinder
import android.util.Log
import androidx.core.app.NotificationCompat
import androidx.core.content.ContextCompat
import kotlin.concurrent.thread
class AudioCaptureService :Service(){
lateinit var audioRecord: AudioRecord
private val TAG: String = "AudioCaptureService"
private var mAudioData = ByteArray(1024)
private var minBufferSize:Int = 0
private var isRecording = false
class MyBinder: Binder() {
fun getService():AudioCaptureService{
return AudioCaptureService()
}
}
private final var mBinder:MyBinder = MyBinder()
override fun onBind(intent: Intent?): IBinder {
return mBinder
}
override fun onCreate() {
super.onCreate()
Log.d(TAG, "onCreate: ")
startForeground(1,NotificationCompat.Builder(this, AUDIO_RECORD_NOTIFICATION_CHANNEL_ID).build())
}
override fun onStartCommand(intent: Intent, flags: Int, startId: Int): Int {
Log.d(TAG, "onStartCommand: ")
val currentResultCode = intent.getIntExtra("resultCode",0)
val resultData = intent.getParcelableExtra<Intent>("resultData")
initAudioRecord(currentResultCode, resultData!!)
return super.onStartCommand(intent, flags, startId)
}
private fun initAudioRecord(resultCode:Int,intent: Intent) {
Log.d(TAG, "initAudioRecord:resultCode = $resultCode, intent = $intent")
minBufferSize = AudioRecord.getMinBufferSize(mSampleRate, mChannelConfig, mAudioFormat)
val mediaProjectionManager =
baseContext.getSystemService(MEDIA_PROJECTION_SERVICE) as MediaProjectionManager
//设置应用程序录制系统的音频的能力
val mediaProjection = mediaProjectionManager.getMediaProjection(resultCode, intent)
val builder = AudioRecord.Builder()
builder.setAudioFormat(
AudioFormat.Builder()
.setSampleRate(16000)
.setChannelMask(AudioFormat.CHANNEL_IN_MONO)
.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
.build()
)
.setBufferSizeInBytes(minBufferSize)
val config = AudioPlaybackCaptureConfiguration.Builder(mediaProjection)
.addMatchingUsage(AudioAttributes.USAGE_MEDIA)
.addMatchingUsage(AudioAttributes.USAGE_UNKNOWN)
.addMatchingUsage(AudioAttributes.USAGE_GAME)
.build()
//将AudioRecord 设置为录制其他应用的音频
builder.setAudioPlaybackCaptureConfig(config)
try {
if (ContextCompat.checkSelfPermission(this,Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) {
audioRecord = builder.build()
Log.d(TAG, "audioRecord:赋值对象 ")
}else{
Log.d(TAG, "audioRecord:未赋值对象 ")
}
} catch (e: Exception) {
e.printStackTrace()
Log.e("录音器错误", "录音器初始化失败")
}
Log.d(TAG, "audioRecord=: $audioRecord ")
}
fun startRecord() {
Log.d(TAG, "audioRecord:${audioRecord} ")
Log.d(TAG, "startRecord: ....")
//开始录音
if (isRecording){
Log.d(TAG, "startRecord: 已经录音中...")
return
}
isRecording = true
audioRecord.startRecording()
Log.d(TAG, "audioRecord: startRecording.... ")
thread {
Log.d(TAG, "thread: audioRecordState = ${audioRecord.recordingState} ")
while (isRecording){
while (audioRecord.recordingState == AudioRecord.RECORDSTATE_RECORDING){
audioRecord.read(mAudioData,0,mAudioData.size)
Log.d(TAG, "read: mAudioData = ${mAudioData}, mAudioData.size = ${mAudioData.size}")
}
}
}
}
fun stopRecord() {
Log.d(TAG, "audioRecord:${audioRecord} ")
Log.d(TAG, "stopRecord:.... ")
isRecording = false
audioRecord.stop()
audioRecord.release()
}
fun test(){
Log.d(TAG, "test: service")
}
}
package com.shiwei.audiorecord
import android.annotation.SuppressLint
import android.app.Activity
import android.content.ComponentName
import android.content.Context
import android.content.Intent
import android.content.ServiceConnection
import android.media.AudioAttributes
import android.media.AudioFormat
import android.media.AudioRecord
import android.media.projection.MediaProjection
import android.media.projection.MediaProjectionManager
import android.os.Bundle
import android.os.IBinder
import android.util.Log
import android.webkit.WebSettings
import android.webkit.WebView
import android.widget.Button
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.hjq.permissions.OnPermissionCallback
import com.hjq.permissions.Permission
import com.hjq.permissions.XXPermissions
import java.io.File
import java.io.FileOutputStream
import java.io.IOException
import kotlin.concurrent.thread
const val mSampleRate = 44100
const val mChannelConfig = AudioFormat.CHANNEL_IN_MONO
const val mAudioFormat = AudioFormat.ENCODING_PCM_16BIT
class MainActivity : AppCompatActivity() {
private var start:Button? = null
private var stop:Button? = null
private var pcmToWav:Button? = null
private var bufferSizeInBytes = 0
var audioRecord:AudioRecord? = null
private val TAG = "MainActivity"
private var isRecording = false
val filename = "a.pcm"
var buffer:ByteArray? = null
private var fileOutputStream: FileOutputStream? = null
var REQUEST_MEDIA_PROJECTION = 8888
val mp:MediaProjectionManager by lazy { getSystemService(MEDIA_PROJECTION_SERVICE) as MediaProjectionManager}
var mediaProjection:MediaProjection?=null
var audioRecordBuild:AudioRecord.Builder? = null
private var audioCaptureService:AudioCaptureService? = null
private var mConnection:ServiceConnection = object : ServiceConnection {
override fun onServiceConnected(name: ComponentName?, service: IBinder?) {
var binder:AudioCaptureService.MyBinder = (service as AudioCaptureService.MyBinder)
audioCaptureService = binder.getService()
Log.d(TAG, "onServiceConnected: ")
}
override fun onServiceDisconnected(name: ComponentName?) {
audioCaptureService = null;
Log.d(TAG, "onServiceDisconnected: ")
}
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
start = findViewById(R.id.start)
stop = findViewById(R.id.stop)
initListener()
val webView = findViewById<WebView>(R.id.webView)
webView.settings.mediaPlaybackRequiresUserGesture = false //webView 播放视频运行自动播放开启声音
webView.settings.javaScriptEnabled = true // enable javascript
webView.settings.cacheMode = WebSettings.LOAD_NO_CACHE // webView不走缓存,每次进行网络请求
webView.settings.domStorageEnabled = true // enable Storage
webView.settings.javaScriptCanOpenWindowsAutomatically = true
webView.loadUrl("file:///android_asset/index.html")
startActivityForResult(mp.createScreenCaptureIntent(),REQUEST_MEDIA_PROJECTION)
}
private fun initListener() {
start?.setOnClickListener {
audioCaptureService?.audioRecord
}
stop?.setOnClickListener {
stopRecord()
}
pcmToWav?.setOnClickListener {
pcmToWav()
}
}
private fun startRecord() {
Log.d(TAG, "startRecord: ${audioCaptureService?.audioRecord}")
audioCaptureService?.audioRecord
}
private fun stopRecord() {
audioCaptureService?.stopRecord()
}
override fun onDestroy() {
super.onDestroy()
unbindService(mConnection);
}
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
Log.d(TAG,"onActivityResult: requestCode="+requestCode+"resultCode= "+resultCode+"intent= "+intent)
super.onActivityResult(requestCode, resultCode, intent)
if (resultCode != RESULT_OK){
Toast.makeText(this,"User denied screen recorder permission",Toast.LENGTH_LONG).show()
return
}
val intent = Intent(this,AudioCaptureService::class.java)
bindService(intent, mConnection, BIND_AUTO_CREATE);
intent.putExtra("resultData",data)
intent.putExtra("resultCode",resultCode)
startForegroundService(intent)
}
}
这是一个service类,我发现在startRecord中读取audioRecord的对象为null,但是在initAudioRecord方法中,audioRecord中明明赋值了对象,并且可以打印地址,执行的startRecord目前的时机是在initAudioReocrd。
然后我本身想在MainActicity中去控制startRecord进行录音(录制权限的什么,没有问题)
方便说下,代码那个位置null?了?
null就看时序,赋值的地方在你调用之前有没有走到。操作的是不是同一个对象。