How it started
I'm building an AI app that needs to play audio in real time while showing AI-generated text. The text comes back as a stream, so it scrolls on screen, and the user may be scrolling at the same time to read it.
I started with flutter_sound. The catch: it can only play on the UI thread, i.e. the main isolate. Flutter's Dart code is effectively single-threaded, and if you move playback into an Isolate it simply won't play. Watch out here: inside an Isolate even logging behaves differently. You have to fall back to the plain print(); if you use log() from dart:developer, nothing shows up in the console, so don't conclude that your code was never called.
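To make that logging quirk concrete, here is a minimal sketch (audioWorker is just an illustrative name, and the exact console behavior can vary with the Flutter/Dart tooling version):

import 'dart:developer' as developer;
import 'dart:isolate';

Future<void> main() async {
  await Isolate.spawn(audioWorker, null);
  // Give the worker a moment to run before the process exits.
  await Future<void>.delayed(const Duration(seconds: 1));
}

void audioWorker(Object? _) {
  // In my experience, plain print() from a background isolate does reach the console...
  print('audioWorker: print() output is visible');
  // ...while developer.log() output may never show up there, which is easy to
  // mistake for "this code was never called".
  developer.log('audioWorker: this line may not appear in the console');
}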
So what do we do?
Switch to native playback!
Back to my old trade
Create the plugin
flutter create --template=plugin --platforms=android,ios native_audio_player
Once it finishes, a new plugin module shows up in the project structure.
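The generated layout looks roughly like this (it varies slightly between Flutter versions); the three files edited below live under android/, ios/ and lib/:

native_audio_player/
  android/src/main/kotlin/com/example/native_audio_player/NativeAudioPlayerPlugin.kt
  ios/Classes/NativeAudioPlayerPlugin.swift
  lib/native_audio_player.dart
  example/        (generated example app)
  pubspec.yaml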
Edit the following file:
android/src/main/kotlin/com/example/native_audio_player/NativeAudioPlayerPlugin.kt
The code:
package com.example.native_audio_player

import android.media.AudioFormat
import android.media.AudioManager
import android.media.AudioTrack
import android.os.Handler
import android.os.HandlerThread
import androidx.annotation.NonNull
import io.flutter.embedding.engine.plugins.FlutterPlugin
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.TimeUnit

class NativeAudioPlayerPlugin : FlutterPlugin, MethodChannel.MethodCallHandler {
    private lateinit var channel: MethodChannel
    private var audioTrack: AudioTrack? = null
    private var audioThread: HandlerThread? = null
    private var audioHandler: Handler? = null
    private var pcmQueue: LinkedBlockingQueue<ByteArray>? = null
    @Volatile private var isPlaying = false

    override fun onAttachedToEngine(@NonNull binding: FlutterPlugin.FlutterPluginBinding) {
        channel = MethodChannel(binding.binaryMessenger, "native_audio_player")
        channel.setMethodCallHandler(this)
    }

    override fun onDetachedFromEngine(@NonNull binding: FlutterPlugin.FlutterPluginBinding) {
        stopAudioTrack()
        channel.setMethodCallHandler(null)
    }

    override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) {
        when (call.method) {
            "startPlayer" -> {
                val sampleRate = call.argument<Int>("sampleRate") ?: 16000
                val channels = call.argument<Int>("channels") ?: 1
                startAudioTrack(sampleRate, channels)
                result.success(null)
            }
            "writePcm" -> {
                val pcm = call.argument<ByteArray>("pcm")
                if (pcm != null && isPlaying) {
                    pcmQueue?.offer(pcm)
                }
                result.success(null)
            }
            "stopPlayer" -> {
                stopAudioTrack()
                result.success(null)
            }
            else -> result.notImplemented()
        }
    }

    private fun startAudioTrack(sampleRate: Int, channels: Int) {
        val channelConfig =
            if (channels == 1) AudioFormat.CHANNEL_OUT_MONO else AudioFormat.CHANNEL_OUT_STEREO
        val bufferSize = AudioTrack.getMinBufferSize(
            sampleRate,
            channelConfig,
            AudioFormat.ENCODING_PCM_16BIT
        )
        // This constructor is deprecated on newer APIs but still works; AudioTrack.Builder
        // is the modern alternative.
        audioTrack = AudioTrack(
            AudioManager.STREAM_MUSIC,
            sampleRate,
            channelConfig,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize,
            AudioTrack.MODE_STREAM
        )
        pcmQueue = LinkedBlockingQueue()
        isPlaying = true
        // Playback runs on its own HandlerThread, completely off the main/UI thread.
        audioThread = HandlerThread("AudioPlayerThread").apply { start() }
        audioHandler = Handler(audioThread!!.looper)
        audioHandler?.post { playLoop() }
        audioTrack?.play()
    }

    private fun playLoop() {
        // Tip: log Thread.currentThread().name here to confirm playback is not on "main".
        try {
            while (isPlaying && !Thread.currentThread().isInterrupted) {
                // poll() with a timeout instead of take(), so the loop notices
                // isPlaying == false and exits instead of blocking forever on an empty queue.
                val data = pcmQueue?.poll(100, TimeUnit.MILLISECONDS)
                if (data != null && data.isNotEmpty()) {
                    audioTrack?.write(data, 0, data.size)
                }
            }
        } catch (e: InterruptedException) {
            Thread.currentThread().interrupt()
        }
    }

    private fun stopAudioTrack() {
        isPlaying = false
        audioThread?.quitSafely()
        audioThread = null
        audioHandler = null
        audioTrack?.stop()
        audioTrack?.release()
        audioTrack = null
        pcmQueue?.clear()
        pcmQueue = null
    }
}
For iOS, edit this file:
ios/Classes/NativeAudioPlayerPlugin.swift
The code:
import Flutter
import UIKit
import AVFoundation

public class NativeAudioPlayerPlugin: NSObject, FlutterPlugin {
    var engine: AVAudioEngine?
    var playerNode: AVAudioPlayerNode?
    var sampleRate: Double = 16000
    var channels: AVAudioChannelCount = 1

    public static func register(with registrar: FlutterPluginRegistrar) {
        let channel = FlutterMethodChannel(name: "native_audio_player", binaryMessenger: registrar.messenger())
        let instance = NativeAudioPlayerPlugin()
        registrar.addMethodCallDelegate(instance, channel: channel)
    }

    public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
        switch call.method {
        case "startPlayer":
            if let args = call.arguments as? [String: Any] {
                sampleRate = Double(args["sampleRate"] as? Int ?? 16000)
                channels = AVAudioChannelCount(args["channels"] as? Int ?? 1)
                startEngine()
            }
            result(nil)
        case "writePcm":
            if let args = call.arguments as? [String: Any],
               let pcm = args["pcm"] as? FlutterStandardTypedData {
                playPcm(pcm.data)
            }
            result(nil)
        case "stopPlayer":
            stopEngine()
            result(nil)
        default:
            result(FlutterMethodNotImplemented)
        }
    }

    func startEngine() {
        engine = AVAudioEngine()
        playerNode = AVAudioPlayerNode()
        guard let engine = engine, let playerNode = playerNode else { return }
        // 16-bit interleaved PCM, matching the raw data pushed from Dart.
        let format = AVAudioFormat(
            commonFormat: .pcmFormatInt16,
            sampleRate: sampleRate,
            channels: channels,
            interleaved: true
        )
        engine.attach(playerNode)
        engine.connect(playerNode, to: engine.mainMixerNode, format: format)
        try? engine.start()
        playerNode.play()
    }

    func playPcm(_ data: Data) {
        guard let playerNode = playerNode else { return }
        let format = playerNode.outputFormat(forBus: 0)
        // 2 bytes per sample, `channelCount` samples per frame (interleaved).
        let frameCount = AVAudioFrameCount(data.count / 2 / Int(format.channelCount))
        guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCount),
              let ptr = buffer.int16ChannelData?[0] else { return }
        buffer.frameLength = buffer.frameCapacity
        // Copy the raw bytes into the buffer's interleaved int16 channel data.
        data.withUnsafeBytes { raw in
            if let src = raw.baseAddress {
                memcpy(ptr, src, data.count)
            }
        }
        playerNode.scheduleBuffer(buffer, completionHandler: nil)
    }

    func stopEngine() {
        playerNode?.stop()
        engine?.stop()
        engine = nil
        playerNode = nil
    }
}
The plugin's Dart API
lib/native_audio_player.dart
The code:
import 'dart:typed_data';

import 'package:flutter/services.dart';

class NativeAudioPlayer {
  static const MethodChannel _channel = MethodChannel('native_audio_player');

  static Future<void> startPlayer({required int sampleRate, required int channels}) async {
    await _channel.invokeMethod('startPlayer', {
      'sampleRate': sampleRate,
      'channels': channels,
    });
  }

  static Future<void> writePcm(Uint8List pcmData) async {
    await _channel.invokeMethod('writePcm', {
      'pcm': pcmData,
    });
  }

  static Future<void> stopPlayer() async {
    await _channel.invokeMethod('stopPlayer');
  }
}
Register the plugin
In the app's pubspec.yaml, add a path dependency:
dependencies:
  native_audio_player:
    path: ./native_audio_player
Then run flutter pub get to pull in the dependency.
Usage
Future<void> _startFeedingPCM() async {
  await NativeAudioPlayer.startPlayer(sampleRate: 16000, channels: 1);
  // Then listen to the PCM stream and forward each chunk to the native player.
  _pcmStreamController.stream.listen((msg) {
    NativeAudioPlayer.writePcm(msg);
  });
}
The flow: the backend streams audio data back, we feed it to the player, and the player plays it.
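Putting it together, a minimal end-to-end sketch might look like the following. The WebSocket transport, the URL, and the playStreamedAudio name are placeholders for illustration (my real app uses its own streaming channel); the only fixed pieces are the NativeAudioPlayer calls and the assumption that the backend sends raw 16 kHz mono 16-bit PCM:

import 'dart:async';
import 'dart:io';
import 'dart:typed_data';

import 'package:native_audio_player/native_audio_player.dart';

// Hypothetical end-to-end flow: open the backend stream, start the native
// player, forward every PCM chunk as it arrives, and stop when the stream ends.
Future<void> playStreamedAudio() async {
  // Placeholder URL; the backend is assumed to push raw 16 kHz mono 16-bit PCM frames.
  final socket = await WebSocket.connect('wss://example.com/ai/audio');

  // The format passed here must match the PCM the backend actually sends.
  await NativeAudioPlayer.startPlayer(sampleRate: 16000, channels: 1);

  final done = Completer<void>();
  socket.listen(
    (dynamic message) {
      // Binary frames arrive as List<int>; hand them straight to the native side.
      if (message is List<int>) {
        NativeAudioPlayer.writePcm(Uint8List.fromList(message));
      }
    },
    onDone: () {
      if (!done.isCompleted) done.complete();
    },
    onError: (Object e) {
      if (!done.isCompleted) done.completeError(e);
    },
  );

  try {
    await done.future;
  } finally {
    await NativeAudioPlayer.stopPlayer();
    await socket.close();
  }
}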
How it ended
Tested on a real device: playback is smooth, and scrolling the screen while audio is playing no longer causes any jank, so the problem is solved. You can also log the playback thread on the native side to double-check that it is not the main thread.