// Xunfei (iFlytek) simultaneous interpretation task
import 'dart:async';
import 'dart:convert';
import 'dart:typed_data';
import 'package:audioplayers/audioplayers.dart';
import 'package:demo001/xunfei/utils.dart';
import 'package:demo001/xunfei/xunfei.dart';
import 'package:intl/intl.dart';
import 'package:web_socket_channel/web_socket_channel.dart';

// One incremental recognition result, keyed by sentence number (sn).
class XunferTask_Result_Text_Item {
  final int sn;
  final String pgs;
  final List rg;
  final List ws;

  XunferTask_Result_Text_Item({
    required this.sn,
    required this.pgs,
    required this.rg,
    required this.ws,
  });
}

class XunferTaskTrans implements ITaskTrans {
  static const int STATUS_FIRST_FRAME = 0;
  static const int STATUS_CONTINUE_FRAME = 1;
  static const int STATUS_LAST_FRAME = 2;

  final String appId;
  final String apiKey;
  final String apiSecret;
  final String host = "ws-api.xf-yun.com";
  final String requestUri = "/v1/private/simult_interpretation";
  late String url;
  WebSocketChannel? _channel;
  bool isconnected = false;
  // Controller for the outgoing (microphone) audio stream
  final StreamController<List<int>> _streamController =
      StreamController<List<int>>();
  // The outgoing audio stream
  Stream<List<int>> get stream => _streamController.stream;
  // Whether the upload task is running
  bool _isRunning = false;
  // Whether the stream has been closed
  bool _isStreamClosed = false;
  // Future of the upload task
  Future<void>? _uploadTask;
  // Recognition results, keyed by sentence number
  final Map<int, XunferTask_Result_Text_Item> tests = {};
  // Controller for the incoming translated TTS audio
  final StreamController<Uint8List> _transtreamController =
      StreamController<Uint8List>();
  bool _isPlaying = false;

  XunferTaskTrans({
    required this.appId,
    required this.apiKey,
    required this.apiSecret,
  }) {
    url = _geturl();
    _connect();
    _startUploadTask();
  }

  // Build the signed WebSocket URL
  String _geturl() {
    final now = DateTime.now();
    final date =
        DateFormat("EEE, dd MMM yyyy HH:mm:ss 'GMT'").format(now.toUtc());
    final signatureOrigin =
        "host: $host\ndate: $date\nGET $requestUri HTTP/1.1";
    // Compute the HMAC-SHA256 signature with the API secret
    final signature = XunfeiUtils.hmacSha256(apiSecret, signatureOrigin);
    final authorization = base64.encode(utf8.encode(
        "api_key=\"$apiKey\", algorithm=\"hmac-sha256\", headers=\"host date request-line\", signature=\"$signature\""));
    final queryParams = {
      "authorization": authorization,
      "date": date,
      "host": host,
      "serviceId": "simult_interpretation"
    };
    final wsUri =
        'ws://$host$requestUri?${Uri(queryParameters: queryParams).query}';
    return wsUri;
  }

  // Build the request payload for one audio frame
  Map<String, dynamic> _createParams(
      String appId, int status, List<int> audio) {
    final param = {
      "header": {
        "app_id": appId,
        "status": status,
      },
      "parameter": {
        "ist": {
          "accent": "mandarin",
          "domain": "ist_ed_open",
          "language": "zh_cn",
          "vto": 15000,
          "eos": 150000
        },
        "streamtrans": {"from": "cn", "to": "en"},
        "tts": {
          "vcn": "x2_catherine",
          "tts_results": {
            "encoding": "raw",
            "sample_rate": 16000,
            "channels": 1,
            "bit_depth": 16,
            "frame_size": 0
          }
        }
      },
      "payload": {
        "data": {
          "audio": base64.encode(audio),
          "encoding": "raw",
          "sample_rate": 16000,
          "seq": 1,
          "status": status
        }
      }
    };
    return param;
  }

  // Open the WebSocket connection
  Future<void> _connect() async {
    _channel = WebSocketChannel.connect(Uri.parse(url));
    _channel?.stream.listen(
      (message) {
        onMessage(message);
      },
      onError: (error) {
        isconnected = false;
        print('Connection failed: $error');
      },
      onDone: () {
        isconnected = false;
        print('WebSocket connection closed');
      },
      cancelOnError: true,
    );
    isconnected = true;
  }

  // Start the upload task
  void _startUploadTask() {
    _isRunning = true;
    _uploadTask = _pushaudio();
  }
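  // Note on framing: with the format declared in _createParams (16 kHz sample
  // rate, 16-bit depth, mono), 1280 bytes correspond to 640 samples, i.e.
  // 40 ms of audio, which is why the send interval below is 0.04 s. This
  // assumes callers feed raw PCM in exactly that format via addAudioData().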
  // Upload audio to the server in fixed-size frames
  Future<void> _pushaudio() async {
    int frameSize = 1280; // audio bytes per frame
    double interval = 0.04; // send interval (unit: s)
    int status = STATUS_FIRST_FRAME; // first, intermediate, or last frame
    List<int> buffer = [];
    try {
      await for (List<int> frame in stream) {
        // Append the incoming audio to the buffer
        buffer.addAll(frame);
        // Send whole frames while enough data is buffered
        while (buffer.length >= frameSize) {
          List<int> sendFrame = buffer.sublist(0, frameSize);
          buffer = buffer.sublist(frameSize);
          if (status == STATUS_FIRST_FRAME) {
            // First frame
            String data =
                json.encode(_createParams(appId, status, sendFrame));
            _channel?.sink.add(data);
            print('First frame sent...$data');
            status = STATUS_CONTINUE_FRAME;
          } else if (status == STATUS_CONTINUE_FRAME) {
            // Intermediate frame
            String data =
                json.encode(_createParams(appId, status, sendFrame));
            _channel?.sink.add(data);
            // print('Intermediate frame sent...');
          }
          // Simulate the audio sampling interval
          await Future.delayed(
              Duration(milliseconds: (interval * 1000).toInt()));
        }
      }
      // The input stream has ended: send an empty last frame
      status = STATUS_LAST_FRAME;
      String data = json.encode(_createParams(appId, status, []));
      _channel?.sink.add(data);
      print('Last frame sent...');
    } catch (e) {
      print("push msg: $e");
    }
    print('Audio processing finished');
  }

  // Add audio data to the upload stream
  void addAudioData(List<int> data) {
    if (!_isStreamClosed) {
      _streamController.add(data);
    } else {
      print("Stream is closed. Cannot add more data.");
    }
  }

  // Handle a translation result from the server
  Future<void> onMessage(String message) async {
    var messageMap = json.decode(message);
    var status = messageMap["header"]["status"];
    // Decode the recognition result and append it to the text buffer
    if (messageMap.containsKey('payload') &&
        messageMap['payload'].containsKey('recognition_results')) {
      var result = messageMap['payload']['recognition_results']['text'];
      var asrresult = utf8.decode(base64.decode(result));
      addtext(asrresult);
      print("Recognition response received..${text()}");
    }
    // Forward translated TTS audio to the playback stream
    if (messageMap.containsKey('payload') &&
        messageMap['payload'].containsKey('tts_results')) {
      var audio = messageMap['payload']['tts_results']['audio'];
      var audioData = base64.decode(audio);
      _transtreamController.add(audioData);
    }
    // status == 2 marks the end of the session
    if (status == 2) {
      print("All data processed; waiting for the live interpretation to "
          "finish. See output/audio/ for the interpreted audio files...");
      await Future.delayed(Duration(seconds: 3));
      close();
    }
  }

  // Close the stream and stop the upload task
  Future<void> close() async {
    if (!_isStreamClosed) {
      _isStreamClosed = true;
      _isRunning = false; // stop the upload task
      await _streamController.close(); // close the stream
      await _uploadTask; // wait for the upload task to finish
      print("Stream and upload task closed.");
    }
  }
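  // The recognizer streams incremental results. A result with pgs == "rpl"
  // replaces previously received sentences whose sequence numbers fall in the
  // range rg = [start, end] (iFlytek's dynamic correction); any other result
  // is simply stored under its own sn. addtext() records each result and
  // text() replays them in order to rebuild the current transcript.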
  // Parse one recognition result and store it by sentence number
  void addtext(String result) {
    print("Adding text result: $result");
    var resultMap = json.decode(result);
    int sn = resultMap["sn"] as int;
    String pgs = resultMap["pgs"] as String;
    List rg = resultMap["rg"] != null
        ? List.from(resultMap["rg"])
        : []; // defaults to an empty list
    List ws = resultMap["ws"] as List;
    var item = XunferTask_Result_Text_Item(sn: sn, pgs: pgs, rg: rg, ws: ws);
    tests[sn] = item;
  }

  // Rebuild the current transcript from the stored results
  String text() {
    if (tests.isNotEmpty) {
      String resultStr = "";
      Map<int, XunferTask_Result_Text_Item> _results = {};
      var sortedKeys = tests.keys.toList()..sort();
      for (var key in sortedKeys) {
        var item = tests[key];
        if (item != null) {
          if (item.pgs == "rpl") {
            // Drop the sentences this result replaces
            var start = item.rg[0];
            var end = item.rg[1];
            for (int i = start; i <= end; i++) {
              _results.remove(i);
            }
          }
          _results[item.sn] = item;
        }
      }
      var keys = _results.keys.toList()..sort();
      for (var key in keys) {
        var item = _results[key];
        if (item != null) {
          // Concatenate the words (cw[*].w) of every ws entry
          for (var ws in item.ws) {
            var it = ws as Map;
            var cw = it["cw"] as List;
            for (var ct in cw) {
              resultStr += ct["w"] as String;
            }
          }
        }
      }
      return resultStr;
    }
    return "";
  }

  // Play the translated TTS audio as it arrives
  Future<void> audio(AudioPlayer _audioPlayer) async {
    _transtreamController.stream.listen((Uint8List audioBytes) async {
      if (!_isPlaying) {
        // First chunk: start playback
        await _audioPlayer.play(BytesSource(audioBytes));
        _isPlaying = true;
      } else {
        // audioplayers has no API for appending to a playing source, so each
        // later chunk is played as a new source; gapless raw-PCM playback
        // would need a streaming-capable plugin.
        await _audioPlayer.play(BytesSource(audioBytes));
      }
    }, onError: (error) {
      print("Error in audio stream: $error");
    });
  }
}
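// A minimal usage sketch (not part of the task itself): it assumes a recorder
// plugin that exposes raw 16 kHz, 16-bit mono PCM as a Stream<List<int>>
// (here called `micStream`, a hypothetical name) and placeholder credentials.
// It wires the microphone into the task and plays the translated audio back.
Future<void> exampleRun(Stream<List<int>> micStream) async {
  final task = XunferTaskTrans(
    appId: 'your_app_id', // placeholder credentials
    apiKey: 'your_api_key',
    apiSecret: 'your_api_secret',
  );
  // Start listening for translated TTS audio
  final player = AudioPlayer();
  unawaited(task.audio(player));
  // Forward captured PCM frames to the upload stream
  await for (final frame in micStream) {
    task.addAudioData(frame);
  }
  // No more input: close the stream and wait for the upload to drain
  await task.close();
}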