// Xunfei (iFlytek) simultaneous-interpretation task.
import 'dart:async';
import 'dart:convert';
import 'dart:typed_data';
import 'package:demo001/xunfei/utils.dart';
import 'package:demo001/xunfei/xunfei.dart';
import 'package:intl/intl.dart';
import 'package:web_socket_channel/web_socket_channel.dart';

// Callback type invoked whenever the task state changes.
typedef TaskStateChangeEvent = void Function(ITaskTrans task);
// One incremental recognition result returned by the server.
class XunferTask_Result_Text_Item {
  final int sn; // sentence sequence number
  final String pgs; // "apd" = append, "rpl" = replace earlier results
  final List<int> rg; // range of sequence numbers replaced when pgs == "rpl"
  final List<dynamic> ws; // word segments
  XunferTask_Result_Text_Item({
    required this.sn,
    required this.pgs,
    required this.rg,
    required this.ws,
  });
}
class XunferTaskTrans implements ITaskTrans {
  static const int STATUS_FIRST_FRAME = 0;
  static const int STATUS_CONTINUE_FRAME = 1;
  static const int STATUS_LAST_FRAME = 2;

  final String appId;
  final String apiKey;
  final String apiSecret;
  final String host = "ws-api.xf-yun.com";
  final String requestUri = "/v1/private/simult_interpretation";
  late String url;
  WebSocketChannel? _channel;
  final TaskStateChangeEvent onEvent; // state-change callback
  bool isconnected = false;
  int _state = 0; // 0 not connected, 1 uploading audio, 2 audio finished, 3 task complete

  // Recognition results, keyed by sentence sequence number.
  final Map<int, XunferTask_Result_Text_Item> tests = {};
  // Input (captured) audio stream.
  final StreamController<Uint8List> _inputaudioStream =
      StreamController<Uint8List>();
  // Output (synthesized translation) audio stream.
  final StreamController<Uint8List> _outputaudioStream =
      StreamController<Uint8List>();

  XunferTaskTrans({
    required this.appId,
    required this.apiKey,
    required this.apiSecret,
    required this.onEvent,
  }) {
    url = _geturl();
    _connect();
    _startpush();
  }
  // Build the signed WebSocket URL (HMAC-SHA256 over host, date and request line).
  String _geturl() {
    final now = DateTime.now();
    // RFC 1123 date must use English day/month names, so pin the locale.
    final date = DateFormat("EEE, dd MMM yyyy HH:mm:ss 'GMT'", 'en_US')
        .format(now.toUtc());
    final signatureOrigin =
        "host: $host\ndate: $date\nGET $requestUri HTTP/1.1";
    // Compute the HMAC-SHA256 signature.
    final signature = XunfeiUtils.hmacSha256(apiSecret, signatureOrigin);
    final authorization = base64.encode(utf8.encode(
        "api_key=\"$apiKey\", algorithm=\"hmac-sha256\", headers=\"host date request-line\", signature=\"$signature\""));
    final queryParams = {
      "authorization": authorization,
      "date": date,
      "host": host,
      "serviceId": "simult_interpretation"
    };
    final wsUri =
        'ws://$host$requestUri?${Uri(queryParameters: queryParams).query}';
    return wsUri;
  }
  // Open the WebSocket connection and start listening for server messages.
  Future<void> _connect() async {
    _channel = WebSocketChannel.connect(Uri.parse(url));
    _channel?.stream
        .timeout(const Duration(seconds: 10)) // error out if the server stays silent for 10 s
        .listen(
      (message) {
        onMessage(message);
      },
      onError: (error) {
        isconnected = false;
        print('Connection failed: $error');
      },
      onDone: () {
        isconnected = false;
        print('WebSocket connection closed');
        print('Close code: ${_channel?.closeCode}');
        print('Close reason: ${_channel?.closeReason}');
      },
      cancelOnError: true,
    );
    // Note: connect() does not wait for the handshake, so this is optimistic.
    isconnected = true;
  }
  // Read audio from the input stream and upload it frame by frame.
  Future<void> _startpush() async {
    _state = 1;
    int frameSize = 1280; // bytes per audio frame
    double interval = 0.04; // interval between frames (seconds)
    int status = STATUS_FIRST_FRAME; // marks the frame as first, intermediate or last
    Uint8List buffer = Uint8List(0);
    try {
      await for (Uint8List chunk in _inputaudioStream.stream) {
        // Append the new data to the buffer.
        buffer = Uint8List.fromList([...buffer, ...chunk]);
        // Once the buffer holds at least one full frame, send it.
        while (buffer.length >= frameSize) {
          Uint8List frame = buffer.sublist(0, frameSize); // take one frame
          buffer = buffer.sublist(frameSize); // drop the consumed bytes
          // First frame.
          if (status == STATUS_FIRST_FRAME) {
            String data = json.encode(_createParams(appId, status, frame));
            _channel?.sink.add(data);
            print('First frame sent... $data');
            status = STATUS_CONTINUE_FRAME;
          }
          // Intermediate frame.
          else if (status == STATUS_CONTINUE_FRAME) {
            String data = json.encode(_createParams(appId, status, frame));
            _channel?.sink.add(data);
            print('Intermediate frame sent... $data');
          }
          // Simulate the audio sampling interval.
          await Future.delayed(
              Duration(milliseconds: (interval * 1000).round()));
        }
      }
      // Input stream closed: send whatever is left as the last frame.
      status = STATUS_LAST_FRAME;
      String data = json.encode(_createParams(appId, status, buffer));
      _channel?.sink.add(data);
      print('Last frame sent... $data');
      _state = 2;
    } catch (e) {
      print("Error while uploading audio data: $e");
    }
    print('Audio upload finished');
  }
  // Build the JSON frame sent to the simultaneous-interpretation service.
  Map<String, dynamic> _createParams(
      String appId, int status, Uint8List audio) {
    final param = {
      "header": {
        "app_id": appId,
        "status": status,
      },
      "parameter": {
        "ist": {
          "accent": "mandarin",
          "domain": "ist_ed_open",
          "language": "zh_cn",
          "vto": 15000,
          "eos": 150000
        },
        "streamtrans": {"from": "cn", "to": "en"},
        "tts": {
          "vcn": "x2_catherine",
          "tts_results": {
            "encoding": "raw",
            "sample_rate": 16000,
            "channels": 1,
            "bit_depth": 16,
            "frame_size": 0
          }
        }
      },
      "payload": {
        "data": {
          "audio": base64.encode(audio),
          "encoding": "raw",
          "sample_rate": 16000,
          "seq": 1,
          "status": status
        }
      }
    };
    return param;
  }
  // Push captured audio data into the input stream.
  void addAudioData(Uint8List data) {
    _inputaudioStream.add(data);
  }
  // Handle a server message (recognition, translation and TTS results).
  Future<void> onMessage(String message) async {
    try {
      print("Message received: $message");
      // Parse the JSON reply.
      var messageMap = json.decode(message);
      var status = messageMap["header"]["status"];
      var sid = messageMap["header"]["sid"];
      // Append recognition results to the text buffer.
      if (messageMap.containsKey('payload') &&
          messageMap['payload'].containsKey('recognition_results')) {
        var result = messageMap['payload']['recognition_results']['text'];
        var asrresult = utf8.decode(base64.decode(result));
        addtext(asrresult);
        print("Recognition result: ${originalText()}");
      }
      // Translation results.
      if (messageMap.containsKey('payload') &&
          messageMap['payload'].containsKey('streamtrans_results')) {
        var result = messageMap['payload']['streamtrans_results']['text'];
        var transresult = utf8.decode(base64.decode(result));
        print("Translation result: $transresult");
      }
      // Synthesized audio of the translation.
      if (messageMap.containsKey('payload') &&
          messageMap['payload'].containsKey('tts_results')) {
        var audio = messageMap['payload']['tts_results']['audio'];
        var audioData = base64.decode(audio);
        _outputaudioStream.add(audioData);
        print("TTS audio received: ${audioData.length} bytes");
      }
      // status == 2 means the server has finished the session.
      if (status == 2) {
        print("Task finished!");
        _state = 3;
        onEvent(this);
        await Future.delayed(Duration(seconds: 1));
        _channel?.sink.close();
      }
    } catch (e) {
      print("Error while handling server message: $e");
    }
  }
  // Close the input stream and stop the upload task.
  void endpuish() {
    _inputaudioStream.close();
  }
  // Parse one recognition result and store it by sequence number.
  void addtext(String result) {
    var resultMap = json.decode(result);
    int sn = resultMap["sn"] as int;
    String pgs = resultMap["pgs"] as String;
    List<int> rg = resultMap["rg"] != null
        ? List<int>.from(resultMap["rg"])
        : []; // default: empty list
    List<dynamic> ws = resultMap["ws"] as List<dynamic>;
    var item = XunferTask_Result_Text_Item(sn: sn, pgs: pgs, rg: rg, ws: ws);
    tests[sn] = item;
  }
  int state() {
    return _state;
  }
  // Assemble the recognized text, applying "rpl" (replace) corrections.
  String originalText() {
    if (tests.isNotEmpty) {
      String resultStr = "";
      Map<int, XunferTask_Result_Text_Item> _results = {};
      var sortedKeys = tests.keys.toList()..sort();
      for (var key in sortedKeys) {
        var item = tests[key];
        if (item != null) {
          if (item.pgs == "rpl") {
            // Drop the results this item replaces.
            var start = item.rg[0];
            var end = item.rg[1];
            for (int i = start; i <= end; i++) {
              _results.remove(i);
            }
          }
          _results[item.sn] = item;
        }
      }
      var keys = _results.keys.toList()..sort();
      for (var key in keys) {
        var item = _results[key];
        if (item != null) {
          for (var ws in item.ws) {
            var it = ws as Map<String, dynamic>;
            var cw = it["cw"] as List<dynamic>;
            for (var ct in cw) {
              resultStr += ct["w"] as String;
            }
          }
        }
      }
      return resultStr;
    }
    return "";
  }
  // Translated text (not accumulated yet).
  String translateText() {
    return "";
  }

  // Original (input) audio stream.
  Stream<Uint8List> originalAudio() {
    return _inputaudioStream.stream;
  }

  // Translated (TTS output) audio stream.
  Stream<Uint8List> translateAudio() {
    return _outputaudioStream.stream;
  }
}
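
// A minimal usage sketch, not part of the original file. The credentials and
// the `micFrames` source (assumed to be 16 kHz, 16-bit mono PCM chunks) are
// placeholders; substitute real values from the iFlytek console and your own
// audio capture code.
void exampleUsage(Stream<Uint8List> micFrames) {
  late final XunferTaskTrans task;
  task = XunferTaskTrans(
    appId: 'your_app_id', // placeholder
    apiKey: 'your_api_key', // placeholder
    apiSecret: 'your_api_secret', // placeholder
    onEvent: (_) {
      // Fired on state changes; state() == 3 means the task is complete.
      print('state: ${task.state()}, text: ${task.originalText()}');
    },
  );
  // Feed captured audio into the task; close the input when capture ends.
  micFrames.listen(task.addAudioData, onDone: task.endpuish);
  // Consume the synthesized translation audio (e.g. hand it to a player).
  task.translateAudio().listen((Uint8List chunk) {
    print('translated audio chunk: ${chunk.length} bytes');
  });
}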