Sfoglia il codice sorgente

上传新版本demo

master
awei 4 mesi fa
parent
commit
6c7ce8b033
29 ha cambiato i file con 1062 aggiunte e 2028 eliminazioni
  1. +3
    -0
      .vscode/settings.json
  2. +3
    -0
      devtools_options.yaml
  3. +0
    -37
      lib/audioplayer.dart
  4. +65
    -0
      lib/deepseek/deepseek.dart
  5. +62
    -0
      lib/doubao/DouBao.dart
  6. +2
    -4
      lib/main.dart
  7. +8
    -0
      lib/model/model.dart
  8. +0
    -209
      lib/plugin/xunfei/audiototext/audiototext.dart
  9. +0
    -74
      lib/plugin/xunfei/audiototext/result_test.dart
  10. +0
    -273
      lib/plugin/xunfei/audiotranslate/audiotranslate.dart
  11. +0
    -25
      lib/plugin/xunfei/audiotranslate/result_audio.dart
  12. +0
    -73
      lib/plugin/xunfei/audiotranslate/result_test.dart
  13. +0
    -19
      lib/plugin/xunfei/audiotranslate/utils.dart
  14. +218
    -0
      lib/scenes/ RecordScene.dart
  15. +129
    -0
      lib/scenes/AIChatScene.dart
  16. +114
    -0
      lib/scenes/BluetoothScene.dart
  17. +0
    -165
      lib/scenes/ChartScene.dart
  18. +0
    -112
      lib/scenes/HomeScene.dart
  19. +0
    -448
      lib/scenes/SoundRecordScene.dart
  20. +0
    -321
      lib/xunfei/task_trans.dart
  21. +0
    -56
      lib/xunfei/xunfei.dart
  22. +297
    -0
      lib/xunfei/xunfei_translate.dart
  23. +4
    -4
      linux/flutter/generated_plugin_registrant.cc
  24. +1
    -1
      linux/flutter/generated_plugins.cmake
  25. +2
    -4
      macos/Flutter/GeneratedPluginRegistrant.swift
  26. +143
    -191
      pubspec.lock
  27. +7
    -8
      pubspec.yaml
  28. +3
    -3
      windows/flutter/generated_plugin_registrant.cc
  29. +1
    -1
      windows/flutter/generated_plugins.cmake

+ 3
- 0
.vscode/settings.json Vedi File

@@ -0,0 +1,3 @@
{
"cmake.sourceDirectory": "/Users/liwei1dao/work/flutter/test001/linux"
}

+ 3
- 0
devtools_options.yaml Vedi File

@@ -0,0 +1,3 @@
description: This file stores settings for Dart & Flutter DevTools.
documentation: https://docs.flutter.dev/tools/devtools/extensions#configure-extension-enablement-states
extensions:

+ 0
- 37
lib/audioplayer.dart Vedi File

@@ -1,37 +0,0 @@
import 'package:audioplayers/audioplayers.dart';
import 'dart:typed_data';
import 'dart:async';
import 'dart:io';

import 'package:demo001/plugin/xunfei/audiotranslate/result_audio.dart';

/// Plays PCM audio chunks as they arrive from a translation result stream.
class AudioPlayerHandler {
  final AudioPlayer _audioPlayer = AudioPlayer();
  final Xunfei_AudioTranslation_Result_Audio _audioStream;

  AudioPlayerHandler(this._audioStream);

  /// Consumes the audio stream and plays each chunk in arrival order.
  Future<void> playAudio() async {
    await for (final chunk in _audioStream.audioStream) {
      // Persist the chunk to a temp file, then hand it to the player.
      final path = await _persist(chunk);
      await _audioPlayer.play(DeviceFileSource(path));
    }
  }

  /// Writes [bytes] to a fresh temporary file and returns its path.
  Future<String> _persist(Uint8List bytes) async {
    final dir = await Directory.systemTemp.createTemp();
    final target = File('${dir.path}/audio.pcm');
    await target.writeAsBytes(bytes);
    return target.path;
  }
}

+ 65
- 0
lib/deepseek/deepseek.dart Vedi File

@@ -0,0 +1,65 @@
import 'dart:convert';
import 'dart:io';

/// Minimal DeepSeek chat-completions client.
class Deepseek {
  final String apiKey; // bearer token for api.deepseek.com
  Deepseek({required this.apiKey});

  /// Streams the assistant's reply to [prompt], one content delta at a time.
  ///
  /// Sends a streaming (SSE) chat-completions request and yields the
  /// `choices[0].delta.content` fragment of each data event. Throws an
  /// [Exception] when the server answers with a non-200 status.
  Stream<String> chat(String prompt) async* {
    final client = HttpClient();
    try {
      final request = await client
          .postUrl(Uri.parse('https://api.deepseek.com/chat/completions'));

      // Headers for a streaming (server-sent events) request.
      request.headers
        ..set('Content-Type', 'application/json')
        ..set('Authorization', 'Bearer $apiKey')
        ..set('Accept', 'text/event-stream');

      // Request body: single user message, streaming enabled.
      final requestBody = jsonEncode({
        'model': 'deepseek-chat',
        'stream': true,
        'messages': [
          {'role': 'user', 'content': prompt}
        ]
      });

      // Write the body and send the request.
      request.add(utf8.encode(requestBody));
      final response = await request.close();

      // Fail loudly on a non-OK status instead of parsing an error body.
      if (response.statusCode != 200) {
        throw Exception('API请求失败: ${response.statusCode}');
      }

      // Accumulate decoded chunks and split on the SSE event separator.
      String buffer = '';
      await for (final chunk in response.transform(utf8.decoder)) {
        buffer += chunk;

        while (buffer.contains('\n\n')) {
          final eventEnd = buffer.indexOf('\n\n');
          final event = buffer.substring(0, eventEnd);
          buffer = buffer.substring(eventEnd + 2);

          if (event.startsWith('data: ')) {
            final dataContent = event.substring(6);
            // '[DONE]' is the stream-termination sentinel, not JSON.
            if (dataContent == '[DONE]') {
              continue;
            }
            // BUG FIX: decode the already-extracted payload once (the
            // original called event.substring(6) a second time), and skip
            // events whose delta carries no content (role-only or finish
            // chunks) instead of yielding null into a Stream<String>.
            final jsonData = jsonDecode(dataContent);
            final content =
                jsonData['choices'][0]['delta']['content'] as String?;
            if (content != null && content.isNotEmpty) {
              yield content;
            }
          }
        }
      }
    } finally {
      client.close(); // release the socket even on error/cancel
    }
  }
}

+ 62
- 0
lib/doubao/DouBao.dart Vedi File

@@ -0,0 +1,62 @@
import 'dart:convert';
import 'package:http/http.dart' as http;

/// Minimal Doubao (Volcano Ark) streaming chat client.
class Doubao {
  final String apiKey; // bearer token for the Ark endpoint
  final String modelId; // endpoint/model id, e.g. "ep-..."
  Doubao({required this.apiKey, required this.modelId});

  /// Streams the assistant's reply to [userMessage] fragment by fragment.
  ///
  /// Errors are reported via print and terminate the stream, matching the
  /// original best-effort behavior.
  Stream<String> chat(String userMessage) async* {
    final client = http.Client();
    final url =
        Uri.parse('https://ark.cn-beijing.volces.com/api/v3/chat/completions');

    final headers = {
      'Content-Type': 'application/json',
      'Authorization': 'Bearer $apiKey',
    };

    final requestBody = {
      "model": modelId,
      "messages": [
        {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手."},
        {"role": "user", "content": userMessage}
      ],
      "stream": true,
    };

    try {
      final request = http.Request('POST', url)
        ..headers.addAll(headers)
        ..body = jsonEncode(requestBody);

      final response = await client.send(request);
      // BUG FIX: surface HTTP failures instead of silently trying to parse
      // an error body as an SSE stream.
      if (response.statusCode != 200) {
        print('请求异常: HTTP ${response.statusCode}');
        return;
      }

      // SSE-style stream: one "data: {...}" JSON object per line.
      await for (final chunk in response.stream
          .transform(utf8.decoder)
          .transform(const LineSplitter())) {
        if (chunk.isEmpty) continue;

        if (chunk.startsWith('data:')) {
          final jsonStr = chunk.substring(5).trim();
          if (jsonStr == '[DONE]') break; // end-of-stream sentinel

          try {
            final data = jsonDecode(jsonStr);
            final content = data['choices'][0]['delta']['content'] ?? '';
            if (content.isNotEmpty) {
              yield content; // emit each generated fragment
            }
          } catch (e) {
            print('JSON解析错误: $e');
          }
          // BUG FIX: removed the leftover per-chunk debug print that logged
          // every response fragment.
        }
      }
    } catch (e) {
      print('请求异常: $e');
    } finally {
      // BUG FIX: the client was never closed, leaking the connection.
      client.close();
    }
  }
}

+ 2
- 4
lib/main.dart Vedi File

@@ -1,8 +1,6 @@
import 'package:demo001/scenes/ChartScene.dart';
import 'package:demo001/scenes/%20RecordScene.dart';
import 'package:flutter/material.dart';

import 'scenes/SoundRecordScene.dart';

void main() {
WidgetsFlutterBinding.ensureInitialized();
runApp(const MyApp());
@@ -20,7 +18,7 @@ class MyApp extends StatelessWidget {
colorScheme: ColorScheme.fromSeed(seedColor: Colors.deepPurple),
useMaterial3: true,
),
home: SoundRecordScene(),
home: RecordScene(),
);
}
}

+ 8
- 0
lib/model/model.dart Vedi File

@@ -0,0 +1,8 @@
import 'dart:typed_data';

/// Holds one translation exchange: the source audio/text pair and the
/// translated text/audio pair.
///
/// NOTE(review): all four fields are private and nothing visible here reads
/// or writes them — presumably accessors are planned; confirm.
class TranslateItemData {
  Uint8List? _originalAudio; // source-language audio
  String? _originalText; // source-language text
  String? _translateText; // target-language text
  Uint8List? _translateAudio; // target-language audio
}

+ 0
- 209
lib/plugin/xunfei/audiototext/audiototext.dart Vedi File

@@ -1,209 +0,0 @@
import 'dart:convert';

import 'package:crypto/crypto.dart';
import 'package:demo001/plugin/xunfei/audiototext/result_test.dart';
import 'package:intl/intl.dart';
import 'package:web_socket_channel/web_socket_channel.dart';

/// Callback invoked with each recognized-text item.
typedef ResponseCallback = void Function(
    Xunfei_AudioToText_Result_Text_Item text);

/// iFlytek (Xunfei) streaming speech-to-text client (singleton).
///
/// Builds an HMAC-SHA256-signed URL for the `/v2/iat` WebSocket endpoint and
/// streams 1280-byte PCM frames tagged first/continue/last as the protocol
/// requires.
class Xunfei_AudioToText {
  static const int STATUS_FIRST_FRAME = 0; // first audio frame
  static const int STATUS_CONTINUE_FRAME = 1; // intermediate audio frame
  static const int STATUS_LAST_FRAME = 2; // final audio frame

  final String appId;
  final String apiKey;
  final String apiSecret;
  final String host = "iat-api.xfyun.cn";
  final String requestUri = "/v2/iat";
  WebSocketChannel? _channel;

  final ResponseCallback onResponse; // invoked with recognition results
  late Xunfei_AudioToText_Result_Text currtext; // text accumulated for the current session
  // Static field holding the single instance.
  static Xunfei_AudioToText? _instance;
  Xunfei_AudioToText._internal({
    required this.appId,
    required this.apiKey,
    required this.apiSecret,
    required this.onResponse, // callback supplied at construction time
  });
  // Factory constructor returning the process-wide singleton.
  factory Xunfei_AudioToText({
    required String appId,
    required String apiKey,
    required String apiSecret,
    required ResponseCallback onResponse,
  }) {
    _instance ??= Xunfei_AudioToText._internal(
      appId: appId,
      apiKey: apiKey,
      apiSecret: apiSecret,
      onResponse: onResponse,
    );
    return _instance!;
  }

  // Builds the signed WebSocket URL (RFC1123 date + HMAC-SHA256 signature).
  String _createUrl() {
    final now = DateTime.now();
    final date =
        DateFormat("EEE, dd MMM yyyy HH:mm:ss 'GMT'").format(now.toUtc());
    final signatureOrigin =
        "host: $host\ndate: $date\nGET $requestUri HTTP/1.1";

    // Compute the HMAC-SHA256 signature of the canonical request string.
    final signature = _hmacSha256(apiSecret, signatureOrigin);

    final authorization = base64.encode(utf8.encode(
        "hmac username=\"$apiKey\", algorithm=\"hmac-sha256\", headers=\"host date request-line\", signature=\"$signature\""));

    final queryParams = {
      "host": host,
      "date": date,
      "authorization": authorization,
    };

    final wsUri =
        'ws://$host$requestUri?${Uri(queryParameters: queryParams).query}';
    return wsUri;
  }

  /// Opens the WebSocket connection.
  ///
  /// NOTE(review): the 3-second delay presumably waits for the handshake —
  /// there is no explicit readiness signal; confirm against the API docs.
  Future<void> start() async {
    String wsUrl = _createUrl();
    await _connect(wsUrl);
    await Future.delayed(const Duration(seconds: 3));
    return;
  }

  // Uploads audio from [audioStream], slicing it into 1280-byte frames and
  // pacing sends at ~40 ms to mimic real-time capture.
  Future<void> pushaudio(Stream<List<int>> audioStream) async {
    int frameSize = 1280; // bytes per frame
    double interval = 0.04; // delay between frames (seconds)
    int status = STATUS_FIRST_FRAME; // marks first / continue / last frame
    currtext = Xunfei_AudioToText_Result_Text();
    int index = 0;
    List<int> buffer = [];
    try {
      await for (List<int> frame in audioStream) {
        // Append incoming audio to the buffer.
        buffer.addAll(frame);
        while (buffer.length >= frameSize) {
          List<int> sendFrame = buffer.sublist(0, frameSize);
          buffer = buffer.sublist(frameSize);

          // NOTE(review): this re-reads from `buffer` via `index` and
          // overwrites the loop variable `frame`, which is never used
          // afterwards — looks like leftover dead code; confirm and remove.
          if (index + frameSize <= buffer.length) {
            frame = buffer.sublist(index, index + frameSize);
            index += frameSize;
          } else {
            frame = buffer.sublist(index);
            index = buffer.length; // exhausted
          }

          // First frame: carries the common/business parameters.
          if (status == STATUS_FIRST_FRAME) {
            final param = {
              "common": {
                "app_id": appId,
              },
              "business": {
                "language": "zh_cn",
                "domain": "iat",
                "accent": "mandarin",
              },
              "data": {
                "status": status,
                "format": "audio/L16;rate=16000",
                "audio": base64Encode(sendFrame),
                "encoding": "raw",
              }
            };
            String data = json.encode(param);
            _channel?.sink.add(data);
            // print('first frame sent...' + data);
            status = STATUS_CONTINUE_FRAME;
          }
          // Intermediate frame: audio payload only.
          else if (status == STATUS_CONTINUE_FRAME) {
            final param = {
              "data": {
                "status": status,
                "format": "audio/L16;rate=16000",
                "audio": base64Encode(sendFrame),
                "encoding": "raw",
              }
            };
            String data = json.encode(param);
            _channel?.sink.add(data);
            // print('continue frame sent...');
          }
          // Last frame. NOTE(review): `status` only becomes LAST after the
          // stream ends, so this branch appears unreachable inside the loop
          // — confirm.
          else if (status == STATUS_LAST_FRAME) {
            final param = {
              "data": {
                "status": status,
                "format": "audio/L16;rate=16000",
                "audio": base64Encode(sendFrame),
                "encoding": "raw",
              }
            };
            // print('last frame sent...');
            String data = json.encode(param);
            _channel?.sink.add(data);
            break;
          }
          // Pace sending to mimic the real-time sampling interval.
          await Future.delayed(
              Duration(milliseconds: (interval * 1000).toInt()));
        }
      }
      // Input exhausted: send an empty last frame to close the session.
      status = STATUS_LAST_FRAME;
      String data = json.encode({
        "data": {
          "status": status,
          "format": "audio/L16;rate=16000",
          "audio": base64Encode([]),
          "encoding": "raw",
        }
      });
      _channel?.sink.add(data);
    } catch (e) {
      print("push msg: $e");
    }

    print('音频处理完成');
  }

  // Opens the WebSocket and wires up message/error/done handlers.
  Future<void> _connect(String url) async {
    _channel = WebSocketChannel.connect(Uri.parse(url));
    _channel?.stream.listen(
      (message) {
        onMessage(message);
      },
      onError: (error) {
        print('连接失败: $error');
      },
      onDone: () {
        print('WebSocket 连接已关闭');
      },
      cancelOnError: true,
    );
    // NOTE(review): this delay is not awaited — likely a missing `await`.
    Future.delayed(const Duration(seconds: 1));
  }

  // Handles an incoming server message. Currently a stub.
  Future<void> onMessage(String message) async {}

  // Computes a base64-encoded HMAC-SHA256 of [message] keyed by [key].
  String _hmacSha256(String key, String message) {
    var keyBytes = utf8.encode(key); // key as UTF-8 bytes
    var messageBytes = utf8.encode(message); // message as UTF-8 bytes
    var hmac = Hmac(sha256, keyBytes); // HMAC object over SHA-256
    var digest = hmac.convert(messageBytes); // digest of the message
    return base64.encode(digest.bytes); // base64-encode the digest
  }
}

+ 0
- 74
lib/plugin/xunfei/audiototext/result_test.dart Vedi File

@@ -1,74 +0,0 @@
// ignore: camel_case_types
import 'dart:convert';

/// One recognition fragment from the streaming ASR result.
// ignore: camel_case_types
class Xunfei_AudioToText_Result_Text_Item {
  final int sn; // sequence number
  final String pgs; // "apd" = append, "rpl" = replace a range
  final List<int> rg; // [start, end] range replaced when pgs == "rpl"
  final List<dynamic> ws; // word segments

  Xunfei_AudioToText_Result_Text_Item({
    required this.sn,
    required this.pgs,
    required this.rg,
    required this.ws,
  });
}

/// Accumulates streaming recognition fragments and rebuilds the final text.
///
/// Fragments arrive keyed by sequence number; a fragment whose `pgs` is
/// "rpl" retracts the fragments in the inclusive range `rg[0]..rg[1]`.
// ignore: camel_case_types
class Xunfei_AudioToText_Result_Text {
  final Map<int, Xunfei_AudioToText_Result_Text_Item> results = {};
  Xunfei_AudioToText_Result_Text();

  /// Parses one JSON fragment and stores it by sequence number.
  void add(String result) {
    print("添加文本结果:$result");
    var resultMap = json.decode(result);
    int sn = resultMap["sn"] as int;
    String pgs = resultMap["pgs"] as String;
    // "rg" only accompanies replacement ("rpl") fragments; default empty.
    List<int> rg =
        resultMap["rg"] != null ? List<int>.from(resultMap["rg"]) : [];
    List<dynamic> ws = resultMap["ws"] as List<dynamic>;
    results[sn] =
        Xunfei_AudioToText_Result_Text_Item(sn: sn, pgs: pgs, rg: rg, ws: ws);
  }

  /// Applies replacements in sequence order and concatenates the words.
  String result() {
    if (results.isEmpty) return "";

    // First pass: build the effective fragment set, honouring "rpl"
    // fragments that retract an earlier range of sequence numbers.
    final effective = <int, Xunfei_AudioToText_Result_Text_Item>{};
    final sortedKeys = results.keys.toList()..sort();
    for (final key in sortedKeys) {
      final item = results[key];
      if (item == null) continue;
      // Robustness: guard against a malformed "rpl" without a full range.
      if (item.pgs == "rpl" && item.rg.length >= 2) {
        for (int i = item.rg[0]; i <= item.rg[1]; i++) {
          effective.remove(i);
        }
      }
      effective[item.sn] = item;
    }

    // Second pass: concatenate the words of the surviving fragments.
    // BUG FIX: read from the filtered map — the original consulted the raw
    // `results` map here, which only worked because both maps aliased the
    // same items.
    final out = StringBuffer();
    final keys = effective.keys.toList()..sort();
    for (final key in keys) {
      final item = effective[key];
      if (item == null) continue;
      for (final seg in item.ws) {
        final cwList = (seg as Map<String, dynamic>)["cw"] as List<dynamic>;
        for (final ct in cwList) {
          out.write(ct["w"] as String);
        }
      }
    }
    return out.toString();
  }
}

+ 0
- 273
lib/plugin/xunfei/audiotranslate/audiotranslate.dart Vedi File

@@ -1,273 +0,0 @@
import 'dart:convert';
import 'package:crypto/crypto.dart';
import 'package:demo001/plugin/xunfei/audiotranslate/result_audio.dart';
import 'package:demo001/plugin/xunfei/audiotranslate/result_test.dart';
import 'package:web_socket_channel/web_socket_channel.dart';
import 'package:intl/intl.dart';
import 'package:flutter/services.dart' show rootBundle;
import 'dart:io';

/// Callback delivering the current text-result and audio-result objects.
typedef ResponseCallback = void Function(
    Xunfei_AudioTranslation_Result_Text text,
    Xunfei_AudioTranslation_Result_Audio audio); // callback signature

/// iFlytek simultaneous-interpretation client over WebSocket (singleton).
///
/// Streams PCM audio to the `simult_interpretation` endpoint; recognition,
/// translation and TTS results arrive in [onMessage] and are surfaced
/// through [onResponse].
class Xunfei_AudioTranslation {
  static const int STATUS_FIRST_FRAME = 0; // first audio frame
  static const int STATUS_CONTINUE_FRAME = 1; // intermediate frame
  static const int STATUS_LAST_FRAME = 2; // final frame

  // Static field holding the single instance.
  static Xunfei_AudioTranslation? _instance;

  final String appId;
  final String apiKey;
  final String apiSecret;
  final String host = "ws-api.xf-yun.com";
  final String httpProto = "HTTP/1.1";
  final String httpMethod = "GET";
  final String requestUri = "/v1/private/simult_interpretation";
  final String algorithm = "hmac-sha256";
  final int state = 0; // 0 = uninitialized, 1 = connected, 2 = translating
  final String msg = "";
  WebSocketChannel? _channel;
  final ResponseCallback onResponse; // result callback

  late Xunfei_AudioTranslation_Result_Text currtest; // text result accumulator
  // NOTE(review): `curraudio` is declared `late` but never assigned anywhere
  // in this class, so `curraudio.addAudioData(...)` in onMessage will throw
  // a LateInitializationError on the first TTS payload — confirm and fix.
  late Xunfei_AudioTranslation_Result_Audio curraudio; // audio result accumulator
  Xunfei_AudioTranslation._internal({
    required this.appId,
    required this.apiKey,
    required this.apiSecret,
    required this.onResponse, // callback supplied at construction time
  });

  // Factory constructor returning the process-wide singleton.
  factory Xunfei_AudioTranslation({
    required String appId,
    required String apiKey,
    required String apiSecret,
    required ResponseCallback onResponse,
  }) {
    _instance ??= Xunfei_AudioTranslation._internal(
      appId: appId,
      apiKey: apiKey,
      apiSecret: apiSecret,
      onResponse: onResponse,
    );
    return _instance!;
  }

  // Builds the signed WebSocket URL (RFC1123 date + HMAC-SHA256 signature).
  // NOTE(review): the request line is hard-coded ("GET ... HTTP/1.1") even
  // though `httpMethod`/`httpProto` fields exist — confirm intent.
  String _createUrl() {
    final now = DateTime.now();
    final date =
        DateFormat("EEE, dd MMM yyyy HH:mm:ss 'GMT'").format(now.toUtc());
    final signatureOrigin =
        "host: $host\ndate: $date\nGET $requestUri HTTP/1.1";

    // Compute the HMAC-SHA256 signature of the canonical request string.
    final signature = _hmacSha256(apiSecret, signatureOrigin);

    final authorization = base64.encode(utf8.encode(
        "api_key=\"$apiKey\", algorithm=\"hmac-sha256\", headers=\"host date request-line\", signature=\"$signature\""));

    final queryParams = {
      "authorization": authorization,
      "date": date,
      "host": host,
      "serviceId": "simult_interpretation"
    };

    final wsUri =
        'ws://$host$requestUri?${Uri(queryParameters: queryParams).query}';
    return wsUri;
  }

  // Assembles one protocol frame: header/status, ASR ("ist") parameters,
  // streaming translation (cn -> en), TTS settings, plus the base64 audio.
  Map<String, dynamic> _createParams(
      String appId, int status, List<int> audio) {
    final param = {
      "header": {
        "app_id": appId,
        "status": status,
      },
      "parameter": {
        "ist": {
          "accent": "mandarin",
          "domain": "ist_ed_open",
          "language": "zh_cn",
          "vto": 15000,
          "eos": 150000
        },
        "streamtrans": {"from": "cn", "to": "en"},
        "tts": {
          "vcn": "x2_catherine",
          "tts_results": {
            "encoding": "raw",
            "sample_rate": 16000,
            "channels": 1,
            "bit_depth": 16,
            "frame_size": 0
          }
        }
      },
      "payload": {
        "data": {
          "audio": base64.encode(audio),
          "encoding": "raw",
          "sample_rate": 16000,
          "seq": 1,
          "status": status
        }
      }
    };

    return param;
  }

  // Computes a base64-encoded HMAC-SHA256 of [message] keyed by [key].
  String _hmacSha256(String key, String message) {
    var keyBytes = utf8.encode(key); // key as UTF-8 bytes
    var messageBytes = utf8.encode(message); // message as UTF-8 bytes
    var hmac = Hmac(sha256, keyBytes); // HMAC object over SHA-256
    var digest = hmac.convert(messageBytes); // digest of the message
    return base64.encode(digest.bytes); // base64-encode the digest
  }

  /// Opens the WebSocket connection.
  ///
  /// NOTE(review): the 3-second delay presumably waits for the handshake —
  /// there is no explicit readiness signal; confirm against the API docs.
  Future<void> start() async {
    String wsUrl = _createUrl();
    await _connect(wsUrl);
    await Future.delayed(const Duration(seconds: 3));
    return;
  }

  // Opens the WebSocket and wires up message/error/done handlers.
  Future<void> _connect(String url) async {
    _channel = WebSocketChannel.connect(Uri.parse(url));
    _channel?.stream.listen(
      (message) {
        onMessage(message);
      },
      onError: (error) {
        print('连接失败: $error');
      },
      onDone: () {
        print('WebSocket 连接已关闭');
      },
      cancelOnError: true,
    );
    // NOTE(review): this delay is not awaited — likely a missing `await`.
    Future.delayed(const Duration(seconds: 1));
  }

  // Uploads audio from [audioStream], slicing it into 1280-byte frames and
  // pacing sends at ~40 ms to mimic real-time capture.
  Future<void> pushaudio(Stream<List<int>> audioStream) async {
    int frameSize = 1280; // bytes per frame
    double interval = 0.04; // delay between frames (seconds)
    int status = STATUS_FIRST_FRAME; // first / continue / last frame marker
    currtest = Xunfei_AudioTranslation_Result_Text();
    int index = 0;
    List<int> buffer = [];
    try {
      await for (List<int> frame in audioStream) {
        // Append incoming audio to the buffer.
        buffer.addAll(frame);
        while (buffer.length >= frameSize) {
          List<int> sendFrame = buffer.sublist(0, frameSize);
          buffer = buffer.sublist(frameSize);

          // NOTE(review): this re-read via `index` overwrites the loop
          // variable `frame`, which is never used again — looks like
          // leftover dead code; confirm and remove.
          if (index + frameSize <= buffer.length) {
            frame = buffer.sublist(index, index + frameSize);
            index += frameSize;
          } else {
            frame = buffer.sublist(index);
            index = buffer.length; // exhausted
          }

          // First frame: full parameter set.
          if (status == STATUS_FIRST_FRAME) {
            String data = json.encode(_createParams(appId, status, sendFrame));
            _channel?.sink.add(data);
            // print('first frame sent...' + data);
            status = STATUS_CONTINUE_FRAME;
          }
          // Intermediate frame.
          else if (status == STATUS_CONTINUE_FRAME) {
            String data = json.encode(_createParams(appId, status, sendFrame));
            _channel?.sink.add(data);
            // print('continue frame sent...');
          }
          // Last frame. NOTE(review): `status` only becomes LAST after the
          // stream ends, so this branch appears unreachable inside the loop
          // — confirm.
          else if (status == STATUS_LAST_FRAME) {
            // print('last frame sent...');
            String data = json.encode(_createParams(appId, status, sendFrame));
            _channel?.sink.add(data);
            break;
          }
          // Pace sending to mimic the real-time sampling interval.
          await Future.delayed(
              Duration(milliseconds: (interval * 1000).toInt()));
        }
      }
      // Input exhausted: send an empty last frame to close the session.
      status = STATUS_LAST_FRAME;
      String data = json.encode(_createParams(appId, status, []));
      _channel?.sink.add(data);
    } catch (e) {
      print("push msg: $e");
    }

    print('音频处理完成');
  }

  // Handles a server message: ASR text goes into [currtest], TTS audio into
  // [curraudio]; header.status == 2 marks the end of the session.
  Future<void> onMessage(String message) async {
    try {
      print("收到的消息:$message");
    } catch (e) {
      print("receive msg, but parse exception: $e");
    }

    // Parse the result envelope.
    var messageMap = json.decode(message);
    var status = messageMap["header"]["status"];
    var sid = messageMap["header"]["sid"]; // session id (currently unused)

    // Recognition (ASR) text arrives base64-encoded UTF-8.
    if (messageMap.containsKey('payload') &&
        messageMap['payload'].containsKey('recognition_results')) {
      var result = messageMap['payload']['recognition_results']['text'];
      var asrresult = utf8.decode(base64.decode(result));
      currtest.add(asrresult); // accumulate into the text result object
    }

    // Translated text handling (currently disabled).
    // if (messageMap['payload'].containsKey('streamtrans_results')) {
    //   var result = messageMap['payload']['streamtrans_results']['text'];
    //   var transresult = utf8.decode(base64.decode(result));
    // }

    // TTS audio chunks accumulate into the PCM result object.
    if (messageMap.containsKey('payload') &&
        messageMap['payload'].containsKey('tts_results')) {
      var audio = messageMap['payload']['tts_results']['audio'];
      var audioData = base64.decode(audio);
      curraudio.addAudioData(audioData);
      // var file = File('output/audio/trans.pcm');
      // await file.writeAsBytes(audioData, mode: FileMode.append);
    }
    onResponse(currtest, curraudio);
    if (status == 2) {
      print("数据处理完毕,等待实时转译结束!同传后的音频文件请到output/audio/目录查看...");
      await Future.delayed(Duration(seconds: 3));
      close();
    }
    return;
  }

  // Closes the WebSocket connection.
  void close() {
    _channel?.sink.close();
  }
}

+ 0
- 25
lib/plugin/xunfei/audiotranslate/result_audio.dart Vedi File

@@ -1,25 +0,0 @@
import 'dart:typed_data';
import 'dart:async';

/// Buffers translated-audio chunks and exposes them as a live stream.
// ignore: camel_case_types
class Xunfei_AudioTranslation_Result_Audio {
  final List<Uint8List> _chunks = [];
  final _controller = StreamController<Uint8List>();

  /// Appends [data] to the cache and forwards it to stream listeners.
  void addAudioData(Uint8List data) {
    _chunks.add(data);
    _controller.add(data); // push each new chunk to the live feed
  }

  /// Live feed of audio chunks, in arrival order.
  Stream<Uint8List> get audioStream => _controller.stream;

  /// Snapshot copy of every chunk received so far.
  List<Uint8List> get buffer => List.from(_chunks);

  /// Stops the feed; no further chunks will be delivered.
  void close() {
    _controller.close();
  }
}

+ 0
- 73
lib/plugin/xunfei/audiotranslate/result_test.dart Vedi File

@@ -1,73 +0,0 @@
// ignore: camel_case_types
import 'dart:convert';

/// One recognition fragment from the simultaneous-interpretation stream.
// ignore: camel_case_types
class Xunfei_AudioTranslation_Result_Text_Item {
  final int sn; // sequence number
  final String pgs; // "apd" = append, "rpl" = replace a range
  final List<int> rg; // [start, end] range replaced when pgs == "rpl"
  final List<dynamic> ws; // word segments

  Xunfei_AudioTranslation_Result_Text_Item({
    required this.sn,
    required this.pgs,
    required this.rg,
    required this.ws,
  });
}

/// Accumulates streaming recognition fragments and rebuilds the final text.
///
/// Fragments arrive keyed by sequence number; a fragment whose `pgs` is
/// "rpl" retracts the fragments in the inclusive range `rg[0]..rg[1]`.
// ignore: camel_case_types
class Xunfei_AudioTranslation_Result_Text {
  final Map<int, Xunfei_AudioTranslation_Result_Text_Item> results = {};
  Xunfei_AudioTranslation_Result_Text();

  /// Parses one JSON fragment and stores it by sequence number.
  void add(String result) {
    print("添加文本结果:$result");
    var resultMap = json.decode(result);
    int sn = resultMap["sn"] as int;
    String pgs = resultMap["pgs"] as String;
    // "rg" only accompanies replacement ("rpl") fragments; default empty.
    List<int> rg =
        resultMap["rg"] != null ? List<int>.from(resultMap["rg"]) : [];
    List<dynamic> ws = resultMap["ws"] as List<dynamic>;
    results[sn] = Xunfei_AudioTranslation_Result_Text_Item(
        sn: sn, pgs: pgs, rg: rg, ws: ws);
  }

  /// Applies replacements in sequence order and concatenates the words.
  String result() {
    if (results.isEmpty) return "";

    // First pass: build the effective fragment set, honouring "rpl"
    // fragments that retract an earlier range of sequence numbers.
    final effective = <int, Xunfei_AudioTranslation_Result_Text_Item>{};
    final sortedKeys = results.keys.toList()..sort();
    for (final key in sortedKeys) {
      final item = results[key];
      if (item == null) continue;
      // Robustness: guard against a malformed "rpl" without a full range.
      if (item.pgs == "rpl" && item.rg.length >= 2) {
        for (int i = item.rg[0]; i <= item.rg[1]; i++) {
          effective.remove(i);
        }
      }
      effective[item.sn] = item;
    }

    // Second pass: concatenate the words of the surviving fragments.
    // BUG FIX: read from the filtered map — the original consulted the raw
    // `results` map here, which only worked because both maps aliased the
    // same items.
    final out = StringBuffer();
    final keys = effective.keys.toList()..sort();
    for (final key in keys) {
      final item = effective[key];
      if (item == null) continue;
      for (final seg in item.ws) {
        final cwList = (seg as Map<String, dynamic>)["cw"] as List<dynamic>;
        for (final ct in cwList) {
          out.write(ct["w"] as String);
        }
      }
    }
    return out.toString();
  }
}

+ 0
- 19
lib/plugin/xunfei/audiotranslate/utils.dart Vedi File

@@ -1,19 +0,0 @@
import 'dart:convert';

import 'package:crypto/crypto.dart';

/// Shared signing helpers for iFlytek API requests.
class XunFeiUtils {
  /// Returns the base64-encoded HMAC-SHA256 of [signatureOrigin], keyed
  /// with [apiSecret].
  static String hmacSha256(String apiSecret, String signatureOrigin) {
    // Both key and message must be UTF-8 byte sequences.
    final mac = Hmac(sha256, utf8.encode(apiSecret));
    final digest = mac.convert(utf8.encode(signatureOrigin));
    // Base64-encode the raw digest bytes for the authorization header.
    return base64.encode(digest.bytes);
  }
}

+ 218
- 0
lib/scenes/ RecordScene.dart Vedi File

@@ -0,0 +1,218 @@
import 'dart:async';
import 'dart:io';
import 'dart:typed_data';
import 'package:record/record.dart';
import 'package:flutter/material.dart';
import 'package:logger/logger.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:demo001/xunfei/xunfei_translate.dart';

/*
 Recording test scene: streams microphone PCM into the Xunfei translator.
*/
class RecordScene extends StatefulWidget {
  @override
  _RecordSceneState createState() => _RecordSceneState();
}

class _RecordSceneState extends State<RecordScene> {
  late Directory savedirectory; // external storage dir for saved audio

  // SECURITY(review): live-looking Xunfei credentials are hard-coded and
  // committed here — rotate them and load from secure configuration.
  final XunFeiTranslate xunfei = XunFeiTranslate(
      appId: "137dc132",
      apiKey: "1c1891a475e71250ecd1320303ad6545",
      apiSecret: "MjZhNDA1NTI1NWZkZDQxOTMxYzMyN2Yw");

  AudioRecorder _recorder = AudioRecorder();
  bool _isRecorderReady = false; // recorder prepared?
  bool _isRecording = false; // currently recording?

  @override
  void initState() {
    super.initState();
    _requestPermissions();
  }

  // Prepares the recorder by resolving the external storage directory.
  void _initRecorder() async {
    try {
      // Resolve the external storage directory path.
      savedirectory = (await getExternalStorageDirectory())!;
      setState(() {
        _isRecorderReady = true;
      });
      _log('录音器初始化成功');
    } catch (e) {
      _log('初始化录音器失败: $e');
      setState(() {
        _isRecorderReady = false;
      });
    }
  }

  // Requests microphone permission from the OS.
  void _requestPermissions() async {
    try {
      if (await Permission.microphone.request().isGranted) {
        _log('麦克风权限已授予');
      } else {
        _log('麦克风权限被拒绝');
        setState(() {
          _isRecorderReady = false;
        });
      }
    } catch (e) {
      _log('请求麦克风权限失败: $e');
      setState(() {
        _isRecorderReady = false;
      });
    }
  }

  // Toggles between starting and stopping the recording.
  // NOTE(review): _startRecorder/_stopRecorder already setState
  // _isRecording asynchronously; flipping it again here can double-toggle
  // — confirm intended behavior.
  void _toggleCallStatus() {
    if (!_isRecording) {
      // start the call/recording
      _startRecorder();
    } else {
      // end the call/recording
      _stopRecorder();
    }

    setState(() {
      _isRecording = !_isRecording;
    });
  }

  // Starts streaming PCM16 mono 16 kHz audio into the translator.
  void _startRecorder() async {
    try {
      if (!_isRecorderReady) {
        // NOTE(review): _initRecorder is async but not awaited, so
        // _isRecorderReady may still be false when recording starts —
        // confirm.
        _initRecorder();
      }
      if (_isRecording) return; // guard against re-entry
      Stream<Uint8List> dataStream = await _recorder.startStream(RecordConfig(
          sampleRate: 16000, encoder: AudioEncoder.pcm16bits, numChannels: 1));
      xunfei.starttranslate(dataStream);
      setState(() {
        _isRecording = true;
      });
      _log('录音开始');
    } catch (e) {
      _log('录音开始 异常: $e');
    }
  }

  // Stops the recorder and ends the translation session.
  void _stopRecorder() async {
    try {
      if (!_isRecording) return; // guard against re-entry
      await _recorder.stop();
      await _recorder.cancel();
      xunfei.stoptranslate();
      setState(() {
        _isRecorderReady = false;
        _isRecording = false;
      });
      _log('录音停止');
    } catch (e) {
      _log('录音停止 异常: $e');
    }
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: Text("录音测试")),
      // body: ListView.builder(
      //   itemCount: _records.length,
      //   itemBuilder: (context, index) {
      //     var audio = _records[index];
      //     return _buildAudioMessage(audio);
      //   },
      // ),
      bottomNavigationBar: Padding(
          padding: const EdgeInsets.all(20.0),
          child: InkWell(
              onTap: _toggleCallStatus,
              child: Container(
                  decoration: BoxDecoration(
                    borderRadius: BorderRadius.circular(30), // rounded button
                    color: _isRecording
                        ? Colors.red
                        : Colors.green, // red while recording, green when idle
                  ),
                  padding: EdgeInsets.symmetric(
                      vertical: 15, horizontal: 40), // button size
                  child: Row(
                    mainAxisAlignment: MainAxisAlignment.center,
                    children: [
                      Icon(
                        _isRecording
                            ? Icons.call_end
                            : Icons.mic, // icon tracks state
                        color: Colors.white,
                        size: 30,
                      ),
                      SizedBox(width: 10),
                      Text(
                        _isRecording ? '挂断' : '开始通话', // label tracks state
                        style: TextStyle(
                          color: Colors.white,
                          fontSize: 18,
                        ),
                      ),
                    ],
                  ))),
    );
  }

  // (Disabled) audio-message bubble builder, kept for reference.
  // Widget _buildAudioMessage(RecordData data) {
  //   Color buttColor = data.state == 0 ? Colors.red : Colors.green;
  //   return Padding(
  //     padding: const EdgeInsets.symmetric(vertical: 10, horizontal: 15),
  //     child: Column(
  //       crossAxisAlignment: CrossAxisAlignment.start,
  //       children: [
  //         // play button
  //         GestureDetector(
  //           onTap: () {
  //             // _playRecording(data);
  //           },
  //           child: Container(
  //             padding: EdgeInsets.symmetric(vertical: 10, horizontal: 20),
  //             decoration: BoxDecoration(
  //               color: buttColor,
  //               borderRadius: BorderRadius.circular(30),
  //             ),
  //             child: Row(
  //               children: [
  //                 Icon(
  //                   Icons.play_arrow,
  //                   color: Colors.white,
  //                 ),
  //                 SizedBox(width: 10),
  //                 Text(
  //                   '播放音频',
  //                   style: TextStyle(color: Colors.white),
  //                 ),
  //               ],
  //             ),
  //           ),
  //         ),
  //         SizedBox(height: 5),
  //       ],
  //     ),
  //   );
  // }

  // Tagged debug logging helper.
  void _log(String msg) {
    Logger().f("LIWEI---------------:$msg");
  }
}

+ 129
- 0
lib/scenes/AIChatScene.dart Vedi File

@@ -0,0 +1,129 @@
/*
AI测试场景
*/
import 'package:flutter/material.dart';
import 'package:demo001/doubao/DouBao.dart';

/// One chat bubble's text plus its streaming state (0 = open, 1 = done).
class ChatItem {
  String msg = "";
  int state = 0;

  ChatItem({required this.msg});

  /// Appends a streamed fragment to the message text.
  void append(String fragment) {
    msg = msg + fragment;
  }

  /// Marks the message as fully received.
  void end() {
    state = 1;
  }
}

/// AI chat test scene backed by the Doubao streaming client.
class AIChatScene extends StatefulWidget {
  @override
  _AIChatSceneState createState() => _AIChatSceneState();
}

class _AIChatSceneState extends State<AIChatScene> {
  // SECURITY(review): live-looking API keys and a model id are hard-coded
  // and committed here — rotate them and load from secure configuration.
  // final String apiKey = "sk-3adfd188a3134e718bbf704f525aff17";
  final Doubao doubao = Doubao(
      apiKey: "418ec475-e2dc-4b76-8aca-842d81bc3466",
      modelId: "ep-20250203161136-9lrxg");

  final List<ChatItem> _chats = [ChatItem(msg: "我是测试代码")];
  ChatItem? _currchat; // bubble currently being streamed into
  final TextEditingController _textController = TextEditingController();

  // Sends the typed message and streams the model reply into a new bubble.
  _sendMessage() async {
    _currchat = ChatItem(msg: "");
    setState(() {
      _chats.add(_currchat!);
    });
    var stream = doubao.chat(_textController.text);
    _textController.text = "";
    await for (final content in stream) {
      setState(() {
        _currchat?.append(content);
      });
      print('实时更新: $content');
    }
    print('结束恢复');
    // NOTE(review): this state change is outside setState, so the final
    // "done" state may not trigger a repaint — confirm.
    _currchat?.end();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
        appBar: AppBar(title: Text("AI测试")),
        body: ListView.builder(
          itemCount: _chats.length,
          itemBuilder: (context, index) {
            var item = _chats[index];
            return _buildAudioMessage(item);
          },
        ),
        bottomNavigationBar: Padding(
          padding: const EdgeInsets.all(20.0),
          child: Container(
            decoration: BoxDecoration(
              color: Colors.grey[200], // background color
              borderRadius: BorderRadius.circular(30), // rounded corners
            ),
            padding: EdgeInsets.symmetric(
                horizontal: 20, vertical: 10), // inner padding
            child: Row(
              children: [
                Expanded(
                  child: TextField(
                    controller: _textController, // captures the typed text
                    decoration: InputDecoration(
                      hintText: '输入消息...', // placeholder text
                      border: InputBorder.none, // drop the default border
                      contentPadding:
                          EdgeInsets.symmetric(vertical: 12), // padding tweak
                    ),
                  ),
                ),
                IconButton(
                  icon: Icon(Icons.send, color: Colors.blue), // send icon
                  onPressed: _sendMessage, // send on tap
                ),
              ],
            ),
          ),
        ));
  }

  // Builds one chat bubble for [data].
  Widget _buildAudioMessage(ChatItem data) {
    return Padding(
      padding: const EdgeInsets.symmetric(vertical: 10, horizontal: 15),
      child: Column(
        crossAxisAlignment: CrossAxisAlignment.start,
        children: [
          // bubble body
          GestureDetector(
            onTap: () {},
            child: Container(
              padding: EdgeInsets.symmetric(vertical: 10, horizontal: 20),
              decoration: BoxDecoration(
                color: Colors.green,
                borderRadius: BorderRadius.circular(30),
              ),
              child: Row(
                children: [
                  Text(
                    data.msg,
                    style: TextStyle(color: Colors.white),
                  ),
                ],
              ),
            ),
          ),
          SizedBox(height: 5),
        ],
      ),
    );
  }
}

+ 114
- 0
lib/scenes/BluetoothScene.dart Vedi File

@@ -0,0 +1,114 @@
// /*
// 蓝牙测试场景
// */
// import 'package:flutter/material.dart';
// import 'package:flutter_blue/flutter_blue.dart';

// class BluetoothScene extends StatefulWidget {
// @override
// _BluetoothSceneState createState() => _BluetoothSceneState();
// }

// class _BluetoothSceneState extends State<BluetoothScene> {
// FlutterBlue flutterBlue = FlutterBlue.instance;
// List<BluetoothDevice> devicesList = [];
// BluetoothDevice? connectedDevice;
// BluetoothState? bluetoothState;

// @override
// void initState() {
// super.initState();
// flutterBlue.state.listen((state) {
// setState(() {
// bluetoothState = state;
// });
// });

// flutterBlue.scanResults.listen((results) {
// setState(() {
// devicesList = results
// .where((result) => result.device.name.isNotEmpty)
// .map((result) => result.device)
// .toList();
// });
// });
// }

// void startScan() {
// flutterBlue.startScan(timeout: Duration(seconds: 4));
// }

// void stopScan() {
// flutterBlue.stopScan();
// }

// void connectToDevice(BluetoothDevice device) async {
// await device.connect();
// setState(() {
// connectedDevice = device;
// });
// listenToDeviceEvents(device);
// }

// void listenToDeviceEvents(BluetoothDevice device) {
// device.state.listen((state) {
// if (state == BluetoothDeviceState.connected) {
// print('Device connected');
// } else if (state == BluetoothDeviceState.disconnected) {
// print('Device disconnected');
// }
// });

// device.discoverServices().then((services) {
// services.forEach((service) {
// service.characteristics.forEach((characteristic) {
// characteristic.setNotifyValue(true);
// characteristic.value.listen((value) {
// print('Received value: $value');
// });
// });
// });
// });
// }

// @override
// Widget build(BuildContext context) {
// return Scaffold(
// appBar: AppBar(title: Text('Flutter Bluetooth Demo')),
// body: Column(
// children: [
// if (bluetoothState == BluetoothState.off)
// Text('Bluetooth is off, please turn it on'),
// if (bluetoothState == BluetoothState.on)
// Column(
// children: [
// ElevatedButton(
// onPressed: startScan,
// child: Text('Start Scan'),
// ),
// ElevatedButton(
// onPressed: stopScan,
// child: Text('Stop Scan'),
// ),
// ListView.builder(
// shrinkWrap: true,
// itemCount: devicesList.length,
// itemBuilder: (context, index) {
// return ListTile(
// title: Text(devicesList[index].name),
// subtitle: Text(devicesList[index].id.toString()),
// onTap: () {
// connectToDevice(devicesList[index]);
// },
// );
// },
// ),
// if (connectedDevice != null)
// Text('Connected to: ${connectedDevice?.name}')
// ],
// ),
// ],
// ),
// );
// }
// }

+ 0
- 165
lib/scenes/ChartScene.dart Vedi File

@@ -1,165 +0,0 @@
import 'package:flutter/material.dart';
import 'dart:async';

/// Chat demo screen; all mutable state lives in [_ChatSceneState].
class ChatScene extends StatefulWidget {
  @override
  State<ChatScene> createState() {
    return _ChatSceneState();
  }
}

class _ChatSceneState extends State<ChatScene> {
bool _isInCall = false; // 记录是否在通话状态

// 模拟的聊天消息数据,包括语音消息和文字消息
final List<Map<String, dynamic>> _messages = [
{
'type': 'audio',
'audioDuration': 5, // 音频时长 5秒
'audioUrl':
'https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3',
'text': '你好,我是一个语音消息'
},
{'type': 'text', 'text': '这是一个文本消息'},
{
'type': 'audio',
'audioDuration': 8, // 音频时长 8秒
'audioUrl':
'https://www.soundhelix.com/examples/mp3/SoundHelix-Song-2.mp3',
'text': '这是另一条语音消息'
}
];

// 切换按钮状态
void _toggleCallStatus() {
setState(() {
_isInCall = !_isInCall;
});
}

@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(title: Text("Chat Scene")),
body: ListView.builder(
itemCount: _messages.length,
itemBuilder: (context, index) {
var message = _messages[index];
if (message['type'] == 'audio') {
// 语音消息
return _buildAudioMessage(message);
} else {
// 文本消息
return _buildTextMessage(message);
}
},
),
bottomNavigationBar: Padding(
padding: const EdgeInsets.all(20.0),
child: InkWell(
onTap: _toggleCallStatus, // 点击按钮时切换状态
onLongPress: () {
// 按住时切换为通话状态
_toggleCallStatus();
},
child: Container(
decoration: BoxDecoration(
borderRadius: BorderRadius.circular(30), // 圆角按钮
color: _isInCall ? Colors.red : Colors.green, // 通话状态红色,非通话状态绿色
),
padding:
EdgeInsets.symmetric(vertical: 15, horizontal: 40), // 调整按钮大小
child: Row(
mainAxisAlignment: MainAxisAlignment.center,
children: [
Icon(
_isInCall ? Icons.call_end : Icons.mic, // 图标变化
color: Colors.white,
size: 30,
),
SizedBox(width: 10),
Text(
_isInCall ? '挂断' : '开始通话', // 状态文字变化
style: TextStyle(
color: Colors.white,
fontSize: 18,
),
),
],
),
),
),
),
);
}

// 构建文本消息
Widget _buildTextMessage(Map<String, dynamic> message) {
return Padding(
padding: const EdgeInsets.symmetric(vertical: 10, horizontal: 15),
child: Align(
alignment: Alignment.centerLeft,
child: Container(
padding: EdgeInsets.all(10),
decoration: BoxDecoration(
color: Colors.blue[100],
borderRadius: BorderRadius.circular(10),
),
child: Text(
message['text'],
style: TextStyle(fontSize: 16),
),
),
),
);
}

// 构建语音消息
Widget _buildAudioMessage(Map<String, dynamic> message) {
return Padding(
padding: const EdgeInsets.symmetric(vertical: 10, horizontal: 15),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
// 音频时长显示
Text(
'${message['audioDuration']}秒',
style: TextStyle(fontSize: 14, color: Colors.grey),
),
SizedBox(height: 5),
// 音频播放按钮
GestureDetector(
onTap: () {
// 这里可以实现点击播放音频的功能
print("播放音频: ${message['audioUrl']}");
},
child: Container(
padding: EdgeInsets.symmetric(vertical: 10, horizontal: 20),
decoration: BoxDecoration(
color: Colors.green,
borderRadius: BorderRadius.circular(30),
),
child: Row(
children: [
Icon(
Icons.play_arrow,
color: Colors.white,
),
SizedBox(width: 10),
Text(
'播放音频',
style: TextStyle(color: Colors.white),
),
],
),
),
),
SizedBox(height: 5),
// 文字内容
Text(
message['text'],
style: TextStyle(fontSize: 16),
),
],
),
);
}
}

+ 0
- 112
lib/scenes/HomeScene.dart Vedi File

@@ -1,112 +0,0 @@
import 'package:demo001/plugin/xunfei/audiotranslate/audiotranslate.dart';
import 'package:demo001/plugin/xunfei/audiotranslate/result_audio.dart';
import 'package:demo001/plugin/xunfei/audiotranslate/result_test.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';

/// Home screen shell; the app-bar label comes from [title].
class MyHomePage extends StatefulWidget {
  /// Title shown in the app bar.
  final String title;

  const MyHomePage({super.key, required this.title});

  @override
  State<MyHomePage> createState() {
    return _MyHomePageState();
  }
}

/// State for [MyHomePage]: wires three test buttons (connect / translate /
/// record) to the XunFei audio-translation client and shows the latest
/// result string on screen.
class _MyHomePageState extends State<MyHomePage> {
  late Xunfei_AudioTranslation xunfei;
  String _result = "";
  // Initialized in initState.
  @override
  void initState() {
    super.initState();
    // NOTE(review): real API credentials are hard-coded in source; they are
    // exposed to anyone with the binary/repo and should be moved to secure
    // configuration and rotated.
    xunfei = new Xunfei_AudioTranslation(
      appId: "137dc132",
      apiKey: "1c1891a475e71250ecd1320303ad6545",
      apiSecret: "MjZhNDA1NTI1NWZkZDQxOTMxYzMyN2Yw",
      onResponse: _onResponse,
    );
  }

  // Callback invoked by the SDK with recognized text and synthesized audio;
  // only the text is surfaced to the UI.
  void _onResponse(Xunfei_AudioTranslation_Result_Text text,
      Xunfei_AudioTranslation_Result_Audio audio) {
    setState(() {
      _result = "接受消息:${text.result()}";
    });
  }

  // Connection test: opens the SDK session and displays its state.
  Future<void> _connectTest() async {
    await xunfei.start();
    setState(() {
      _result = "链接状态:${xunfei.state}";
    });
  }

  // Reads a bundled PCM file and re-emits it as a chunked byte stream,
  // simulating live microphone capture.
  Stream<List<int>> _getAudioStream() async* {
    // Load the audio file from the asset bundle.
    // NOTE(review): 'assests' looks like a typo for 'assets' — verify the
    // path matches the pubspec asset declaration, otherwise load() throws.
    final byteData = await rootBundle.load('assests/original.pcm');
    final buffer = byteData.buffer.asUint8List();

    // Emit the buffer in frames of frameSize bytes.
    int frameSize = 1280; // bytes per audio frame
    for (int i = 0; i < buffer.length; i += frameSize) {
      int end =
          (i + frameSize <= buffer.length) ? i + frameSize : buffer.length;
      yield buffer.sublist(i, end);
    }
  }

  // Translation test: pushes the canned audio stream into the SDK.
  Future<void> _translationTest() async {
    Stream<List<int>> audioStream = _getAudioStream();
    await xunfei.pushaudio(audioStream);
  }

  // Recording test: not implemented yet.
  void _recordTest() {}

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        backgroundColor: Theme.of(context).colorScheme.inversePrimary,
        title: Text(widget.title),
      ),
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: <Widget>[
            const Text(
              '测试结果',
            ),
            Text(
              _result,
              style: Theme.of(context).textTheme.headlineMedium,
            ),
          ],
        ),
      ),
      // Row of test buttons pinned to the bottom of the page.
      bottomNavigationBar: Padding(
        padding: const EdgeInsets.all(8.0),
        child: Row(
          mainAxisAlignment: MainAxisAlignment.spaceAround, // evenly spaced buttons
          children: <Widget>[
            ElevatedButton(
              onPressed: _connectTest,
              child: const Text('测试链接'),
            ),
            ElevatedButton(
              onPressed: _translationTest,
              child: const Text('测试翻译'),
            ),
            ElevatedButton(
              onPressed: _recordTest,
              child: const Text('测试录音'),
            ),
          ],
        ),
      ),
    );
  }
}

+ 0
- 448
lib/scenes/SoundRecordScene.dart Vedi File

@@ -1,448 +0,0 @@
import 'dart:async';
import 'dart:typed_data';

import 'package:demo001/xunfei/xunfei.dart';
import 'package:flutter/material.dart';
import 'package:path_provider/path_provider.dart';
import 'package:just_audio/just_audio.dart' as just_audio;
import 'package:flutter_sound/flutter_sound.dart' as flutter_sound;
import 'package:permission_handler/permission_handler.dart';

class SoundRecordScene extends StatefulWidget {
@override
_SoundRecordSceneState createState() => _SoundRecordSceneState();
}

class _SoundRecordSceneState extends State<SoundRecordScene> {
late ISDK _sdk;
flutter_sound.FlutterSoundRecorder? _soundRecorder;
just_audio.AudioPlayer? _audioPlayer;
bool _isRecorderReady = false;
bool _isRecording = false;
bool _isSpeaking = false; //是否说话
int _stateSpeak = 0; // 说话状态 0 未说话 1开始说话 2 说话中 3结束说话
String? _audioFilePath;
double _volumeLevel = 0.0; // 当前音量值
DateTime? _lastBelowThresholdTime; // 上次音量低于阈值的时间
ScrollController _scrollController = ScrollController();
List<String> _logs = [];
List<ITaskTrans> _trans = [];
late ITaskTrans _lasttran;
// 音量阈值
final double _speakingThreshold = 50.0; // 开始说话的阈值
final double _silenceThreshold = 30.0; // 结束说话的阈值
final Duration _silenceDuration = Duration(seconds: 1); // 持续低于阈值的时间

// 采样率和声道数
flutter_sound.Codec _audiocodec = flutter_sound.Codec.pcm16;
final int _sampleRate = 16000; // 16kHz 采样率
final int _numChannels = 1; // 单声道
StreamController<Uint8List> _audioDataStreamController =
StreamController<Uint8List>.broadcast();
@override
void initState() {
super.initState();
_sdk = Xunfei(
appId: "137dc132",
apiKey: "1c1891a475e71250ecd1320303ad6545",
apiSecret: "MjZhNDA1NTI1NWZkZDQxOTMxYzMyN2Yw");
_audioPlayer = just_audio.AudioPlayer();
_requestPermissions();
_initRecorder();
}

// 初始化录音器
void _initRecorder() async {
try {
_soundRecorder = flutter_sound.FlutterSoundRecorder();
await _soundRecorder?.openRecorder();
await _soundRecorder
?.setSubscriptionDuration(const Duration(milliseconds: 100));
//检查编解码器是否支持
if (!await _soundRecorder!
.isEncoderSupported(flutter_sound.Codec.pcm16)) {
_log("PCM16 codec is not supported on this device.");
_audiocodec = flutter_sound.Codec.aacADTS;
}
setState(() {
_isRecorderReady = true;
});
_log('录音器初始化成功');
} catch (e) {
_log('初始化录音器失败: $e');
setState(() {
_isRecorderReady = false;
});
}
}

// 请求麦克风权限
void _requestPermissions() async {
try {
if (await Permission.microphone.request().isGranted) {
_log('麦克风权限已授予');
} else {
_log('麦克风权限被拒绝');
setState(() {
_isRecorderReady = false;
});
}
} catch (e) {
_log('请求麦克风权限失败: $e');
setState(() {
_isRecorderReady = false;
});
}
}

// 开始录音
void _startRecording() async {
try {
if (!_isRecorderReady) {
_log('录音器未准备好');
return;
}
if (_isRecording) return; // 防止重复调用
final directory = await getTemporaryDirectory();
final tempPath = '${directory.path}/recorded_audio.pcm';
_log('录音文件路径: $tempPath');
await _soundRecorder?.startRecorder(
codec: _audiocodec,
toStream: _audioDataStreamController.sink, // 将音频数据写入到 StreamController
sampleRate: _sampleRate, // 设置采样率
numChannels: _numChannels, // 设置声道数
enableVoiceProcessing: true, // 启用音量监听
);
_soundRecorder?.onProgress!
.listen((flutter_sound.RecordingDisposition event) {
// _log('onProgress 回调触发, 分贝: ${event.decibels}');
if (event.decibels != null) {
setState(() {
_volumeLevel = event.decibels!; //更新音量值
});
_checkSpeakingStatus(); // 检查说话状态
}
});
// 监听音频数据流
_audioDataStreamController.stream.listen((Uint8List audioData) {
_processAudioData(audioData);
// 这里可以进一步处理音频数据,例如保存到文件或上传到服务器
});
setState(() {
_audioFilePath = tempPath;
_isRecording = true;
});
_log('录音开始');
} catch (e) {
_log('录音开始 异常: $e');
}
}

// 处理音频数据的方法
Uint8List _audioBuffer = Uint8List(0); // 缓存音频数据
void _processAudioData(Uint8List newData) {
// 将新数据追加到缓存中
_audioBuffer = Uint8List.fromList([..._audioBuffer, ...newData]);

// 每次处理一帧数据(1280 字节)
int frameSize = 1280; // 每帧的大小
while (_isSpeaking && _audioBuffer.length >= frameSize) {
// 取出一帧数据
Uint8List frame = _audioBuffer.sublist(0, frameSize);
_audioBuffer = _audioBuffer.sublist(frameSize); // 移除已处理的数据

// 将帧数据传递给任务
_lasttran.addAudioData(frame);
}

// 如果录音结束且缓存中还有剩余数据,则作为最后一帧发送
if (!_isRecording && _audioBuffer.isNotEmpty) {
_lasttran.addAudioData(_audioBuffer);
_audioBuffer = Uint8List(0); // 清空缓存
}
}

// 停止录音
void _stopRecording() async {
try {
if (!_isRecording) return; // 防止重复调用
_lasttran.endpuish();
await _soundRecorder?.stopRecorder();
await _soundRecorder?.closeRecorder();
setState(() {
_isRecording = false;
_volumeLevel = 0.0; //重置音量值
});
_log('录音停止');
} catch (e) {
_log('录音停止 异常: $e');
}
}

// 播放录音
void _playRecording() async {
// try {
// if (_audioFilePath != null) {
// await _audioPlayer?.play(DeviceFileSource(_audioFilePath!));
// _log('播放录音');
// }
// } catch (e) {
// _log('播放录音 异常: $e');
// }
}

// 检查说话状态
_checkSpeakingStatus() {
if (_volumeLevel > _speakingThreshold && !_isSpeaking) {
// 音量高于阈值,表示开始说话
setState(() {
_isSpeaking = true;
});
_log('开始说话');
_stateSpeak = 1;
_lasttran = _sdk.createTransTask(_taskchange);
_trans.add(_lasttran);
} else if (_volumeLevel < _silenceThreshold) {
// 音量低于阈值
if (_lastBelowThresholdTime == null) {
// 记录第一次低于阈值的时间
_lastBelowThresholdTime = DateTime.now();
} else if (DateTime.now().difference(_lastBelowThresholdTime!) >
_silenceDuration) {
// 持续低于阈值超过指定时间,表示结束说话
if (_isSpeaking) {
setState(() {
_isSpeaking = false;
});
_log('结束说话');
_stateSpeak = 3;
_lasttran.endpuish();
}
}
} else {
// 音量恢复到阈值以上,重置计时器
_lastBelowThresholdTime = null;
}
_stateSpeak = 2;
}

//任务状态变化
void _taskchange(ITaskTrans task) {
if (task.state() == 3) {
playAudioStream(task.translateAudio());
}
}

// 添加日志信息并自动滚动
void _log(String message) {
print("输出日志:${message}");
setState(() {
_logs.add(message); // 从顶部插入新日志
});
_scrollToBottom();
}

// 滚动到底部
void _scrollToBottom() {
WidgetsBinding.instance.addPostFrameCallback((_) {
if (_scrollController.hasClients) {
_scrollController.animateTo(
_scrollController.position.maxScrollExtent, // 滚动到底部
duration: Duration(milliseconds: 200),
curve: Curves.easeInOut,
);
}
});
}

// 播放音频流
void playAudioStream(Stream<Uint8List> audioStream) async {
try {
// 创建自定义的 AudioSource
final audioSource = CustomStreamAudioSource(
audioStream: audioStream,
contentLength: null, // 如果不知道音频数据长度,设置为 null
);
// 设置音频源并播放
await _audioPlayer?.setAudioSource(audioSource);
await _audioPlayer?.play();

print('音频流播放开始');
} catch (e) {
print('播放音频流失败: $e');
}
}

@override
void dispose() {
_soundRecorder?.closeRecorder();
_audioPlayer?.dispose();
_scrollController.dispose();
super.dispose();
}

@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(title: Text('录音与播放')),
body: Column(
children: [
// 滑动面板(日志区域)
Expanded(
child: Padding(
padding: const EdgeInsets.all(8.0),
child: Container(
decoration: BoxDecoration(
border: Border.all(color: Colors.blue),
borderRadius: BorderRadius.circular(10),
),
child: ListView.builder(
controller: _scrollController,
itemCount: _trans.length,
itemBuilder: (context, index) {
// 语音消息
return _buildAudioMessage(_trans[index]);
},
),
),
),
),

// 底部按钮区域
Padding(
padding: const EdgeInsets.all(20.0),
child:
Row(mainAxisAlignment: MainAxisAlignment.center, children: [
// 音量图标和音量值
Row(
children: [
Icon(
Icons.volume_up,
size: 30,
),
SizedBox(width: 10), // 间距
Text(
'${_volumeLevel.toStringAsFixed(2)} dB', //显示音量值,保留两位小数
style: TextStyle(fontSize: 16),
),
],
),
SizedBox(width: 20), // 间距
// 按钮区域
GestureDetector(
onTapDown: (details) {
_startRecording(); // 按下时开始录音
},
onTapUp: (details) {
_stopRecording(); // 抬起时停止录音并播放
_playRecording(); // 播放录音
},
child: Container(
margin: EdgeInsets.all(20),
padding: EdgeInsets.all(20),
decoration: BoxDecoration(
color: Colors.blue,
shape: BoxShape.circle,
),
child: Icon(
Icons.mic,
color: Colors.white,
size: 50,
),
),
),
]))
],
),
);
}

// 构建语音消息
Widget _buildAudioMessage(ITaskTrans msg) {
return Padding(
padding: const EdgeInsets.symmetric(vertical: 10, horizontal: 15),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
// 音频时长显示
Text(
'2秒',
style: TextStyle(fontSize: 14, color: Colors.grey),
),
SizedBox(height: 5),
// 音频播放按钮
GestureDetector(
onTap: () {
// 这里可以实现点击播放音频的功能
// print("播放音频: ${message['audioUrl']}");
},
child: Container(
padding: EdgeInsets.symmetric(vertical: 10, horizontal: 20),
decoration: BoxDecoration(
color: Colors.green,
borderRadius: BorderRadius.circular(30),
),
child: Row(
children: [
Icon(
Icons.play_arrow,
color: Colors.white,
),
SizedBox(width: 10),
Text(
'播放音频',
style: TextStyle(color: Colors.white),
),
],
),
),
),
SizedBox(height: 5),
// 文字内容
Text(
msg.originalText(),
style: TextStyle(fontSize: 16),
),
],
),
);
}
}

/// Adapts a raw byte [Stream] to just_audio's [just_audio.StreamAudioSource]
/// so in-memory audio can be played without writing a file first.
class CustomStreamAudioSource extends just_audio.StreamAudioSource {
  // Source of audio bytes to play.
  final Stream<Uint8List> audioStream;
  // Total byte length if known; null when the stream is unbounded.
  final int? contentLength;

  CustomStreamAudioSource({
    required this.audioStream,
    this.contentLength,
  });

  @override
  Future<just_audio.StreamAudioResponse> request([int? start, int? end]) async {
    try {
      // Convert Stream<Uint8List> to the Stream<List<int>> the API expects.
      final stream = audioStream.map((uint8List) => uint8List.toList());

      // Range requests cannot be served from a one-shot stream.
      if (start != null || end != null) {
        // If the player asks for a byte range, fail loudly rather than
        // returning wrong data.
        throw UnsupportedError('Range requests are not supported');
      }

      // NOTE(review): contentType claims MPEG audio, but upstream producers
      // in this project emit raw PCM ("encoding": "raw") — confirm the
      // player actually decodes this; 'audio/pcm' or a WAV wrapper may be
      // required.
      return just_audio.StreamAudioResponse(
        stream: stream,
        contentLength: contentLength,
        sourceLength: null,
        offset: null,
        contentType: 'audio/mpeg',
      );
    } catch (e) {
      print('请求音频流失败: $e');
      rethrow;
    }
  }
}

+ 0
- 321
lib/xunfei/task_trans.dart Vedi File

@@ -1,321 +0,0 @@
//讯飞的翻译任务
import 'dart:async';
import 'dart:convert';
import 'dart:ffi';
import 'dart:typed_data';
import 'package:demo001/xunfei/utils.dart';
import 'package:demo001/xunfei/xunfei.dart';
import 'package:intl/intl.dart';
import 'package:web_socket_channel/web_socket_channel.dart';

typedef TaskStateChangeEvent = void Function(ITaskTrans task); // 定义回调函数类型

/// One incremental ASR result fragment from the XunFei stream.
///
/// Fragments are merged by sequence number: when [pgs] is "rpl" the fragments
/// whose sequence numbers fall in the inclusive range [rg] are replaced by
/// this one (see the consumer's originalText()).
class XunferTask_Result_Text_Item {
  // Sequence number of this fragment; also the merge key.
  final int sn;
  // Progressive-result mode; "rpl" replaces earlier fragments (other values,
  // presumably "apd" for append, are kept as-is — confirm against API docs).
  final String pgs;
  // [start, end] sequence-number range to replace when pgs == "rpl";
  // empty when the server sent no range.
  final List<int> rg;
  // Word segments; each entry holds a "cw" list whose items carry the
  // recognized text under key "w".
  final List<dynamic> ws;

  XunferTask_Result_Text_Item({
    required this.sn,
    required this.pgs,
    required this.rg,
    required this.ws,
  });
}

class XunferTaskTrans implements ITaskTrans {
static const int STATUS_FIRST_FRAME = 0;
static const int STATUS_CONTINUE_FRAME = 1;
static const int STATUS_LAST_FRAME = 2;

final String appId;
final String apiKey;
final String apiSecret;
final String host = "ws-api.xf-yun.com";
final String requestUri = "/v1/private/simult_interpretation";
late String url;
late WebSocketChannel? _channel;
final TaskStateChangeEvent onEvent; // 回调函数类型

bool isconnected = false;
int _state = 0; //未连接 1上传语音 2结束语音 3完成任务

//识别数据
final Map<int, XunferTask_Result_Text_Item> tests = {};
// 输入音频流
final StreamController<Uint8List> _inputaudioStream =
StreamController<Uint8List>();
//输出音频流
final StreamController<Uint8List> _outputaudioStream =
StreamController<Uint8List>();

XunferTaskTrans({
required this.appId,
required this.apiKey,
required this.apiSecret,
required this.onEvent,
}) {
url = _geturl();
_connect();
_startpush();
}

//获取链接地址
String _geturl() {
final now = DateTime.now();
final date =
DateFormat("EEE, dd MMM yyyy HH:mm:ss 'GMT'").format(now.toUtc());
final signatureOrigin =
"host: $host\ndate: $date\nGET $requestUri HTTP/1.1";

// 使用 HmacUtil 计算 HMAC-SHA256 签名
final signature = XunfeiUtils.hmacSha256(apiSecret, signatureOrigin);

final authorization = base64.encode(utf8.encode(
"api_key=\"$apiKey\", algorithm=\"hmac-sha256\", headers=\"host date request-line\", signature=\"$signature\""));

final queryParams = {
"authorization": authorization,
"date": date,
"host": host,
"serviceId": "simult_interpretation"
};

final wsUri =
'ws://$host$requestUri?${Uri(queryParameters: queryParams).query}';
return wsUri;
}

// 创建WebSocket连接
Future<void> _connect() async {
_channel = WebSocketChannel.connect(Uri.parse(url));
_channel?.stream.timeout(Duration(seconds: 10)); //设置超时时间
_channel?.stream.listen(
(message) {
onMessage(message);
},
onError: (error) {
isconnected = false;
print('连接失败: $error');
},
onDone: () {
isconnected = false;
print('WebSocket 连接已关闭');
print('Close code: ${_channel?.closeCode}');
print('Close reason: ${_channel?.closeReason}');
},
cancelOnError: true,
);
isconnected = true;
}

// 上传音频
Future<void> _startpush() async {
_state = 1;
int frameSize = 1280; // 每一帧的音频大小
double interval = 0.04; // 发送音频间隔(单位:s)
int status = STATUS_FIRST_FRAME; // 音频的状态信息,标识音频是第一帧,还是中间帧、最后一帧
Uint8List buffer = Uint8List(0);
try {
await for (Uint8List chunk in _inputaudioStream.stream) {
// 将新数据追加到缓存中
buffer = Uint8List.fromList([...buffer, ...chunk]);

// 当缓存中的数据足够一帧时,处理并发送
while (buffer.length >= frameSize) {
Uint8List frame = buffer.sublist(0, frameSize); // 取出一帧数据
buffer = buffer.sublist(frameSize); // 移除已处理的数据

// 第一帧处理
if (status == STATUS_FIRST_FRAME) {
String data = json.encode(_createParams(appId, status, frame));
_channel?.sink.add(data);
print('第一帧已发送... $data');
status = STATUS_CONTINUE_FRAME;
}
// 中间帧处理
else if (status == STATUS_CONTINUE_FRAME) {
String data = json.encode(_createParams(appId, status, frame));
_channel?.sink.add(data);
print('中间帧已发送... $data');
}

// 模拟音频采样间隔
await Future.delayed(
Duration(milliseconds: (interval * 1000).round()));
}
}

status = STATUS_LAST_FRAME;
String data = json.encode(_createParams(appId, status, buffer));
_channel?.sink.add(data);
print('最后一帧已发送... $data');
_state = 2;
} catch (e) {
print("上传音频数据异常: $e");
}
print('音频处理完成');
}

//创建参数
Map<String, dynamic> _createParams(
String appId, int status, Uint8List audio) {
final param = {
"header": {
"app_id": appId,
"status": status,
},
"parameter": {
"ist": {
"accent": "mandarin",
"domain": "ist_ed_open",
"language": "zh_cn",
"vto": 15000,
"eos": 150000
},
"streamtrans": {"from": "cn", "to": "en"},
"tts": {
"vcn": "x2_catherine",
"tts_results": {
"encoding": "raw",
"sample_rate": 16000,
"channels": 1,
"bit_depth": 16,
"frame_size": 0
}
}
},
"payload": {
"data": {
"audio": base64.encode(audio),
"encoding": "raw",
"sample_rate": 16000,
"seq": 1,
"status": status
}
}
};

return param;
}

// 向流中添加音频数据
void addAudioData(Uint8List data) {
_inputaudioStream.add(data);
}

//接收到翻译结果
Future<void> onMessage(String message) async {
try {
print("收到的消息:$message");

// 对结果进行解析
var messageMap = json.decode(message);
var status = messageMap["header"]["status"];
var sid = messageMap["header"]["sid"];
// 接收到的识别结果写到文本
if (messageMap.containsKey('payload') &&
messageMap['payload'].containsKey('recognition_results')) {
var result = messageMap['payload']['recognition_results']['text'];
var asrresult = utf8.decode(base64.decode(result));
addtext(asrresult);
print("收到识别回应:${originalText()}");
}
//接收到的翻译结果写到文本
if (messageMap.containsKey('payload') &&
messageMap['payload'].containsKey('streamtrans_results')) {
var result = messageMap['payload']['streamtrans_results']['text'];
var transresult = utf8.decode(base64.decode(result));
print("收到翻译结果:$transresult");
}
if (messageMap.containsKey('payload') &&
messageMap['payload'].containsKey('tts_results')) {
var audio = messageMap['payload']['tts_results']['audio'];
var audioData = base64.decode(audio);
_outputaudioStream.add(audioData);
print("收到音频结果:${audioData.length}");
}
if (status == 2) {
print("任务已结束!");
_state = 3;
onEvent(this);
await Future.delayed(Duration(seconds: 1));
_channel?.sink.close();
}
} catch (e) {
print("接受结果异常: $e");
}
}

// 关闭流并停止上传任务
void endpuish() {
_inputaudioStream.close(); // 关闭流
}

void addtext(String result) {
var resultMap = json.decode(result);
int sn = resultMap["sn"] as int;
String pgs = resultMap["pgs"] as String;
List<int> rg = resultMap["rg"] != null
? List<int>.from(resultMap["rg"])
: []; // 默认值为空列表
List<dynamic> ws = resultMap["ws"] as List<dynamic>;
var item = XunferTask_Result_Text_Item(sn: sn, pgs: pgs, rg: rg, ws: ws);
tests[sn] = item;
}

int state() {
return this._state;
}

//文字
String originalText() {
if (tests.isNotEmpty) {
String resultStr = "";
Map<int, XunferTask_Result_Text_Item> _results = {};
var sortedKeys = tests.keys.toList()..sort();
for (var key in sortedKeys) {
var item = tests[key];
if (item != null) {
if (item.pgs == "rpl") {
var start = item.rg[0];
var end = item.rg[1];
for (int i = start; i <= end; i++) {
_results.remove(i);
}
}
_results[item.sn] = item;
}
}
var keys = _results.keys.toList()..sort();
for (var key in keys) {
var item = tests[key];
if (item != null) {
for (var ws in item.ws) {
var it = ws as Map<String, dynamic>;
var cw = it["cw"] as List<dynamic>;
for (var ct in cw) {
resultStr += ct["w"] as String;
}
}
}
}
return resultStr;
}
return "";
}

String translateText() {
return "";
}

Stream<Uint8List> originalAudio() {
return _inputaudioStream.stream;
}

//音频
Stream<Uint8List> translateAudio() {
return _outputaudioStream.stream;
}
}

+ 0
- 56
lib/xunfei/xunfei.dart Vedi File

@@ -1,56 +0,0 @@
import 'dart:async';
import 'dart:typed_data';

import 'package:demo001/xunfei/task_trans.dart';

/// Entry point of a speech-translation SDK implementation.
abstract class ISDK {
  /// Creates a new streaming translation task; [onEvent] is invoked whenever
  /// the task's state changes.
  ITaskTrans createTransTask(TaskStateChangeEvent onEvent);
}

/// A single streaming translation task: push source audio in, read the
/// recognized/translated text and synthesized audio out.
abstract class ITaskTrans {
  /// Task state: 0 not connected, 1 uploading audio, 2 upload finished,
  /// 3 task complete (callers poll for 3 before playing results).
  int state();

  /// Recognized text in the source language, merged from partial results.
  String originalText();

  /// Translated text in the target language.
  String translateText();

  /// Stream of the original (source) audio bytes.
  Stream<Uint8List> originalAudio();

  /// Stream of the synthesized translated audio bytes.
  Stream<Uint8List> translateAudio();

  /// Pushes one chunk of source audio into the task.
  void addAudioData(Uint8List data);

  /// Signals that no more audio will be pushed.
  /// NOTE(review): name looks like a typo of "endpush"; kept for
  /// compatibility with existing callers.
  void endpuish();
}

/// Singleton facade over the XunFei SDK; hands out translation tasks.
class Xunfei implements ISDK {
  final String appId;
  final String apiKey;
  final String apiSecret;

  // Lazily-created process-wide instance.
  static Xunfei? _instance;

  Xunfei._internal({
    required this.appId,
    required this.apiKey,
    required this.apiSecret,
  });

  /// Returns the shared instance, creating it on first use.
  ///
  /// Credentials supplied on later calls are ignored once the instance
  /// exists.
  factory Xunfei({
    required String appId,
    required String apiKey,
    required String apiSecret,
  }) {
    return _instance ??= Xunfei._internal(
      appId: appId,
      apiKey: apiKey,
      apiSecret: apiSecret,
    );
  }

  /// Creates a new streaming translation task that reports state changes
  /// through [onEvent].
  @override
  ITaskTrans createTransTask(TaskStateChangeEvent onEvent) {
    return XunferTaskTrans(
      appId: appId,
      apiKey: apiKey,
      apiSecret: apiSecret,
      onEvent: onEvent,
    );
  }
}

+ 297
- 0
lib/xunfei/xunfei_translate.dart Vedi File

@@ -0,0 +1,297 @@
import 'dart:async';
import 'dart:convert';
import 'dart:math';
import 'dart:typed_data';
import 'package:intl/intl.dart';
import 'package:demo001/xunfei/utils.dart';
import 'package:web_socket_channel/web_socket_channel.dart';

/// XunFei real-time speech translation client (singleton).
///
/// Feed raw PCM (16 kHz, 16-bit, mono — per the request payload below)
/// through [starttranslate]; the class buffers it, runs a simple RMS-volume
/// voice-activity detection every 40 ms, and streams each detected utterance
/// to the XunFei simultaneous-interpretation WebSocket service as one
/// [XunFeiTranslateTask].
class XunFeiTranslate {
  final int _chunkSize = 1280; // bytes per frame = 40 ms of 16 kHz 16-bit mono PCM

  // Voice-activity detection thresholds (RMS of signed 16-bit samples).
  final double _speakingThreshold = 50.0; // above this: speech starts
  final double _silenceThreshold = 30.0; // below this: candidate silence
  final Duration _silenceDuration = Duration(seconds: 1); // silence needed to end an utterance
  DateTime? _lastBelowThresholdTime; // first moment volume dropped below threshold
  double _volume = 0; // latest measured RMS volume (original comment "当前原因" was a typo for volume)

  Uint8List _buff = Uint8List(0); // pending audio not yet sent
  bool _isrecord = false; // whether incoming audio is being accepted
  bool _isspeaking = false; // whether VAD currently detects speech
  Timer? _timer; // 40 ms polling timer driving the send loop
  StreamSubscription<Uint8List>? _audioSub; // subscription to the mic stream

  XunFeiTranslateTask? currtask; // task for the utterance in progress, if any

  final String appId;
  final String apiKey;
  final String apiSecret;
  final String host = "ws-api.xf-yun.com";
  final String requestUri = "/v1/private/simult_interpretation";

  // Process-wide singleton instance.
  static XunFeiTranslate? _instance;
  XunFeiTranslate._internal({
    required this.appId,
    required this.apiKey,
    required this.apiSecret,
  });

  /// Returns the singleton, creating it on first call.
  ///
  /// NOTE: credentials passed on later calls are silently ignored once the
  /// instance exists.
  factory XunFeiTranslate({
    required String appId,
    required String apiKey,
    required String apiSecret,
  }) {
    _instance ??= XunFeiTranslate._internal(
      appId: appId,
      apiKey: apiKey,
      apiSecret: apiSecret,
    );
    return _instance!;
  }

  /// Builds the signed WebSocket URL: HMAC-SHA256 over "host/date/request
  /// line", base64-wrapped into the `authorization` query parameter, per the
  /// XunFei authentication scheme.
  String _geturl() {
    final now = DateTime.now();
    final date =
        DateFormat("EEE, dd MMM yyyy HH:mm:ss 'GMT'").format(now.toUtc());
    final signatureOrigin =
        "host: $host\ndate: $date\nGET $requestUri HTTP/1.1";

    // HMAC-SHA256 signature of the canonical request line.
    final signature = XunfeiUtils.hmacSha256(apiSecret, signatureOrigin);

    final authorization = base64.encode(utf8.encode(
        "api_key=\"$apiKey\", algorithm=\"hmac-sha256\", headers=\"host date request-line\", signature=\"$signature\""));

    final queryParams = {
      "authorization": authorization,
      "date": date,
      "host": host,
      "serviceId": "simult_interpretation"
    };

    // NOTE(review): plain ws:// — confirm whether the service requires wss://.
    final wsUri =
        'ws://$host$requestUri?${Uri(queryParameters: queryParams).query}';
    return wsUri;
  }

  /// Builds one request frame for the service.
  ///
  /// [status] marks the frame's position in the utterance: 0 first frame,
  /// 1 intermediate frame, 2 final frame.
  Map<String, dynamic> _createParams(int status, Uint8List audio) {
    final param = {
      "header": {
        "app_id": appId,
        "status": status,
      },
      "parameter": {
        "ist": {
          "accent": "mandarin",
          "domain": "ist_ed_open",
          "language": "zh_cn",
          "vto": 15000,
          "eos": 150000
        },
        "streamtrans": {"from": "cn", "to": "en"},
        "tts": {
          "vcn": "x2_catherine",
          "tts_results": {
            "encoding": "raw",
            "sample_rate": 16000,
            "channels": 1,
            "bit_depth": 16,
            "frame_size": 0
          }
        }
      },
      "payload": {
        "data": {
          "audio": base64.encode(audio),
          "encoding": "raw",
          "sample_rate": 16000,
          "seq": 1,
          "status": status
        }
      }
    };

    return param;
  }

  /// Starts consuming [stream] (raw PCM chunks) and translating detected
  /// utterances. Safe to call again after [stoptranslate].
  Future<void> starttranslate(Stream<Uint8List> stream) async {
    _isrecord = true;
    // Drop any listener left over from a previous session so chunks are not
    // buffered twice (the original never cancelled this subscription).
    await _audioSub?.cancel();
    _audioSub = stream.listen((data) {
      if (_isrecord) {
        _buff = _appendToBuffer(data);
      }
    });
    // Cancel a leftover timer before starting a new one (leak in original).
    _timer?.cancel();
    _timer = Timer.periodic(Duration(milliseconds: 40), (timer) async {
      // Drain one frame every 40 ms (matches the 1280-byte frame size).
      var frame = _getAudioData();
      _volume = _calculateAmplitude(frame);
      var state = _checkSpeakingStatus();
      if (state == 1) {
        // Speech started: open a new task and send the first frame (status 0).
        currtask = XunFeiTranslateTask(_geturl());
        currtask?.sendaudio(_createParams(0, frame));
      } else if (state == 2) {
        // Speech continuing: send an intermediate frame (status 1).
        // (Original comment mislabeled this branch as "end of speech".)
        currtask?.sendaudio(_createParams(1, frame));
      } else if (state == 3) {
        // Speech ended: send the final frame (status 2) and release the task
        // so later silence (or stoptranslate) cannot re-send a final frame.
        currtask?.sendaudio(_createParams(2, frame));
        currtask = null;
      }
      // state == 0: silence — the frame is discarded by design.
    });
    return;
  }

  /// Stops translating: cancels the polling timer and the audio
  /// subscription, and flushes the remaining buffer as a final frame if an
  /// utterance is still open.
  Future<void> stoptranslate() async {
    _isrecord = false;
    // The original only nulled the field, leaking a live periodic timer
    // that kept firing forever.
    _timer?.cancel();
    _timer = null;
    await _audioSub?.cancel();
    _audioSub = null;
    if (currtask != null) {
      var _frame = _getAudioData();
      currtask?.sendaudio(_createParams(2, _frame));
      currtask = null;
    }
    return;
  }

  /// Appends [newData] to the pending-audio buffer and returns the result.
  Uint8List _appendToBuffer(Uint8List newData) {
    var newBuffer = Uint8List(_buff.length + newData.length);
    newBuffer.setAll(0, _buff);
    newBuffer.setAll(_buff.length, newData);
    return newBuffer;
  }

  /// Removes and returns up to one frame (1280 bytes) from the buffer.
  Uint8List _getAudioData() {
    if (_buff.length >= _chunkSize) {
      // Full frame available: take it and keep the remainder.
      var data = _buff.sublist(0, _chunkSize);
      _buff = _buff.sublist(_chunkSize);
      return data;
    } else {
      // Short tail: hand it out once and clear the buffer. The original
      // returned _buff without clearing it (via a dead `length >= 0`
      // branch), so the same tail audio was re-sent on every 40 ms tick.
      var data = _buff;
      _buff = Uint8List(0);
      return data;
    }
  }

  /// RMS amplitude of a little-endian 16-bit PCM frame; 0 for an empty frame.
  double _calculateAmplitude(Uint8List data) {
    if (data.length < 2) {
      return 0; // original divided by zero here, yielding NaN
    }
    Int16List samples = Int16List(data.length ~/ 2);
    // sublistView honors the view's offset/length; ByteData.view(data.buffer)
    // would silently read the wrong bytes for an offset view.
    ByteData byteData = ByteData.sublistView(data);
    for (int i = 0; i < samples.length; i++) {
      samples[i] = byteData.getInt16(i * 2, Endian.little);
    }

    double sum = 0;
    for (int sample in samples) {
      sum += (sample * sample);
    }
    double rms = sqrt(sum / samples.length);
    return rms;
  }

  /// Updates the VAD state machine from [_volume] and returns:
  /// 0 silence, 1 speech just started, 2 speech continuing,
  /// 3 speech just ended (returned exactly once per utterance).
  int _checkSpeakingStatus() {
    if (_volume > _speakingThreshold && !_isspeaking) {
      // Volume crossed the start threshold: speech begins.
      _isspeaking = true;
      return 1;
    } else if (_volume < _silenceThreshold) {
      if (_lastBelowThresholdTime == null) {
        // Remember when the volume first dropped below the threshold.
        _lastBelowThresholdTime = DateTime.now();
      } else if (DateTime.now().difference(_lastBelowThresholdTime!) >
          _silenceDuration) {
        // Guard so state 3 fires only once per utterance; without it the
        // original returned 3 on every tick of continued silence (the
        // deleted SoundRecordScene version of this logic had this guard).
        if (_isspeaking) {
          _isspeaking = false;
          return 3;
        }
      }
    } else {
      // Volume back above the silence threshold: reset the silence clock.
      _lastBelowThresholdTime = null;
    }
    if (!_isspeaking) {
      return 0;
    } else {
      return 2;
    }
  }
}

/// One utterance's WebSocket session with the XunFei
/// simultaneous-interpretation service: sends pre-built frames and logs the
/// recognition / translation / TTS results it receives.
class XunFeiTranslateTask {
  late WebSocketChannel _channel;
  // Best-effort connection flag: set true right after connect is initiated;
  // flipped back by onError/onDone. It does NOT guarantee the handshake
  // actually succeeded.
  bool isconnected = false;

  XunFeiTranslateTask(String url) {
    _channel = WebSocketChannel.connect(Uri.parse(url));
    // Apply the 10 s inactivity timeout to the stream we actually listen to.
    // The original called stream.timeout(...) and discarded the returned
    // stream, so the timeout had no effect; now a stall surfaces as a
    // TimeoutException in onError and (via cancelOnError) ends the session.
    _channel.stream.timeout(Duration(seconds: 10)).listen(
      (message) {
        onMessage(message);
      },
      onError: (error) {
        isconnected = false;
        print('连接失败: $error');
      },
      onDone: () {
        isconnected = false;
        print('WebSocket 连接已关闭');
        // _channel is non-nullable; the original's `_channel?.` was a no-op.
        print('Close code: ${_channel.closeCode}');
        print('Close reason: ${_channel.closeReason}');
      },
      cancelOnError: true,
    );
    isconnected = true;
  }

  /// Sends one already-encoded request frame if the socket is believed open.
  Future<void> sendaudio(Map<String, dynamic> data) async {
    if (isconnected) {
      _channel.sink.add(json.encode(data));
    }
  }

  /// Handles one server message: ASR text, translated text, TTS audio, and
  /// the end-of-task marker (header.status == 2).
  Future<void> onMessage(String message) async {
    try {
      print("收到的消息:$message");
      var messageMap = json.decode(message);
      var status = messageMap["header"]["status"];
      // Recognition (ASR) result.
      if (messageMap.containsKey('payload') &&
          messageMap['payload'].containsKey('recognition_results')) {
        var result = messageMap['payload']['recognition_results']['text'];
        var asrresult = utf8.decode(base64.decode(result));
        // The original decoded this and silently dropped it; at least log it
        // like the other result kinds.
        print("收到识别结果:$asrresult");
      }
      // Streaming translation result.
      if (messageMap.containsKey('payload') &&
          messageMap['payload'].containsKey('streamtrans_results')) {
        var result = messageMap['payload']['streamtrans_results']['text'];
        var transresult = utf8.decode(base64.decode(result));
        print("收到翻译结果:$transresult");
      }
      // Synthesized (TTS) audio result.
      if (messageMap.containsKey('payload') &&
          messageMap['payload'].containsKey('tts_results')) {
        var audio = messageMap['payload']['tts_results']['audio'];
        var audioData = base64.decode(audio);
        print("收到音频结果:${audioData.length}");
      }
      if (status == 2) {
        // Server signaled the session is complete; close our side too.
        print("任务已结束!");
        _channel.sink.close();
      }
    } catch (e) {
      print("收到的消息 异常:$e");
    }
    return;
  }
}

+ 4
- 4
linux/flutter/generated_plugin_registrant.cc Vedi File

@@ -6,10 +6,10 @@

#include "generated_plugin_registrant.h"

#include <audioplayers_linux/audioplayers_linux_plugin.h>
#include <record_linux/record_linux_plugin.h>

void fl_register_plugins(FlPluginRegistry* registry) {
g_autoptr(FlPluginRegistrar) audioplayers_linux_registrar =
fl_plugin_registry_get_registrar_for_plugin(registry, "AudioplayersLinuxPlugin");
audioplayers_linux_plugin_register_with_registrar(audioplayers_linux_registrar);
g_autoptr(FlPluginRegistrar) record_linux_registrar =
fl_plugin_registry_get_registrar_for_plugin(registry, "RecordLinuxPlugin");
record_linux_plugin_register_with_registrar(record_linux_registrar);
}

+ 1
- 1
linux/flutter/generated_plugins.cmake Vedi File

@@ -3,7 +3,7 @@
#

list(APPEND FLUTTER_PLUGIN_LIST
audioplayers_linux
record_linux
)

list(APPEND FLUTTER_FFI_PLUGIN_LIST


+ 2
- 4
macos/Flutter/GeneratedPluginRegistrant.swift Vedi File

@@ -6,13 +6,11 @@ import FlutterMacOS
import Foundation

import audio_session
import audioplayers_darwin
import just_audio
import path_provider_foundation
import record_darwin

func RegisterGeneratedPlugins(registry: FlutterPluginRegistry) {
AudioSessionPlugin.register(with: registry.registrar(forPlugin: "AudioSessionPlugin"))
AudioplayersDarwinPlugin.register(with: registry.registrar(forPlugin: "AudioplayersDarwinPlugin"))
JustAudioPlugin.register(with: registry.registrar(forPlugin: "JustAudioPlugin"))
PathProviderPlugin.register(with: registry.registrar(forPlugin: "PathProviderPlugin"))
RecordPlugin.register(with: registry.registrar(forPlugin: "RecordPlugin"))
}

+ 143
- 191
pubspec.lock Vedi File

@@ -6,7 +6,7 @@ packages:
description:
name: async
sha256: "947bfcf187f74dbc5e146c9eb9c0f10c9f8b30743e341481c1e2ed3ecc18c20c"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.11.0"
audio_session:
@@ -14,71 +14,15 @@ packages:
description:
name: audio_session
sha256: a92eed06a93721bcc8a8b57d0a623e3fb9d2e4e11cef0a08ed448c73886700b7
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "0.1.24"
audioplayers:
dependency: "direct main"
description:
name: audioplayers
sha256: "4ca57fd24594af04e93b9b9f1b1739ffb9204dbab7ce8d9b28a02f464456dcca"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "6.1.1"
audioplayers_android:
dependency: transitive
description:
name: audioplayers_android
sha256: "6c9443ce0a99b29a840f14bc2d0f7b25eb0fd946dc592a1b8a697807d5b195f3"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "5.0.1"
audioplayers_darwin:
dependency: transitive
description:
name: audioplayers_darwin
sha256: e507887f3ff18d8e5a10a668d7bedc28206b12e10b98347797257c6ae1019c3b
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "6.0.0"
audioplayers_linux:
dependency: transitive
description:
name: audioplayers_linux
sha256: "9d3cb4e9533a12a462821e3f18bd282e0fa52f67ff96a06301d48dd48b82c2d1"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "4.0.1"
audioplayers_platform_interface:
dependency: transitive
description:
name: audioplayers_platform_interface
sha256: "6834dd48dfb7bc6c2404998ebdd161f79cd3774a7e6779e1348d54a3bfdcfaa5"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "7.0.0"
audioplayers_web:
dependency: transitive
description:
name: audioplayers_web
sha256: "3609bdf0e05e66a3d9750ee40b1e37f2a622c4edb796cc600b53a90a30a2ace4"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "5.0.1"
audioplayers_windows:
dependency: transitive
description:
name: audioplayers_windows
sha256: "8605762dddba992138d476f6a0c3afd9df30ac5b96039929063eceed416795c2"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "4.0.0"
boolean_selector:
dependency: transitive
description:
name: boolean_selector
sha256: "6cfb5af12253eaf2b368f07bacc5a80d1301a071c73360d746b7f2e32d762c66"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.1.1"
characters:
@@ -86,7 +30,7 @@ packages:
description:
name: characters
sha256: "04a925763edad70e8443c99234dc3328f442e811f1d8fd1a72f1c8ad0f69a605"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.3.0"
clock:
@@ -94,23 +38,23 @@ packages:
description:
name: clock
sha256: cb6d7f03e1de671e34607e909a7213e31d7752be4fb66a86d29fe1eb14bfb5cf
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.1.1"
collection:
dependency: transitive
description:
name: collection
sha256: ee67cb0715911d28db6bf4af1026078bd6f0128b07a5f66fb2ed94ec6783c09a
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
sha256: a1ace0a119f20aabc852d165077c036cd864315bd99b7eaa10a60100341941bf
url: "https://pub.dev"
source: hosted
version: "1.18.0"
version: "1.19.0"
crypto:
dependency: "direct main"
dependency: transitive
description:
name: crypto
sha256: "1e445881f28f22d6140f181e07737b22f1e099a5e1ff94b0af2f9e4a463f4855"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "3.0.6"
cupertino_icons:
@@ -118,7 +62,7 @@ packages:
description:
name: cupertino_icons
sha256: ba631d1c7f7bef6b729a622b7b752645a2d076dba9976925b8f25725a30e1ee6
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.0.8"
etau:
@@ -126,7 +70,7 @@ packages:
description:
name: etau
sha256: "4b43a615ecceb1de7c5a35f0a159920b4fdb8ce5a33d71d3828a31efedc67572"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "0.0.14-alpha.4"
fake_async:
@@ -134,7 +78,7 @@ packages:
description:
name: fake_async
sha256: "511392330127add0b769b75a987850d136345d9227c6b94c96a04cf4a391bf78"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.3.1"
ffi:
@@ -142,23 +86,15 @@ packages:
description:
name: ffi
sha256: "16ed7b077ef01ad6170a3d0c57caa4a112a38d7a2ed5602e0aca9ca6f3d98da6"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.1.3"
file:
dependency: transitive
description:
name: file
sha256: a3b4f84adafef897088c160faf7dfffb7696046cb13ae90b508c2cbc95d3b8d4
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "7.0.1"
fixnum:
dependency: transitive
description:
name: fixnum
sha256: b6dc7065e46c974bc7c5f143080a6764ec7a4be6da1285ececdc37be96de53be
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.1.1"
flutter:
@@ -171,7 +107,7 @@ packages:
description:
name: flutter_lints
sha256: "5398f14efa795ffb7a33e9b6a08798b26a180edac4ad7db3f231e40f82ce11e1"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "5.0.0"
flutter_sound:
@@ -179,7 +115,7 @@ packages:
description:
name: flutter_sound
sha256: "223949653433bfc22749e9a9725c802733ed24e6dd51c53775716c340631dcae"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "9.20.5"
flutter_sound_platform_interface:
@@ -187,7 +123,7 @@ packages:
description:
name: flutter_sound_platform_interface
sha256: b85ac6eb1a482329c560e189fca75d596091c291a7d1756e0a3c9536ae87af1b
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "9.20.5"
flutter_sound_web:
@@ -195,7 +131,7 @@ packages:
description:
name: flutter_sound_web
sha256: dccae5647cdcac368723ff90a3fb2ec0d77b2e871ceb47b3de628806b0ca325c
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "9.20.5"
flutter_test:
@@ -208,36 +144,28 @@ packages:
description: flutter
source: sdk
version: "0.0.0"
go_router:
dependency: "direct main"
description:
name: go_router
sha256: "9b736a9fa879d8ad6df7932cbdcc58237c173ab004ef90d8377923d7ad731eaa"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "14.7.2"
http:
dependency: transitive
dependency: "direct main"
description:
name: http
sha256: fe7ab022b76f3034adc518fb6ea04a82387620e19977665ea18d30a1cf43442f
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.3.0"
http_parser:
dependency: transitive
description:
name: http_parser
sha256: "2aa08ce0341cc9b354a498388e30986515406668dbcc4f7c950c3e715496693b"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
sha256: "178d74305e7866013777bab2c3d8726205dc5a4dd935297175b19a23a2e66571"
url: "https://pub.dev"
source: hosted
version: "4.0.2"
version: "4.1.2"
intl:
dependency: "direct main"
description:
name: intl
sha256: "3df61194eb431efc39c4ceba583b95633a403f46c9fd341e550ce0bfa50e9aa5"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "0.20.2"
js:
@@ -245,87 +173,55 @@ packages:
description:
name: js
sha256: c1b2e9b5ea78c45e1a0788d29606ba27dc5f71f019f32ca5140f61ef071838cf
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "0.7.1"
just_audio:
dependency: "direct main"
description:
name: just_audio
sha256: "50ed9f0ba88012eabdef7519ba6040bdbcf6c6667ebd77736fb25c196c98c0f3"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "0.9.44"
just_audio_platform_interface:
dependency: transitive
description:
name: just_audio_platform_interface
sha256: "0243828cce503c8366cc2090cefb2b3c871aa8ed2f520670d76fd47aa1ab2790"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "4.3.0"
just_audio_web:
dependency: transitive
description:
name: just_audio_web
sha256: "9a98035b8b24b40749507687520ec5ab404e291d2b0937823ff45d92cb18d448"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "0.4.13"
leak_tracker:
dependency: transitive
description:
name: leak_tracker
sha256: "3f87a60e8c63aecc975dda1ceedbc8f24de75f09e4856ea27daf8958f2f0ce05"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
sha256: "7bb2830ebd849694d1ec25bf1f44582d6ac531a57a365a803a6034ff751d2d06"
url: "https://pub.dev"
source: hosted
version: "10.0.5"
version: "10.0.7"
leak_tracker_flutter_testing:
dependency: transitive
description:
name: leak_tracker_flutter_testing
sha256: "932549fb305594d82d7183ecd9fa93463e9914e1b67cacc34bc40906594a1806"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
sha256: "9491a714cca3667b60b5c420da8217e6de0d1ba7a5ec322fab01758f6998f379"
url: "https://pub.dev"
source: hosted
version: "3.0.5"
version: "3.0.8"
leak_tracker_testing:
dependency: transitive
description:
name: leak_tracker_testing
sha256: "6ba465d5d76e67ddf503e1161d1f4a6bc42306f9d66ca1e8f079a47290fb06d3"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "3.0.1"
lints:
dependency: transitive
description:
name: lints
sha256: "3315600f3fb3b135be672bf4a178c55f274bebe368325ae18462c89ac1e3b413"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
sha256: c35bb79562d980e9a453fc715854e1ed39e24e7d0297a880ef54e17f9874a9d7
url: "https://pub.dev"
source: hosted
version: "5.0.0"
version: "5.1.1"
logger:
dependency: transitive
dependency: "direct main"
description:
name: logger
sha256: be4b23575aac7ebf01f225a241eb7f6b5641eeaf43c6a8613510fc2f8cf187d1
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.5.0"
logging:
dependency: transitive
description:
name: logging
sha256: c8245ada5f1717ed44271ed1c26b8ce85ca3228fd2ffdb75468ab01979309d61
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
source: hosted
version: "1.3.0"
matcher:
dependency: transitive
description:
name: matcher
sha256: d2323aa2060500f906aa31a895b4030b6da3ebdcc5619d14ce1aada65cd161cb
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "0.12.16+1"
material_color_utilities:
@@ -333,7 +229,7 @@ packages:
description:
name: material_color_utilities
sha256: f7142bb1154231d7ea5f96bc7bde4bda2a0945d2806bb11670e30b850d56bdec
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "0.11.1"
meta:
@@ -341,7 +237,7 @@ packages:
description:
name: meta
sha256: bdb68674043280c3428e9ec998512fb681678676b3c54e773629ffe74419f8c7
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.15.0"
nested:
@@ -349,7 +245,7 @@ packages:
description:
name: nested
sha256: "03bac4c528c64c95c722ec99280375a6f2fc708eec17c7b3f07253b626cd2a20"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.0.0"
path:
@@ -357,7 +253,7 @@ packages:
description:
name: path
sha256: "087ce49c3f0dc39180befefc60fdb4acd8f8620e5682fe2476afd0b3688bb4af"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.9.0"
path_provider:
@@ -365,7 +261,7 @@ packages:
description:
name: path_provider
sha256: "50c5dd5b6e1aaf6fb3a78b33f6aa3afca52bf903a8a5298f53101fdaee55bbcd"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.1.5"
path_provider_android:
@@ -373,7 +269,7 @@ packages:
description:
name: path_provider_android
sha256: "4adf4fd5423ec60a29506c76581bc05854c55e3a0b72d35bb28d661c9686edf2"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.2.15"
path_provider_foundation:
@@ -381,7 +277,7 @@ packages:
description:
name: path_provider_foundation
sha256: "4843174df4d288f5e29185bd6e72a6fbdf5a4a4602717eed565497429f179942"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.4.1"
path_provider_linux:
@@ -389,7 +285,7 @@ packages:
description:
name: path_provider_linux
sha256: f7a1fe3a634fe7734c8d3f2766ad746ae2a2884abe22e241a8b301bf5cac3279
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.2.1"
path_provider_platform_interface:
@@ -397,7 +293,7 @@ packages:
description:
name: path_provider_platform_interface
sha256: "88f5779f72ba699763fa3a3b06aa4bf6de76c8e5de842cf6f29e2e06476c2334"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.1.2"
path_provider_windows:
@@ -405,7 +301,7 @@ packages:
description:
name: path_provider_windows
sha256: bd6f00dbd873bfb70d0761682da2b3a2c2fccc2b9e84c495821639601d81afe7
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.3.0"
permission_handler:
@@ -413,7 +309,7 @@ packages:
description:
name: permission_handler
sha256: "18bf33f7fefbd812f37e72091a15575e72d5318854877e0e4035a24ac1113ecb"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "11.3.1"
permission_handler_android:
@@ -421,7 +317,7 @@ packages:
description:
name: permission_handler_android
sha256: "71bbecfee799e65aff7c744761a57e817e73b738fedf62ab7afd5593da21f9f1"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "12.0.13"
permission_handler_apple:
@@ -429,7 +325,7 @@ packages:
description:
name: permission_handler_apple
sha256: e6f6d73b12438ef13e648c4ae56bd106ec60d17e90a59c4545db6781229082a0
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "9.4.5"
permission_handler_html:
@@ -437,7 +333,7 @@ packages:
description:
name: permission_handler_html
sha256: "38f000e83355abb3392140f6bc3030660cfaef189e1f87824facb76300b4ff24"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "0.1.3+5"
permission_handler_platform_interface:
@@ -445,7 +341,7 @@ packages:
description:
name: permission_handler_platform_interface
sha256: e9c8eadee926c4532d0305dff94b85bf961f16759c3af791486613152af4b4f9
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "4.2.3"
permission_handler_windows:
@@ -453,7 +349,7 @@ packages:
description:
name: permission_handler_windows
sha256: "1a790728016f79a41216d88672dbc5df30e686e811ad4e698bfc51f76ad91f1e"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "0.2.1"
platform:
@@ -461,7 +357,7 @@ packages:
description:
name: platform
sha256: "5d6b1b0036a5f331ebc77c850ebc8506cbc1e9416c27e59b439f917a902a4984"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "3.1.6"
plugin_platform_interface:
@@ -469,7 +365,7 @@ packages:
description:
name: plugin_platform_interface
sha256: "4820fbfdb9478b1ebae27888254d445073732dae3d6ea81f0b7e06d5dedc3f02"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.1.8"
provider:
@@ -477,7 +373,7 @@ packages:
description:
name: provider
sha256: c8a055ee5ce3fd98d6fc872478b03823ffdb448699c6ebdbbc71d59b596fd48c
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "6.1.2"
recase:
@@ -485,28 +381,84 @@ packages:
description:
name: recase
sha256: e4eb4ec2dcdee52dcf99cb4ceabaffc631d7424ee55e56f280bc039737f89213
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "4.1.0"
record:
dependency: "direct main"
description:
name: record
sha256: "8cb57763d954624fbc673874930c6f1ceca3baaf9bfee24b25da6fd451362394"
url: "https://pub.dev"
source: hosted
version: "5.2.0"
record_android:
dependency: transitive
description:
name: record_android
sha256: "0b4739a2502fff402b0ac0ff1d6b2740854d116d78e06a4a16b3989821f84446"
url: "https://pub.dev"
source: hosted
version: "1.3.0"
record_darwin:
dependency: transitive
description:
name: record_darwin
sha256: e487eccb19d82a9a39cd0126945cfc47b9986e0df211734e2788c95e3f63c82c
url: "https://pub.dev"
source: hosted
version: "1.2.2"
record_linux:
dependency: transitive
description:
name: record_linux
sha256: "74d41a9ebb1eb498a38e9a813dd524e8f0b4fdd627270bda9756f437b110a3e3"
url: "https://pub.dev"
source: hosted
version: "0.7.2"
record_platform_interface:
dependency: transitive
description:
name: record_platform_interface
sha256: "8a575828733d4c3cb5983c914696f40db8667eab3538d4c41c50cbb79e722ef4"
url: "https://pub.dev"
source: hosted
version: "1.2.0"
record_web:
dependency: transitive
description:
name: record_web
sha256: "10cb041349024ce4256e11dd35874df26d8b45b800678f2f51fd1318901adc64"
url: "https://pub.dev"
source: hosted
version: "1.1.4"
record_windows:
dependency: transitive
description:
name: record_windows
sha256: "7bce0ac47454212ca8bfa72791d8b6a951f2fb0d4b953b64443c014227f035b4"
url: "https://pub.dev"
source: hosted
version: "1.0.4"
rxdart:
dependency: transitive
description:
name: rxdart
sha256: "5c3004a4a8dbb94bd4bf5412a4def4acdaa12e12f269737a5751369e12d1a962"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "0.28.0"
sky_engine:
dependency: transitive
description: flutter
source: sdk
version: "0.0.99"
version: "0.0.0"
source_span:
dependency: transitive
description:
name: source_span
sha256: "53e943d4206a5e30df338fd4c6e7a077e02254531b138a15aec3bd143c1a8b3c"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.10.0"
sprintf:
@@ -514,39 +466,39 @@ packages:
description:
name: sprintf
sha256: "1fc9ffe69d4df602376b52949af107d8f5703b77cda567c4d7d86a0693120f23"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "7.0.0"
stack_trace:
dependency: transitive
description:
name: stack_trace
sha256: "73713990125a6d93122541237550ee3352a2d84baad52d375a4cad2eb9b7ce0b"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
sha256: "9f47fd3630d76be3ab26f0ee06d213679aa425996925ff3feffdec504931c377"
url: "https://pub.dev"
source: hosted
version: "1.11.1"
version: "1.12.0"
stream_channel:
dependency: transitive
description:
name: stream_channel
sha256: ba2aa5d8cc609d96bbb2899c28934f9e1af5cddbd60a827822ea467161eb54e7
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.1.2"
string_scanner:
dependency: transitive
description:
name: string_scanner
sha256: "556692adab6cfa87322a115640c11f13cb77b3f076ddcc5d6ae3c20242bedcde"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
sha256: "688af5ed3402a4bde5b3a6c15fd768dbf2621a614950b17f04626c431ab3c4c3"
url: "https://pub.dev"
source: hosted
version: "1.2.0"
version: "1.3.0"
synchronized:
dependency: transitive
description:
name: synchronized
sha256: "69fe30f3a8b04a0be0c15ae6490fc859a78ef4c43ae2dd5e8a623d45bfcf9225"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "3.3.0+3"
tau_web:
@@ -554,7 +506,7 @@ packages:
description:
name: tau_web
sha256: c612cd3dcd9b7aa3472c0272bf3531fc232cd80e53ed7d7388b77dd541720cd6
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "0.0.14-alpha.4"
term_glyph:
@@ -562,23 +514,23 @@ packages:
description:
name: term_glyph
sha256: a29248a84fbb7c79282b40b8c72a1209db169a2e0542bce341da992fe1bc7e84
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.2.1"
test_api:
dependency: transitive
description:
name: test_api
sha256: "5b8a98dafc4d5c4c9c72d8b31ab2b23fc13422348d2997120294d3bac86b4ddb"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
sha256: "664d3a9a64782fcdeb83ce9c6b39e78fd2971d4e37827b9b06c3aa1edc5e760c"
url: "https://pub.dev"
source: hosted
version: "0.7.2"
version: "0.7.3"
typed_data:
dependency: transitive
description:
name: typed_data
sha256: f9049c039ebfeb4cf7a7104a675823cd72dba8297f264b6637062516699fa006
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.4.0"
uuid:
@@ -586,7 +538,7 @@ packages:
description:
name: uuid
sha256: a5be9ef6618a7ac1e964353ef476418026db906c4facdedaa299b7a2e71690ff
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "4.5.1"
vector_math:
@@ -594,23 +546,23 @@ packages:
description:
name: vector_math
sha256: "80b3257d1492ce4d091729e3a67a60407d227c27241d6927be0130c98e741803"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "2.1.4"
vm_service:
dependency: transitive
description:
name: vm_service
sha256: "5c5f338a667b4c644744b661f309fb8080bb94b18a7e91ef1dbd343bed00ed6d"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
sha256: f6be3ed8bd01289b34d679c2b62226f63c0e69f9fd2e50a6b3c1c729a961041b
url: "https://pub.dev"
source: hosted
version: "14.2.5"
version: "14.3.0"
web:
dependency: transitive
description:
name: web
sha256: cd3543bd5798f6ad290ea73d210f423502e71900302dde696f8bff84bf89a1cb
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.1.0"
web_socket:
@@ -618,7 +570,7 @@ packages:
description:
name: web_socket
sha256: "3c12d96c0c9a4eec095246debcea7b86c0324f22df69893d538fcc6f1b8cce83"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "0.1.6"
web_socket_channel:
@@ -626,7 +578,7 @@ packages:
description:
name: web_socket_channel
sha256: "0b8e2457400d8a859b7b2030786835a28a8e80836ef64402abef392ff4f1d0e5"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "3.0.2"
xdg_directories:
@@ -634,9 +586,9 @@ packages:
description:
name: xdg_directories
sha256: "7a3f37b05d989967cdddcbb571f1ea834867ae2faa29725fd085180e0883aa15"
url: "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/"
url: "https://pub.dev"
source: hosted
version: "1.1.0"
sdks:
dart: ">=3.5.4 <4.0.0"
dart: ">=3.6.0 <4.0.0"
flutter: ">=3.24.0"

+ 7
- 8
pubspec.yaml Vedi File

@@ -35,15 +35,14 @@ dependencies:
# The following adds the Cupertino Icons font to your application.
# Use with the CupertinoIcons class for iOS style icons.
cupertino_icons: ^1.0.8
crypto: ^3.0.6
intl: ^0.20.1
web_socket_channel: ^3.0.1
go_router: ^14.6.3
audioplayers: ^6.1.0
path_provider: ^2.1.5
flutter_sound: ^9.19.1
flutter_sound: ^9.20.5
permission_handler: ^11.3.1
just_audio: ^0.9.45
logger: ^2.5.0
path_provider: ^2.1.5
record: ^5.2.0
intl: ^0.20.2
web_socket_channel: ^3.0.2
http: ^1.3.0

dev_dependencies:
flutter_test:


+ 3
- 3
windows/flutter/generated_plugin_registrant.cc Vedi File

@@ -6,12 +6,12 @@

#include "generated_plugin_registrant.h"

#include <audioplayers_windows/audioplayers_windows_plugin.h>
#include <permission_handler_windows/permission_handler_windows_plugin.h>
#include <record_windows/record_windows_plugin_c_api.h>

void RegisterPlugins(flutter::PluginRegistry* registry) {
AudioplayersWindowsPluginRegisterWithRegistrar(
registry->GetRegistrarForPlugin("AudioplayersWindowsPlugin"));
PermissionHandlerWindowsPluginRegisterWithRegistrar(
registry->GetRegistrarForPlugin("PermissionHandlerWindowsPlugin"));
RecordWindowsPluginCApiRegisterWithRegistrar(
registry->GetRegistrarForPlugin("RecordWindowsPluginCApi"));
}

+ 1
- 1
windows/flutter/generated_plugins.cmake Vedi File

@@ -3,8 +3,8 @@
#

list(APPEND FLUTTER_PLUGIN_LIST
audioplayers_windows
permission_handler_windows
record_windows
)

list(APPEND FLUTTER_FFI_PLUGIN_LIST


Caricamento…
Annulla
Salva