Upload the simultaneous interpretation logic code

master
liwei1dao, 2 months ago
commit a6119a5633
21 files changed with 250 additions and 1149 deletions
  1. android/app/src/main/AndroidManifest.xml (+8 -0)
  2. lib/main.dart (+2 -53)
  3. lib/scenes/home/home_logic.dart (+0 -92)
  4. lib/scenes/home/home_state.dart (+0 -23)
  5. lib/scenes/home/home_view.dart (+0 -235)
  6. lib/scenes/login/login_logic.dart (+0 -105)
  7. lib/scenes/login/login_state.dart (+0 -32)
  8. lib/scenes/login/login_view.dart (+0 -317)
  9. lib/scenes/login/model/data.dart (+0 -20)
  10. lib/scenes/login/model/data.g.dart (+0 -19)
  11. lib/scenes/login/model/login_model.dart (+0 -26)
  12. lib/scenes/login/model/login_model.g.dart (+0 -22)
  13. lib/scenes/login/model/user.dart (+0 -37)
  14. lib/scenes/login/model/user.g.dart (+0 -31)
  15. lib/scenes/public.dart (+0 -14)
  16. lib/scenes/translate/TranslateLogic.dart (+111 -4)
  17. lib/scenes/translate/TranslateScene.dart (+71 -78)
  18. lib/tools/audio_tool.dart (+19 -7)
  19. lib/xunfei/recognition_result/recognition_content/recognition_content.dart (+1 -2)
  20. lib/xunfei/xunfei_translate.dart (+37 -31)
  21. pubspec.yaml (+1 -1)

android/app/src/main/AndroidManifest.xml (+8 -0)

@@ -1,4 +1,12 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />

<application
android:label="demo001"
android:name="${applicationName}"

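The permissions declared above still have to be granted at runtime on Android 6.0+; the translate logic does this through the permission_handler package the project already depends on. A minimal sketch of that runtime request (the helper name is illustrative, not part of the commit):

import 'package:permission_handler/permission_handler.dart';

/// Requests the runtime permissions backing the manifest entries above.
/// Microphone is mandatory for recording; storage is only needed to save
/// the merged audio files, so it is treated as optional here.
Future<bool> requestAudioPermissions() async {
  final mic = await Permission.microphone.request();
  if (!mic.isGranted) return false;

  final storage = await Permission.storage.request();
  if (!storage.isGranted) {
    // Recording still works, but saving recordings to disk will fail.
  }
  return true;
}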

lib/main.dart (+2 -53)

@@ -1,6 +1,4 @@
import 'package:demo001/gen/assets.gen.dart';
import 'package:demo001/generated/l10n.dart';
import 'package:demo001/scenes/home/home_view.dart';
import 'package:demo001/scenes/translate/TranslateScene.dart';
import 'package:demo001/tools/color_utils.dart';
import 'package:flutter/material.dart';
@@ -8,8 +6,6 @@ import 'package:flutter_localizations/flutter_localizations.dart';
import 'package:get/get.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';

import 'scenes/login/login_view.dart';

void main() {
WidgetsFlutterBinding.ensureInitialized();
EasyLoading.instance
@@ -74,8 +70,8 @@ class MyApp extends StatelessWidget {
GlobalCupertinoLocalizations.delegate,
GlobalWidgetsLocalizations.delegate,
},
supportedLocales: S.delegate.supportedLocales,
home: LoginScene(),
// supportedLocales: S.delegate.supportedLocales,
home: TranslateScene(),
builder: (context, widget) {
final easyload = EasyLoading.init();
var child = easyload(context, widget);
@@ -88,50 +84,3 @@ class MyApp extends StatelessWidget {
);
}
}

S? s;

class IndexWidget extends StatelessWidget {
final RxInt _currentIndex = 0.obs;

final List<Widget> _pages = [HomePage()];

@override
Widget build(BuildContext context) {
return Scaffold(
body: Obx(() => _pages[_currentIndex.value]),
bottomNavigationBar: Builder(builder: (context) {
s = S.of(context);
return Obx(() => BottomNavigationBar(
type: BottomNavigationBarType.fixed,
backgroundColor: bottomNavBg,
currentIndex: _currentIndex.value,
selectedItemColor: white,
unselectedItemColor: grey,
items: [
BottomNavigationBarItem(
icon: Assets.icon.bottomNavHomeUnselect
.image(width: 20, height: 20),
activeIcon: Assets.icon.bottomNavHomeSelect
.image(width: 20, height: 20),
label: s?.bottomNavHome),
BottomNavigationBarItem(
icon: Assets.icon.bottomNavSleepUnselect
.image(width: 20, height: 20),
activeIcon: Assets.icon.bottomNavSleepSelect
.image(width: 20, height: 20),
label: s?.bottomNavSleep),
BottomNavigationBarItem(
icon: Assets.icon.bottomNavProfileUnselect
.image(width: 20, height: 20),
activeIcon: Assets.icon.bottomNavProfileSelect
.image(width: 20, height: 20),
label: s?.bottomNavProfile),
],
onTap: (index) {
_currentIndex.value = index;
},
));
}));
}
}

lib/scenes/home/home_logic.dart (+0 -92)

@@ -1,92 +0,0 @@
import 'dart:convert';

import 'package:demo001/scenes/login/login_view.dart';
import 'package:demo001/scenes/public.dart';
import 'package:demo001/tools/http_utils.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:get/get.dart';
import 'package:logger/logger.dart';
import 'package:permission_handler/permission_handler.dart';
import 'home_state.dart';

/// @description:
/// @author
/// @date: 2025-01-07 15:51:34
class HomeLogic extends GetxController {
final state = HomeState();

// 1. Bluetooth device connected, 2. Bluetooth device disconnected
final String _bluetoothConnectStateKey = "bluetoothConnected";
final String _bluetoothDisConnectStateKey = "bluetoothDisConnect";
// 1. Already-connected Bluetooth device
final String _bluetoothAlreadyConnectKey = "bluetoothAlreadyConnected";

@override
void onInit() {
super.onInit();
_getLoginInfo();
_checkPermission();
_checkConnectDevice();
_checkBluetoothStatus();
}

void _checkPermission() {
Permission.microphone.request();
state.methodChannel.invokeMethod("bluetoothPermissionRequest");
}

void _checkBluetoothStatus() {}

/// Fetch the stored login info
void _getLoginInfo() async {
final info = await getSharedLoginInfo();
if (info != null) {
state.loginModel = info;
_getAliToken();
} else {
EasyLoading.showError('登录过期');
Get.offAll(() => LoginScene());
}
}

void _getAliToken() {
ApiClient.post(
url: '/api/home/auth_alitoken',
token: state.loginModel!.data!.token!,
param: {},
onSuccess: (data) {
// Logger().i('---------_getAliToken--------token-:${data.runtimeType}');
// Logger().i('---------_getAliToken--------token-:${data['data']['token']}');
state.loginModel?.ali_appkey = data['data']['appkey'];
state.loginModel?.ali_token = data['data']['token'];
},
onFailed: (msg) {
Logger().e('-------_getAliToken-------error-$msg');
});
}

/// Listen for data sent back from the native side
void _phoneCallRecordingListenner() {
state.eventChannel.receiveBroadcastStream((data) {});
}

// Start monitoring the Bluetooth device connection state
void _checkConnectDevice() {
state.methodChannel.invokeMethod('checkConnectedBluetooth');
}

/// Call-translation button tapped
void startPhoneCallRecording() {
state.methodChannel.invokeMethod('startPhoneCallRecording');
}

/// Stop call recording
void stopPhoneCallRecording() {
state.methodChannel.invokeMethod('stopPhoneCallRecording');
}

// Open the Bluetooth scanning screen
void toBonding() {}

///************** Call recording translation ************** */
}

lib/scenes/home/home_state.dart (+0 -23)

@@ -1,23 +0,0 @@
import 'package:demo001/scenes/login/model/login_model.dart';
import 'package:flutter/services.dart';
import 'package:get/get.dart';

/// @description:
/// @author
/// @date: 2025-01-07 15:51:34
class HomeState {
HomeState() {
///Initialize variables
}

EventChannel eventChannel = EventChannel('eventChannel');

MethodChannel methodChannel = MethodChannel('methodChannel');

RxString originText = 'english'.obs;
RxString translationText = '英语'.obs;

RxString phoneNum = ''.obs;

LoginModel? loginModel;
}

lib/scenes/home/home_view.dart (+0 -235)

@@ -1,235 +0,0 @@
import 'dart:io';
import 'dart:typed_data';

import 'package:audioplayers/audioplayers.dart';
import 'package:demo001/gen/assets.gen.dart';
import 'package:demo001/generated/l10n.dart';
import 'package:demo001/main.dart';
import 'package:demo001/tools/color_utils.dart';
import 'package:demo001/tools/textStyle.dart';
import 'package:demo001/tools/widgets.dart';
import 'package:flutter/material.dart';
import 'package:gap/gap.dart';
import 'package:get/get.dart';
import 'package:logger/logger.dart';
import 'package:path_provider/path_provider.dart';

import 'home_logic.dart';
import 'home_state.dart';

/// @description:
/// @author
/// @date: 2025-01-07 15:51:34
class HomePage extends StatelessWidget {
final HomeLogic logic = Get.put(HomeLogic());
final HomeState state = Get.find<HomeLogic>().state;

double width = 0;
double height = 0;

@override
Widget build(BuildContext context) {
s = S.of(context);
width = MediaQuery.of(context).size.width;
height = MediaQuery.of(context).size.height;
return Scaffold(
backgroundColor: bgColor,
body: SafeArea(
child: Container(
padding: EdgeInsets.all(10),
alignment: Alignment.center,
child: SingleChildScrollView(
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Container(
alignment: Alignment.center,
child: Obx(() => Text(
state.phoneNum.value,
style: TextStyle(
fontSize: 18,
fontWeight: FontWeight.bold,
color: white),
)),
),
const Gap(12),
Center(
child: Image.asset(
'assets/icon/home_img.png',
width: width * 0.5,
fit: BoxFit.fitWidth,
),
),
const Gap(20),
GestureDetector(
onTap: logic.toBonding,
child: Center(
child: Container(
height: 40,
decoration: BoxDecoration(
color: white,
borderRadius: BorderRadius.circular(20),
),
child: Row(
mainAxisSize: MainAxisSize.min,
children: [
const Gap(20),
Container(
width: 20,
height: 20,
alignment: Alignment.center,
decoration: BoxDecoration(
color: blue,
borderRadius: BorderRadius.circular(4)),
child: Icon(
Icons.bluetooth,
color: white,
size: 15,
),
),
Gap(5),
Obx(() => Text(
'请连接耳机',
style: TextStyle(fontSize: 14, color: black),
)),
const Gap(20),
],
),
),
),
),
const Gap(10),
Text(
'智能工具',
style: TextStyle(
fontSize: 18, fontWeight: FontWeight.bold, color: white),
),
const Gap(10),
GestureDetector(
onTap: () {
//Get.to(() => AiChatPage(loginModel: state.loginModel!));
},
child: Stack(
children: [
Assets.icon.homeAiModel.image(fit: BoxFit.fitHeight),
Positioned(
left: 20,
top: 120 * 0.5 -
textSize('AI对话模式', Style.homeItemTextStyle).height *
0.5,
child: Text(
'AI对话模式',
style: Style.homeItemTextStyle,
),
)
],
),
),
const Gap(10),
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
GestureDetector(
onTap: () {
//Get.to(() => Tcty_chatPage());
},
child: Stack(
children: [
Assets.icon.homeIconTcty.image(
width: (width - 30) * 0.5, fit: BoxFit.fitWidth),
Positioned(
left: (width - 30) * 0.25 -
textSize('同声传译', Style.homeItemTextStyle)
.width *
0.5,
top: (width - 30) * 0.5 - 50,
child: Text(
'同声传译',
style: Style.homeItemTextStyle,
),
)
],
),
),
GestureDetector(
onTap: () async {},
child: Stack(
children: [
Assets.icon.homeIconMdmfy.image(
width: (width - 30) * 0.5, fit: BoxFit.fitWidth),
Positioned(
left: (width - 30) * 0.25 -
textSize('面对面翻译', Style.homeItemTextStyle)
.width *
0.5,
top: (width - 30) * 0.5 - 50,
child: Text(
'面对面翻译',
style: Style.homeItemTextStyle,
),
)
],
),
),
],
),
const Gap(10),
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
GestureDetector(
onTap: () {
// Get.to(() => PhonecallAudioTranslatePage());
// state.methodChannel
// .invokeMethod('googleRealTimeSpeechToText');
},
child: Stack(
children: [
Assets.icon.homeIconThyyfy.image(
width: (width - 30) * 0.5, fit: BoxFit.fitWidth),
Positioned(
left: (width - 30) * 0.25 -
textSize('通话语音翻译', Style.homeItemTextStyle)
.width *
0.5,
top: (width - 30) * 0.5 - 50,
child: Text(
'通话语音翻译',
style: Style.homeItemTextStyle,
),
)
],
),
),
GestureDetector(
onTap: () {
// Get.to(() => TransRecordsPage());
},
child: Stack(
children: [
Assets.icon.homeIconFyjl.image(
width: (width - 30) * 0.5, fit: BoxFit.fitWidth),
Positioned(
left: (width - 30) * 0.25 -
textSize('翻译记录', Style.homeItemTextStyle)
.width *
0.5,
top: (width - 30) * 0.5 - 50,
child: Text(
'翻译记录',
style: Style.homeItemTextStyle,
),
)
],
),
),
],
)
],
),
),
),
),
);
}
}

lib/scenes/login/login_logic.dart (+0 -105)

@@ -1,105 +0,0 @@
import 'dart:async';
import 'dart:convert';

import 'package:demo001/main.dart';
import 'package:demo001/scenes/public.dart';
import 'package:demo001/tools/http_utils.dart';
import 'package:flutter/material.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:get/get.dart';
import 'package:shared_preferences/shared_preferences.dart';
import 'package:demo001/tools/widgets.dart';

import 'login_state.dart';

/// @description:
/// @author
/// @date: 2025-01-11 17:25:10
class LoginLogic extends GetxController {
final state = LoginState();

@override
void onInit() {
super.onInit();
}

@override
void onReady() {
super.onReady();
_getLoginInfo();
_checkPermission();
}

void _checkPermission() {
state.methodChannel.invokeMethod("bluetoothPermissionRequest");
}

void _getLoginInfo() async {
EasyLoading.show();
final info = await getSharedLoginInfo();
if (info != null) {
state.emailContr.text = info.data?.user?.mail ?? '';
Future.delayed(1.seconds, () {
EasyLoading.dismiss();
Get.offAll(() => IndexWidget());
});
} else {
EasyLoading.dismiss();
}
}

void getEmailPin() {
if (state.countDown.value != 60) return;
state.pinCodeTimer = Timer.periodic(1.seconds, (t) {
if (state.countDown.value <= 0) {
t.cancel();
state.pinCodeTimer?.cancel();
state.pinCodeTimer = null;
state.countDown.value = 60;
} else {
state.countDown.value = state.countDown.value - 1;
}
});
EasyLoading.show();
ApiClient.post(
url: ApiClient.getPin,
param: {"addr": state.emailContr.text, "vtype": 0},
onSuccess: (data) {
EasyLoading.showInfo('验证码已发送,请到邮箱查看!');
},
onFailed: (msg) {
EasyLoading.showError(msg);
});
}

void login() {
FocusScope.of(Get.context!).unfocus();
if (!isEmail(state.emailContr.text)) {
EasyLoading.showError('邮箱错误!');
return;
}
if (state.pinContr.text.length != 4) {
EasyLoading.showError('验证码错误!');
return;
}
EasyLoading.show();
ApiClient.post(
url: ApiClient.login,
param: {
"mail": state.emailContr.text,
"openid": "",
"phone": "",
"stype": 0,
"vcode": state.pinContr.text
},
onSuccess: (data) async {
EasyLoading.dismiss();
final instance = await SharedPreferences.getInstance();
await instance.setString('loginInfo', jsonEncode(data));
Get.offAll(() => IndexWidget());
},
onFailed: (msg) {
EasyLoading.showError(msg);
});
}
}

lib/scenes/login/login_state.dart (+0 -32)

@@ -1,32 +0,0 @@
import 'dart:async';

import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:get/get.dart';

/// @description:
/// @author
/// @date: 2025-01-11 17:25:10
class LoginState {
TextEditingController phoneContr = TextEditingController();
TextEditingController pinContr = TextEditingController();
TextEditingController emailContr = TextEditingController();

MethodChannel methodChannel = MethodChannel('methodChannel');

FocusNode phoneNode = FocusNode();
FocusNode emailNode = FocusNode();
FocusNode pinNode = FocusNode();

// RxString phoneNumber = ''.obs;

RxString countryCode = '86'.obs;

RxBool protocolCheck = false.obs;

Timer? pinCodeTimer;

RxInt countDown = 60.obs;

RxBool isWechatLogin = false.obs;
}

lib/scenes/login/login_view.dart (+0 -317)

@@ -1,317 +0,0 @@
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_keyboard_visibility/flutter_keyboard_visibility.dart';
import 'package:gap/gap.dart';
import 'package:get/get.dart';
import 'package:demo001/tools/color_utils.dart';
import 'package:demo001/tools/textStyle.dart';
import 'package:demo001/tools/widgets.dart';

import 'login_logic.dart';
import 'login_state.dart';

/// @description:
/// @author
/// @date: 2025-01-11 17:25:10
class LoginScene extends StatelessWidget {
final LoginLogic logic = Get.put(LoginLogic());
final LoginState state = Get.find<LoginLogic>().state;

LoginScene({Key? key}) : super(key: key);

double width = 0;
double height = 0;

@override
Widget build(BuildContext context) {
width = MediaQuery.of(context).size.width;
height = MediaQuery.of(context).size.height;
return Scaffold(
backgroundColor: Color.fromARGB(255, 18, 19, 24),
body: KeyboardDismissOnTap(
child: Container(
height: height,
padding: EdgeInsets.all(8),
child: SingleChildScrollView(
child: Column(
mainAxisSize: MainAxisSize.max,
children: [
Gap(45),
topWidget,
Gap(50),
logo,
Gap(15),
emailInput,
Gap(15),
pinInput,
Gap(30),
loginBtn,
Gap(80),
otherLogin,
Gap(180),
protocolWidget,
Gap(15),
],
),
),
),
),
);
}

Widget get topWidget => Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Text(
'联系客服',
style: Style.clean,
),
Text(
'登录/注册账号',
style: Style.navTitle,
),
Text(
'联系客服',
style: Style.normalWhiteGrey,
)
],
);

Widget get logo => Container(
child: Icon(
Icons.logo_dev,
size: 100,
color: white,
),
);

Widget get phoneInput => SizedBox(
width: width * 0.9,
child: Container(
height: 45,
decoration: BoxDecoration(
color: inputBgColor, borderRadius: BorderRadius.circular(8)),
child: Obx(() => TextField(
controller: state.phoneContr,
focusNode: state.phoneNode,
textInputAction: TextInputAction.next,
style: Style.normalBold,
keyboardType: TextInputType.phone,
inputFormatters: state.countryCode.value == '86'
? [
FilteringTextInputFormatter.allow(RegExp(r'[0-9]')),
LengthLimitingTextInputFormatter(11),
]
: [
FilteringTextInputFormatter.allow(RegExp(r'[0-9]')),
],
decoration: InputDecoration(
border: InputBorder.none,
hintText: '请输入手机号',
contentPadding: EdgeInsets.symmetric(horizontal: 8),
),
onSubmitted: (value) {
if (value.length != 11) {
EasyLoading.showError('手机号码错误!');
} else {
state.emailNode.nextFocus();
}
},
)),
),
);

Widget get emailInput => SizedBox(
width: width * 0.9,
child: Container(
height: 45,
decoration: BoxDecoration(
color: inputBgColor, borderRadius: BorderRadius.circular(8)),
child: TextField(
controller: state.emailContr,
focusNode: state.emailNode,
style: Style.normalBold,
textInputAction: TextInputAction.next,
keyboardType: TextInputType.emailAddress,
decoration: InputDecoration(
border: InputBorder.none,
hintText: '请输入邮箱',
contentPadding: EdgeInsets.symmetric(horizontal: 8),
),
onSubmitted: (value) {
final emailRegex =
RegExp(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$');
if (!emailRegex.hasMatch(state.emailContr.text)) {
EasyLoading.showError('邮箱输入错误,请重新输入!');
} else {
state.pinNode.nextFocus();
}
},
),
),
);

Widget get pinInput => SizedBox(
width: width * 0.9,
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Container(
height: 45,
width: width * 0.5,
decoration: BoxDecoration(
color: inputBgColor, borderRadius: BorderRadius.circular(8)),
child: Obx(() => TextField(
controller: state.pinContr,
focusNode: state.pinNode,
style: Style.normalBold,
textInputAction: TextInputAction.send,
keyboardType: TextInputType.phone,
inputFormatters: state.countryCode.value == '86'
? [
FilteringTextInputFormatter.allow(RegExp(r'[0-9]')),
LengthLimitingTextInputFormatter(4),
]
: [
FilteringTextInputFormatter.allow(RegExp(r'[0-9]')),
],
decoration: InputDecoration(
border: InputBorder.none,
hintText: '请输入验证码',
contentPadding: EdgeInsets.symmetric(horizontal: 8),
),
onSubmitted: (value) {
if (value.length != 4) {
EasyLoading.showError("验证码错误");
} else {
logic.login();
}
},
)),
),
GestureDetector(
onTap: () {
if (!isEmail(state.emailContr.text)) {
EasyLoading.showError('邮箱输入错误,请重新输入!');
} else {
logic.getEmailPin();
}
},
child: Container(
height: 45,
width: width * 0.35,
alignment: Alignment.center,
decoration: BoxDecoration(
color: lightBlue, borderRadius: BorderRadius.circular(8)),
child: Obx(() => Text(
state.countDown.value == 60
? '获取验证码'
: '${state.countDown.value}S后重新获取',
style: Style.normalWhiteGrey,
)),
),
)
],
),
);

Widget get loginBtn => GestureDetector(
onTap: logic.login,
child: Container(
height: 45,
width: width * 0.9,
alignment: Alignment.center,
decoration: BoxDecoration(
color: blue, borderRadius: BorderRadius.circular(8)),
child: Obx(() => Text(
state.isWechatLogin.value ? '确定绑定' : '登录',
style: Style.normalBold,
)),
),
);

Widget get otherLogin => Container(
child: Column(
children: [
Row(
mainAxisAlignment: MainAxisAlignment.center,
children: [
Container(
color: white.withAlpha(30),
height: 1,
width: width * 0.25,
),
Text(
' 其他登录方式 ',
style: Style.normalSmallWhiteGrey,
),
Container(
color: white.withAlpha(30),
height: 1,
width: width * 0.25,
),
],
),
Gap(15),
GestureDetector(
onTap: () {
state.isWechatLogin.value = !state.isWechatLogin.value;
},
child: Icon(
Icons.wechat,
size: 60,
color: green,
),
),
],
),
);

Widget get protocolWidget => Row(
mainAxisSize: MainAxisSize.max,
mainAxisAlignment: MainAxisAlignment.center,
children: [
GestureDetector(
onTap: () {
state.protocolCheck.value = !state.protocolCheck.value;
},
child: Row(
children: [
Obx(() => Container(
child: Icon(
state.protocolCheck.value
? Icons.check_circle
: Icons.radio_button_unchecked,
color: state.protocolCheck.value ? blue : white,
size: 16,
),
)),
Text(
' 我已阅读并同意',
style: Style.normalSmall2,
),
],
),
),
GestureDetector(
onTap: () {},
child: Text(
'《用户服务协议》',
style: Style.normalBlueSmall2,
),
),
Text(
'和',
style: Style.normalSmall2,
),
GestureDetector(
onTap: () {},
child: Text(
'《用户服务协议》',
style: Style.normalBlueSmall2,
),
)
],
);
}

lib/scenes/login/model/data.dart (+0 -20)

@@ -1,20 +0,0 @@
import 'package:json_annotation/json_annotation.dart';

import 'user.dart';

part 'data.g.dart';

@JsonSerializable()
class Data {
String? token;
User? user;

Data({this.token, this.user});

@override
String toString() => 'Data(token: $token, user: $user)';

factory Data.fromJson(Map<String, dynamic> json) => _$DataFromJson(json);

Map<String, dynamic> toJson() => _$DataToJson(this);
}

lib/scenes/login/model/data.g.dart (+0 -19)

@@ -1,19 +0,0 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

part of 'data.dart';

// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************

Data _$DataFromJson(Map<String, dynamic> json) => Data(
token: json['token'] as String?,
user: json['user'] == null
? null
: User.fromJson(json['user'] as Map<String, dynamic>),
);

Map<String, dynamic> _$DataToJson(Data instance) => <String, dynamic>{
'token': instance.token,
'user': instance.user,
};

lib/scenes/login/model/login_model.dart (+0 -26)

@@ -1,26 +0,0 @@
import 'package:json_annotation/json_annotation.dart';

import 'data.dart';

part 'login_model.g.dart';

@JsonSerializable()
class LoginModel {
int? code;
Data? data;
String? msg;

String? ali_token;
String? ali_appkey;

LoginModel({this.code, this.data, this.msg});

@override
String toString() => 'LoginModel(code: $code, data: $data, msg: $msg)';

factory LoginModel.fromJson(Map<String, dynamic> json) {
return _$LoginModelFromJson(json);
}

Map<String, dynamic> toJson() => _$LoginModelToJson(this);
}

lib/scenes/login/model/login_model.g.dart (+0 -22)

@@ -1,22 +0,0 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

part of 'login_model.dart';

// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************

LoginModel _$LoginModelFromJson(Map<String, dynamic> json) => LoginModel(
code: (json['code'] as num?)?.toInt(),
data: json['data'] == null
? null
: Data.fromJson(json['data'] as Map<String, dynamic>),
msg: json['msg'] as String?,
);

Map<String, dynamic> _$LoginModelToJson(LoginModel instance) =>
<String, dynamic>{
'code': instance.code,
'data': instance.data,
'msg': instance.msg,
};

lib/scenes/login/model/user.dart (+0 -37)

@@ -1,37 +0,0 @@
import 'package:json_annotation/json_annotation.dart';

part 'user.g.dart';

@JsonSerializable()
class User {
String? avatar;
int? createtime;
String? language;
int? lastbettime;
String? mail;
String? name;
String? phone;
String? uid;
String? wxopenid;

User({
this.avatar,
this.createtime,
this.language,
this.lastbettime,
this.mail,
this.name,
this.phone,
this.uid,
this.wxopenid,
});

@override
String toString() {
return 'User(avatar: $avatar, createtime: $createtime, language: $language, lastbettime: $lastbettime, mail: $mail, name: $name, phone: $phone, uid: $uid, wxopenid: $wxopenid)';
}

factory User.fromJson(Map<String, dynamic> json) => _$UserFromJson(json);

Map<String, dynamic> toJson() => _$UserToJson(this);
}

lib/scenes/login/model/user.g.dart (+0 -31)

@@ -1,31 +0,0 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

part of 'user.dart';

// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************

User _$UserFromJson(Map<String, dynamic> json) => User(
avatar: json['avatar'] as String?,
createtime: (json['createtime'] as num?)?.toInt(),
language: json['language'] as String?,
lastbettime: (json['lastbettime'] as num?)?.toInt(),
mail: json['mail'] as String?,
name: json['name'] as String?,
phone: json['phone'] as String?,
uid: json['uid'] as String?,
wxopenid: json['wxopenid'] as String?,
);

Map<String, dynamic> _$UserToJson(User instance) => <String, dynamic>{
'avatar': instance.avatar,
'createtime': instance.createtime,
'language': instance.language,
'lastbettime': instance.lastbettime,
'mail': instance.mail,
'name': instance.name,
'phone': instance.phone,
'uid': instance.uid,
'wxopenid': instance.wxopenid,
};

lib/scenes/public.dart (+0 -14)

@@ -1,14 +0,0 @@
import 'dart:convert';

import 'package:demo001/scenes/login/model/login_model.dart';
import 'package:shared_preferences/shared_preferences.dart';

Future<LoginModel?> getSharedLoginInfo() async {
final instance = await SharedPreferences.getInstance();
final loginInfoStr = instance.getString('loginInfo');
if (loginInfoStr != null && loginInfoStr != '') {
return LoginModel.fromJson(jsonDecode(loginInfoStr));
} else {
return null;
}
}

lib/scenes/translate/TranslateLogic.dart (+111 -4)

@@ -2,10 +2,12 @@ import 'dart:convert';
import 'dart:io';
import 'dart:typed_data';

import 'package:audio_waveforms/audio_waveforms.dart';
import 'package:demo001/scenes/translate/TranslateState.dart';
import 'package:demo001/tools/audio_tool.dart';
import 'package:demo001/xunfei/phonetic_dictation/phonetic_dictaion_model/w.dart';
import 'package:demo001/xunfei/recognition_result/recognition_content/recognition_content.dart';
import 'package:demo001/xunfei/streamtrans_result/trans_content/trans_content.dart';
import 'package:demo001/xunfei/xunfei_translate.dart';
import 'package:get/get.dart';
import 'package:logger/logger.dart';
@@ -53,6 +55,11 @@ class TranslateLogic extends GetxController {
} else {
_log('麦克风权限被拒绝');
}
if (await Permission.storage.request().isGranted) {
// permission granted
} else {
_log('存储功能未相授权,无法保存录音文件!!!');
}
} catch (e) {
_log('请求麦克风权限失败: $e');
}
@@ -141,6 +148,7 @@ class TranslateLogic extends GetxController {
});
});
if (text.trim() == '') return;
print("添加到对象列表中 $text ");
state.kdxfSentenceList[index].content = text;
state.kdxfSentenceList.refresh();
// }
@@ -155,12 +163,13 @@ class TranslateLogic extends GetxController {
});
});
if (text == '') return;
print("添加到对象列表中 $text ");
model = KDXFSentenceModel(
content: text,
sid: result.header?.sid ?? '',
transResult: '',
audioPath: '',
perviousWs: (ctx.ws ?? []) as List<W>,
perviousWs: ctx.ws ?? [],
);
state.kdxfSentenceList.add(model);
state.kdxfSentenceList.refresh();
@@ -170,12 +179,110 @@ class TranslateLogic extends GetxController {
}
}

void _handleTranslateData(result) {}
void _handleTranslateData(result) {
KDXFSentenceModel model;
String text = "";
try {
final index = state.kdxfSentenceList.indexWhere((KDXFSentenceModel e) {
return e.sid == result.header?.sid;
});
if (index != -1) {
model = state.kdxfSentenceList[index];
final transStr = utf8.decode(
base64Decode(result.payload?.streamtransResults?.text ?? ''));

void _handleAudioData(AudioModel model) {}
final trctx = TransContent.fromJson(jsonDecode(transStr));

if (trctx.isFinal == 0) {
// update
String text = '';
model.isFinalTransString.forEach((e) {
text = text + e;
});
text = text + (trctx.dst ?? '');
model.transResult = text;
state.kdxfSentenceList.refresh();
} else if (trctx.isFinal == 1) {
model.isFinalTransString.add(trctx.dst ?? '');
String text = '';
model.isFinalTransString.forEach((e) {
text = text + e;
});
model.transResult = text;
state.kdxfSentenceList.refresh();
}
} else {
final transStr = utf8.decode(
base64Decode(result.payload?.streamtransResults?.text ?? ''));
final textModel = TransContent.fromJson(jsonDecode(transStr));
text = textModel.dst ?? '';
final model = KDXFSentenceModel(
content: '',
sid: result.header?.sid ?? '',
transResult: text,
audioPath: '',
perviousWs: []);
state.kdxfSentenceList.add(model);
state.kdxfSentenceList.refresh();
}
} catch (e) {
print("接收识别结果异常 $e");
}
}

// Handle audio data
void _handleAudioData(AudioModel audo) {
KDXFSentenceModel model;
AudioTool.share.addAudioChunk(
model: audo,
onPlaying: () {},
onPlayEnd: () {},
onPath: (path) async {
final file = File(path);
if (file.existsSync()) {
print("File exists");
} else {
print("File does not exist");
}
if (state.kdxfSentenceList.isNotEmpty) {
try {
print("收到音频数据:$path --------------------------");
final index =
state.kdxfSentenceList.indexWhere((KDXFSentenceModel e) {
return e.sid == audo.sid;
});
model = state.kdxfSentenceList[index];
model.audioPath = path;
print("初始化和播放器 !");
await model.playerController.preparePlayer(path: path);
int duration = (await model.playerController.getDuration());
model.audioDuration = duration;
print("设置播放模式 !");
model.playerController
.setFinishMode(finishMode: FinishMode.pause);
if (!xunfei.isspeaking) {
print("可以播放:$path --------------------------");
xunfei.isRecording = false;
// not speaking, so the clip can be played
await model.playerController.startPlayer();

model.playerController.onCompletion.listen((_) {
model.playerController.seekTo(0);
xunfei.isRecording = true;
});
}
state.kdxfSentenceList.refresh();
} catch (e) {
print("播放音频异常:$e");
}
}
});
}

void kdxfPlayAudio(KDXFSentenceModel model) async {
model.playerController.startPlayer();
if (!xunfei.isspeaking) {
model.playerController.startPlayer();
}
}

void _log(String msg) {

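The new _handleTranslateData keeps, per sentence, the segments the engine has marked final (isFinalTransString) and the text currently displayed (transResult): a partial segment (isFinal == 0) is appended to the confirmed text for display only, while a final segment (isFinal == 1) is stored so later partials build on it. A standalone sketch of that accumulation, with illustrative names rather than the real KDXFSentenceModel:

/// Mirrors the isFinal == 0 / isFinal == 1 handling above.
class TransAccumulator {
  final List<String> finalSegments = []; // plays the role of isFinalTransString
  String display = ''; // plays the role of transResult

  void onSegment({required int isFinal, required String dst}) {
    if (isFinal == 1) {
      // The engine will not revise this segment again: keep it.
      finalSegments.add(dst);
      display = finalSegments.join();
    } else {
      // Provisional segment: show it after the confirmed text, but do not
      // store it, so the next partial result simply replaces it.
      display = finalSegments.join() + dst;
    }
  }
}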

lib/scenes/translate/TranslateScene.dart (+71 -78)

@@ -13,67 +13,63 @@ import 'TranslateState.dart';
*/
// ignore: must_be_immutable
class TranslateScene extends StatelessWidget {
final TranslateLogic logic = Get.put(TranslateLogic());
final TranslateLogic logic = Get.put(TranslateLogic()); // register the controller
final TranslateState state = Get.find<TranslateLogic>().state;

TranslateScene({super.key});
double width = 0;
double height = 0;

@override
Widget build(BuildContext context) {
width = MediaQuery.of(context).size.width;
height = MediaQuery.of(context).size.height;
double width = MediaQuery.of(context).size.width;
double height = MediaQuery.of(context).size.height;

return Scaffold(
appBar: AppBar(
title: Text(
"同声传译",
style: Style.navTitle,
)),
body: ListView.builder(
itemCount: state.kdxfSentenceList.length,
itemBuilder: (context, index) {
var audio = state.kdxfSentenceList[index];
return _buildAudioMessage(audio);
},
title: Text(
"同声传译",
style: Style.navTitle,
),
),
body: Obx(() => ListView.builder(
itemCount: state.kdxfSentenceList.length,
itemBuilder: (context, index) {
var audio = state.kdxfSentenceList[index];
return _buildAudioMessage(audio, width);
},
)),
bottomNavigationBar: Padding(
padding: const EdgeInsets.all(20.0),
child: InkWell(
onTap: logic.toggleCallStatus,
child: Obx(() => Container(
decoration: BoxDecoration(
borderRadius: BorderRadius.circular(30), // rounded button
color: state.isRecording.value
? Colors.red
: Colors.green, // red while in a call, green otherwise
padding: const EdgeInsets.all(20.0),
child: InkWell(
onTap: logic.toggleCallStatus,
child: Obx(() => Container(
decoration: BoxDecoration(
borderRadius: BorderRadius.circular(30),
color: state.isRecording.value ? Colors.red : Colors.green,
),
padding: EdgeInsets.symmetric(vertical: 15, horizontal: 40),
child: Row(
mainAxisAlignment: MainAxisAlignment.center,
children: [
Icon(
state.isRecording.value ? Icons.call_end : Icons.mic,
color: Colors.white,
size: 30,
),
padding: EdgeInsets.symmetric(
vertical: 15, horizontal: 40), // adjusts the button size
child: Row(
mainAxisAlignment: MainAxisAlignment.center,
children: [
Icon(
state.isRecording.value
? Icons.call_end
: Icons.mic, // icon changes with the call state
color: Colors.white,
size: 30,
),
SizedBox(width: 10),
Text(
state.isRecording.value ? '挂断' : '开始通话', // label changes with the call state
style: TextStyle(
color: Colors.white,
fontSize: 18,
),
),
],
SizedBox(width: 10),
Text(
state.isRecording.value ? '挂断' : '开始通话',
style: TextStyle(color: Colors.white, fontSize: 18),
),
)))),
],
),
)),
),
),
);
}

// Builds an audio message item
Widget _buildAudioMessage(KDXFSentenceModel model) {
Widget _buildAudioMessage(KDXFSentenceModel model, double width) {
return Padding(
padding: const EdgeInsets.symmetric(vertical: 10, horizontal: 15),
child: Row(
@@ -100,46 +96,43 @@ class TranslateScene extends StatelessWidget {
style: Style.normal,
maxLines: 1000,
),
Obx(() => Visibility(
visible: model.transResult != '', child: Gap(5))),
Obx(() => Visibility(
visible: model.transResult != '',
child: Text(
Visibility(
visible: model.transResult.isNotEmpty,
child: Column(
children: [
Gap(5),
Text(
textAlign: TextAlign.start,
model.transResult,
style: Style.normal,
maxLines: 1000,
),
)),
Obx(() => Visibility(
visible: model.audioDuration > 0,
child: const Gap(10),
)),
Obx(
() => Visibility(
visible: model.audioDuration > 0,
child: GestureDetector(
onTap: () {
logic.kdxfPlayAudio(model);
},
child: ClipRRect(
borderRadius: BorderRadius.circular(5),
child: AudioFileWaveforms(
size: Size(165, 30),
decoration: BoxDecoration(
border: Border.all(color: orange, width: 0.5),
borderRadius: BorderRadius.circular(5),
),
enableSeekGesture: false,
waveformType: WaveformType.fitWidth,
playerWaveStyle: PlayerWaveStyle(
waveThickness: 2, scaleFactor: 50),
playerController: model.playerController,
],
), // checks whether transResult is empty
),
Visibility(
visible: model.audioDuration > 0,
child: GestureDetector(
onTap: () {
logic.kdxfPlayAudio(model);
},
child: ClipRRect(
borderRadius: BorderRadius.circular(5),
child: AudioFileWaveforms(
size: Size(165, 30),
decoration: BoxDecoration(
border: Border.all(color: orange, width: 0.5),
borderRadius: BorderRadius.circular(5),
),
enableSeekGesture: false,
waveformType: WaveformType.fitWidth,
playerWaveStyle: PlayerWaveStyle(
waveThickness: 2, scaleFactor: 50),
playerController: model.playerController,
),
),
),
)
),
],
),
),

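Wrapping ListView.builder in Obx is what makes the list rebuild whenever kdxfSentenceList (a GetX RxList) is mutated or refresh()ed in TranslateLogic. A minimal sketch of the pattern, with a simplified controller standing in for TranslateLogic:

import 'package:flutter/material.dart';
import 'package:get/get.dart';

class SentencesController extends GetxController {
  final sentences = <String>[].obs; // RxList: mutations notify Obx listeners
  void add(String s) => sentences.add(s); // rebuilds every Obx reading the list
}

class SentenceList extends StatelessWidget {
  SentenceList({super.key});
  final SentencesController c = Get.put(SentencesController());

  @override
  Widget build(BuildContext context) {
    // Obx re-runs this builder whenever c.sentences changes, which is why
    // the commit wraps ListView.builder the same way.
    return Obx(() => ListView.builder(
          itemCount: c.sentences.length,
          itemBuilder: (_, i) => ListTile(title: Text(c.sentences[i])),
        ));
  }
}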

lib/tools/audio_tool.dart (+19 -7)

@@ -1,10 +1,7 @@
import 'dart:async';
import 'dart:io';
import 'dart:typed_data';

import 'package:audioplayers/audioplayers.dart';
import 'package:get/get.dart';
import 'package:logger/logger.dart';
import 'package:path_provider/path_provider.dart';

class AudioTool {
@@ -31,19 +28,31 @@ class AudioTool {
await player.dispose();
}

void addAudioChunk({required AudioModel model, required Function(String) onPath, required Function() onPlaying, required Function() onPlayEnd}) async {
void addAudioChunk(
{required AudioModel model,
required Function(String) onPath,
required Function() onPlaying,
required Function() onPlayEnd}) async {
if (model.status == 0 || model.status == 1) {
_audioChunks.add(model.data);
_playTemps.add(model.data);
} else if (model.status == 2) {
_audioChunks.add(model.data);
_playTemps.add(model.data);
final path = await mergeAudioChunks(sid: model.sid, chunks: _playTemps, onPlaying: onPlaying, onPlayEnd: onPlayEnd);
final path = await mergeAudioChunks(
sid: model.sid,
chunks: _playTemps,
onPlaying: onPlaying,
onPlayEnd: onPlayEnd);
onPath(path);
}
}

Future<String> mergeAudioChunks({required String sid, required List<Uint8List> chunks, required Function() onPlaying, required Function() onPlayEnd}) async {
Future<String> mergeAudioChunks(
{required String sid,
required List<Uint8List> chunks,
required Function() onPlaying,
required Function() onPlayEnd}) async {
// compute the total length
int totalLength = chunks.fold(0, (sum, chunk) => sum + chunk.length);

@@ -73,7 +82,10 @@ class AudioTool {
return path;
}

void play({required Uint8List bytes, required Function() onPlaying, required Function() onPlayEnd}) async {
void play(
{required Uint8List bytes,
required Function() onPlaying,
required Function() onPlayEnd}) async {
// Logger().f('------_playBuff--------${_playBuff.length}');
// if (_playBuff.isNotEmpty) {
await player.onPlayerStateChanged.listen((sta) async {

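addAudioChunk buffers incoming chunks and, once a chunk arrives with status == 2, mergeAudioChunks sums the chunk lengths and copies them into one contiguous buffer before handing a file path back through onPath. A standalone sketch of just the concatenation step (the function name is illustrative):

import 'dart:typed_data';

/// Concatenates audio chunks the same way mergeAudioChunks does:
/// total the lengths, then copy each chunk into one contiguous buffer.
Uint8List concatChunks(List<Uint8List> chunks) {
  final totalLength = chunks.fold<int>(0, (sum, chunk) => sum + chunk.length);
  final merged = Uint8List(totalLength);
  var offset = 0;
  for (final chunk in chunks) {
    merged.setRange(offset, offset + chunk.length, chunk);
    offset += chunk.length;
  }
  return merged;
}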

lib/xunfei/recognition_result/recognition_content/recognition_content.dart (+1 -2)

@@ -1,7 +1,6 @@
import 'package:json_annotation/json_annotation.dart';

import 'w.dart';

import 'package:demo001/xunfei/phonetic_dictation/phonetic_dictaion_model/w.dart';
part 'recognition_content.g.dart';

@JsonSerializable()


lib/xunfei/xunfei_translate.dart (+37 -31)

@@ -13,15 +13,15 @@ import 'package:web_socket_channel/web_socket_channel.dart';
class XunFeiTranslate {
final int _chunkSize = 1280; // size of each audio chunk sent
// Volume thresholds
final double _speakingThreshold = 50.0; // threshold for detecting the start of speech
final double _silenceThreshold = 30.0; // threshold for detecting the end of speech
final double _speakingThreshold = 1000.0; // threshold for detecting the start of speech
final double _silenceThreshold = 500.0; // threshold for detecting the end of speech
final Duration _silenceDuration = Duration(seconds: 1); // how long the volume must stay below the threshold
DateTime? _lastBelowThresholdTime; // last time the volume fell below the threshold
double _volume = 0; // current volume

Uint8List _buff = Uint8List(0); // audio buffer
bool _isrecord = false; // whether recording
bool _isspeaking = false; // whether speaking
bool isRecording = false; // whether recording
bool isspeaking = false; // whether speaking
Timer? _timer;

XunFeiTranslateTask? currtask;
@@ -109,7 +109,7 @@ class XunFeiTranslate {
"tts": {
"vcn": "x2_catherine",
"tts_results": {
"encoding": "raw",
"encoding": "lame",
"sample_rate": 16000,
"channels": 1,
"bit_depth": 16,
@@ -133,9 +133,9 @@ class XunFeiTranslate {

// Start simultaneous translation
Future<void> starttranslate(Stream<Uint8List> stream) async {
_isrecord = true;
isRecording = true;
stream.listen((data) {
if (_isrecord) {
if (isRecording) {
_buff = _appendToBuffer(data);
}
});
@@ -144,6 +144,7 @@ class XunFeiTranslate {
var frame = _getAudioData();
_volume = _calculateAmplitude(frame);
var state = _checkSpeakingStatus();
// print("当前状态: volume:$_volume state:$state ---------------------------");
if (state == 1) {
// speech started
currtask = XunFeiTranslateTask(_geturl(), _handleData);
@@ -155,6 +156,7 @@ class XunFeiTranslate {
} else if (state == 3) {
// speech ended
currtask?.sendaudio(_createParams(2, frame));
currtask = null;
print("发送最后一帧---------------------------");
}
});
@@ -163,7 +165,7 @@ class XunFeiTranslate {

// Stop translation
Future<void> stoptranslate() async {
_isrecord = false;
isRecording = false;
_timer?.cancel();
_timer = null;
if (currtask != null) {
@@ -172,7 +174,7 @@ class XunFeiTranslate {
print("发送最后一帧---------------------------");
currtask = null;
}
_isspeaking = false;
isspeaking = false;
_lastBelowThresholdTime = null;
_buff = Uint8List(0);
return;
@@ -220,26 +222,30 @@ class XunFeiTranslate {

// Check the speaking state
int _checkSpeakingStatus() {
if (_volume > _speakingThreshold && !_isspeaking) {
// volume above the threshold: speech has started
_isspeaking = true;
return 1;
} else if (_volume < _silenceThreshold) {
// volume below the threshold
if (_lastBelowThresholdTime == null) {
// record when the volume first fell below the threshold
_lastBelowThresholdTime = DateTime.now();
} else if (DateTime.now().difference(_lastBelowThresholdTime!) >
_silenceDuration) {
// below the threshold long enough: speech has ended
_isspeaking = false;
return 3;
if (isRecording) {
// currently recording
if (_volume > _speakingThreshold && !isspeaking) {
// volume above the threshold: speech has started
isspeaking = true;
return 1;
} else if (_volume < _silenceThreshold && isspeaking) {
// volume below the threshold
if (_lastBelowThresholdTime == null) {
// record when the volume first fell below the threshold
_lastBelowThresholdTime = DateTime.now();
} else if (DateTime.now().difference(_lastBelowThresholdTime!) >
_silenceDuration) {
// below the threshold long enough: speech has ended
isspeaking = false;
return 3;
}
} else {
// volume back above the threshold: reset the timer
_lastBelowThresholdTime = null;
}
} else {
// volume back above the threshold: reset the timer
_lastBelowThresholdTime = null;
}
if (!_isspeaking) {

if (!isspeaking) {
return 0;
} else {
return 2;
@@ -258,6 +264,7 @@ class XunFeiTranslate {
final model = RecognitionResult.fromJson(json);
if (model.payload?.recognitionResults?.text == null ||
model.payload?.recognitionResults?.text?.trim() == '') return;
print("收到识别数据 ${model.toString()}");
onRecognitionResult(model);
// translated result
} else if (payload.containsKey('streamtrans_results')) {
@@ -272,7 +279,6 @@ class XunFeiTranslate {
onAudioResult(AudioModel(status: status, sid: sid, data: bytes));
}
}
if (status == 2) {}
}
}

@@ -280,9 +286,10 @@ class XunFeiTranslate {
class XunFeiTranslateTask {
late WebSocketChannel _channel;
bool isconnected = false;
late Function(dynamic) handle;
late Function(dynamic) _handle;

XunFeiTranslateTask(String url, Function(dynamic) handle) {
_handle = handle;
_channel = WebSocketChannel.connect(Uri.parse(url));
_channel.stream.timeout(Duration(seconds: 10)); // set a timeout
_channel.stream.listen(
@@ -302,7 +309,6 @@ class XunFeiTranslateTask {
cancelOnError: true,
);
isconnected = true;
handle = handle;
}

Future<void> sendaudio(Map<String, dynamic> data) async {
@@ -317,7 +323,7 @@ class XunFeiTranslateTask {
// Parse the result
var messageMap = json.decode(message);
var status = messageMap["header"]["status"];
handle(messageMap);
_handle(messageMap);
if (status == 2) {
print("任务已结束!------------------------------------------");
_channel.sink.close();

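The rewritten _checkSpeakingStatus is a small amplitude-threshold voice-activity state machine, now gated on isRecording: it returns 1 when the volume first exceeds _speakingThreshold, 3 once the volume has stayed below _silenceThreshold for _silenceDuration, 2 while speech continues, and 0 when idle. A standalone sketch of the same state machine (illustrative class; thresholds copied from the diff):

/// Return codes follow the commit: 0 idle, 1 speech started, 2 speaking, 3 speech ended.
class SpeechDetector {
  final double speakingThreshold;
  final double silenceThreshold;
  final Duration silenceDuration;
  DateTime? _lastBelowThreshold;
  bool _speaking = false;

  SpeechDetector({
    this.speakingThreshold = 1000.0,
    this.silenceThreshold = 500.0,
    this.silenceDuration = const Duration(seconds: 1),
  });

  int update(double volume, {required bool recording}) {
    if (recording) {
      if (volume > speakingThreshold && !_speaking) {
        _speaking = true; // loud enough: speech starts
        return 1;
      } else if (volume < silenceThreshold && _speaking) {
        _lastBelowThreshold ??= DateTime.now();
        if (DateTime.now().difference(_lastBelowThreshold!) > silenceDuration) {
          _speaking = false; // quiet for long enough: speech ends
          _lastBelowThreshold = null;
          return 3;
        }
      } else {
        _lastBelowThreshold = null; // volume recovered: reset the silence timer
      }
    }
    return _speaking ? 2 : 0;
  }
}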

pubspec.yaml (+1 -1)

@@ -46,7 +46,7 @@ dependencies:
http: ^1.3.0
get: ^4.6.6
json_annotation: ^4.9.0
audioplayers: ^6.1.1
audioplayers: ^6.1.0
audio_waveforms: ^1.2.0
gap: ^3.0.1
flutter_gen_runner: ^5.9.0

