// didvan-app/lib/views/direct/direct_state.dart

import 'dart:async';
import 'dart:io';
import 'package:audio_session/audio_session.dart';
import 'package:didvan/models/enums.dart';
import 'package:didvan/models/message_data/message_data.dart';
import 'package:didvan/models/message_data/news_attachment.dart';
import 'package:didvan/models/message_data/radar_attachment.dart';
import 'package:didvan/providers/core.dart';
import 'package:didvan/services/media/voice.dart';
import 'package:didvan/services/network/request.dart';
import 'package:didvan/services/network/request_helper.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter_sound/public/flutter_sound_player.dart';
import 'package:flutter_sound/public/flutter_sound_recorder.dart';
import 'package:flutter_sound_platform_interface/flutter_sound_platform_interface.dart';
import 'package:http/http.dart' as http;
import 'package:permission_handler/permission_handler.dart';
/// State/notifier for a single direct-message conversation.
///
/// Holds the message list, per-day grouping of message ids, the pending
/// deletion queue, and the flutter_sound recorder/player used for voice
/// messages.
class DirectState extends CoreProvier {
  // final _recorder = AudioRecorder();

  /// Messages in the conversation, newest first (new messages are inserted
  /// at index 0 by [sendMessage]).
  final List<MessageData> messages = [];

  /// Conversation/peer identifier used to build request URLs.
  late final int typeId;

  /// Message ids grouped by calendar day (the date part of `createdAt`),
  /// used to render day separators.
  final Map<String, List<int>> dailyMessages = {};

  /// Message ids queued for deletion; flushed by [delete].
  final List<int> deletionQueue = [];

  /// Draft text of the message being composed.
  String? text;

  /// News item the draft replies to, if any.
  NewsAttachment? replyNews;

  /// Radar item the draft replies to, if any.
  RadarAttachment? replyRadar;

  int? audioDuration;

  /// Path of the most recent recording, or null when nothing is recorded.
  String? path;

  FlutterSoundPlayer? mPlayer = FlutterSoundPlayer();
  FlutterSoundRecorder? mRecorder = FlutterSoundRecorder();
  Codec _codec = Codec.aacMP4;
  String _mPath = '${DateTime.now().millisecondsSinceEpoch ~/ 1000}.mp4';
  bool mPlayerIsInited = false;
  bool mRecorderIsInited = false;
  bool mplaybackReady = false;
  Timer? _timer;
  final theSource = AudioSource.microphone;

  /// Elapsed recording time, ticked once per second by [startTimer].
  final ValueNotifier<Duration> countTimer = ValueNotifier(Duration.zero);

  /// Requests microphone permission (on non-web platforms), opens the
  /// recorder, falls back to a web-supported codec when needed, and
  /// configures the audio session for voice recording.
  ///
  /// Throws [RecordingPermissionException] when the permission is denied.
  Future<void> openTheRecorder() async {
    if (!kIsWeb) {
      final status = await Permission.microphone.request();
      if (status != PermissionStatus.granted) {
        throw RecordingPermissionException(
            'Microphone permission not granted');
      }
    }
    await mRecorder!.openRecorder();
    if (!await mRecorder!.isEncoderSupported(_codec) && kIsWeb) {
      // AAC/MP4 is unavailable in this browser; fall back to WebM/Opus.
      _codec = Codec.opusWebM;
      _mPath = '${DateTime.now().millisecondsSinceEpoch ~/ 1000}.webm';
      if (!await mRecorder!.isEncoderSupported(_codec) && kIsWeb) {
        // No supported encoder at all; mark inited so the UI can proceed.
        mRecorderIsInited = true;
        return;
      }
    }
    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration(
      avAudioSessionCategory: AVAudioSessionCategory.playAndRecord,
      avAudioSessionCategoryOptions:
          AVAudioSessionCategoryOptions.allowBluetooth |
              AVAudioSessionCategoryOptions.defaultToSpeaker,
      avAudioSessionMode: AVAudioSessionMode.spokenAudio,
      avAudioSessionRouteSharingPolicy:
          AVAudioSessionRouteSharingPolicy.defaultPolicy,
      avAudioSessionSetActiveOptions: AVAudioSessionSetActiveOptions.none,
      androidAudioAttributes: const AndroidAudioAttributes(
        contentType: AndroidAudioContentType.speech,
        flags: AndroidAudioFlags.none,
        usage: AndroidAudioUsage.voiceCommunication,
      ),
      androidAudioFocusGainType: AndroidAudioFocusGainType.gain,
      androidWillPauseWhenDucked: true,
    ));
    mRecorderIsInited = true;
  }

  /// Fetches all messages of the conversation and rebuilds the daily groups.
  ///
  /// Sets [appState] to busy while loading, then idle on success or failed
  /// on error.
  Future<void> getMessages() async {
    appState = AppState.busy;
    final service = RequestService(RequestHelper.direct(typeId));
    await service.httpGet();
    if (!service.isSuccess) {
      appState = AppState.failed;
      return;
    }
    messages.clear();
    final messageDatas = service.result['messages'];
    for (var i = 0; i < messageDatas.length; i++) {
      messages.add(MessageData.fromJson(messageDatas[i]));
    }
    // Rebuild from scratch so a refresh does not duplicate ids in the
    // daily groups (the previous implementation only appended).
    _rebuildDailyGroups();
    appState = AppState.idle;
  }

  @override
  void dispose() {
    // Stop the ticker and release audio resources, then dispose the
    // underlying notifier last (standard ChangeNotifier teardown order).
    _timer?.cancel();
    mPlayer?.closePlayer();
    mPlayer = null;
    mRecorder?.closeRecorder();
    mRecorder = null;
    super.dispose();
  }

  /// Starts (or restarts) the one-second ticker that drives [countTimer].
  void startTimer() {
    // Cancel any previous ticker so repeated recordings do not leak timers
    // and make the counter tick more than once per second.
    _timer?.cancel();
    _timer = Timer.periodic(
      const Duration(seconds: 1),
      (Timer timer) {
        countTimer.value = Duration(seconds: countTimer.value.inSeconds + 1);
      },
    );
  }

  /// Resets the elapsed counter and starts recording to [_mPath].
  void record() async {
    countTimer.value = Duration.zero;
    await mRecorder!.startRecorder(
      toFile: _mPath,
      codec: _codec,
      audioSource: theSource,
    );
    startTimer();
    update();
  }

  /// Plays back the recorded file.
  ///
  /// Requires an initialized player, a finished recording, and both the
  /// recorder and the player to be stopped.
  void play() {
    assert(mPlayerIsInited &&
        mplaybackReady &&
        mRecorder!.isStopped &&
        mPlayer!.isStopped);
    // Fire-and-forget: the UI observes player state via flutter_sound.
    mPlayer!.startPlayer(fromURI: _mPath, whenFinished: () {});
  }

  /// Stops playback (fire-and-forget).
  void stopPlayer() {
    mPlayer!.stopPlayer();
  }

  /// Pauses playback (fire-and-forget).
  void pausePlayer() {
    mPlayer!.pausePlayer();
  }

  /// Resumes playback (fire-and-forget).
  void resumePlayer() {
    mPlayer!.resumePlayer();
  }

  /// Discards the recorded file reference and refreshes listeners.
  void deleteRecordedFile() {
    path = null;
    notifyListeners();
    update();
  }

  /// Clears any draft text and begins a new voice recording.
  Future<void> startRecording() async {
    text = null;
    record();
    notifyListeners();
  }

  /// Stops the recorder, remembers the recorded file path, and either sends
  /// it right away ([sendImidiately]) or waits for the user to confirm.
  Future<void> stopRecording({required bool sendImidiately}) async {
    path = await mRecorder!.stopRecorder();
    mplaybackReady = true;
    _timer?.cancel();
    mPlayer!.setSubscriptionDuration(countTimer.value);
    update();
    if (path == null) {
      notifyListeners();
      return;
    }
    if (sendImidiately) {
      await sendMessage();
    } else {
      notifyListeners();
    }
  }

  /// Deletes every queued message on the server (fire-and-forget) and
  /// removes it locally, then clears the queue.
  void delete() {
    for (var i = 0; i < deletionQueue.length; i++) {
      final service = RequestService(
        RequestHelper.deleteDirect(typeId, deletionQueue[i]),
      );
      service.delete();
      messages.removeWhere((element) => element.id == deletionQueue[i]);
    }
    deletionQueue.clear();
    notifyListeners();
  }

  /// Files [message]'s id under its creation day (the date part of
  /// `createdAt`, e.g. "2023-05-01").
  void _addToDailyGrouped(MessageData message) {
    final createdAt =
        message.createdAt.replaceAll('T', ' ').split(' ').first;
    dailyMessages.putIfAbsent(createdAt, () => []).add(message.id);
  }

  /// Rebuilds [dailyMessages] from scratch out of [messages].
  void _rebuildDailyGroups() {
    dailyMessages.clear();
    for (final message in messages) {
      _addToDailyGrouped(message);
    }
  }

  /// Sends the current draft: a text message when no recording exists,
  /// otherwise uploads the recorded audio. Does nothing when there is
  /// nothing to send.
  Future<void> sendMessage() async {
    if ((text == null || text!.isEmpty) && path == null) return;
    VoiceService.audioPlayer.stop();
    final body = {};
    if (text != null) {
      body.addAll({'text': text});
    }
    if (replyRadar != null) {
      body.addAll({'radarId': replyRadar!.id});
    }
    // Previously this block was accidentally duplicated (the same newsId
    // entry was added twice); keep it once.
    if (replyNews != null) {
      body.addAll({'newsId': replyNews!.id});
    }
    if (path != null) {
      body.addAll({'duration': countTimer.value.inSeconds.toString()});
    }
    text = null;
    notifyListeners();
    final service =
        RequestService(RequestHelper.sendDirectMessage(typeId), body: body);
    if (path == null) {
      await service.post();
      if (service.isSuccess) {
        final message = service.result['message'];
        messages.insert(
            0,
            MessageData.fromJson(message)
                .copyWith(news: replyNews, radar: replyRadar));
        _rebuildDailyGroups();
      }
    } else {
      // On web the recorder hands back a blob URL, so the bytes have to be
      // fetched over http; on mobile/desktop the path is a local file.
      final Uint8List uploadFile = kIsWeb
          ? (await http.get(Uri.parse(path!.replaceAll('%3A', ':'))))
              .bodyBytes
          : await File(path!).readAsBytes();
      await service.multipartBytes(
        file: uploadFile,
        method: 'POST',
        fieldName: 'audio',
        fileName: 'voice-message',
        mediaExtension: 'mp3',
        mediaFormat: 'audio',
      );
      if (service.isSuccess) {
        messages.insert(0, MessageData.fromJson(service.result['message']));
        _rebuildDailyGroups();
      }
    }
    path = null;
    replyRadar = null;
    replyNews = null;
    notifyListeners();
  }
}