D1APP-53 chat

parent 6d5548362c
commit c740a72025
@@ -5,7 +5,8 @@
     <uses-permission android:name="android.permission.INTERNET" />
     <application
         android:label="Didvan"
-        android:icon="@mipmap/ic_launcher">
+        android:icon="@mipmap/ic_launcher"
+        android:usesCleartextTraffic="true">
         <activity
             android:name=".MainActivity"
             android:launchMode="singleTop"
@@ -1,4 +1,6 @@
+import 'dart:convert';
 import 'dart:io';
+import 'dart:typed_data';
 
 import 'package:just_waveform/just_waveform.dart';
 
@@ -14,17 +16,19 @@ class MessageData {
   final RadarAttachment? radar;
   final File? audioFile;
   final Waveform? waveform;
+  final int? audioDuration;
 
   const MessageData({
     required this.id,
     required this.writedByAdmin,
     required this.readed,
     required this.createdAt,
-    required this.text,
-    required this.audio,
-    required this.radar,
-    required this.waveform,
+    this.text,
+    this.audio,
+    this.radar,
+    this.waveform,
     this.audioFile,
+    this.audioDuration,
   });
 
   factory MessageData.fromJson(Map<String, dynamic> json) => MessageData(
@@ -34,19 +38,26 @@ class MessageData {
         writedByAdmin: json['writedByAdmin'],
         readed: json['readed'],
         createdAt: json['createdAt'],
-        waveform: json['waveForm'] != null
+        audioDuration: json['waveform'] == null
+            ? null
+            : jsonDecode(json['waveform'])['duration'] ?? 0,
+        waveform: json['waveform'] != null
             ? Waveform(
-                version: json['version'],
-                flags: json['flags'],
-                sampleRate: json['sampleRate'],
-                samplesPerPixel: json['samplesPerPixel'],
-                length: json['length'],
-                data: json['data'],
+                version: 1,
+                flags: 0,
+                sampleRate: 44100,
+                samplesPerPixel: 441,
+                length: jsonDecode(json['waveform'])['length'],
+                data: Int16List.fromList(
+                  List<int>.from(
+                    jsonDecode(json['waveform'])['data'],
+                  ),
+                ),
               )
             : null,
         radar: json['radar'] == null
            ? null
-            : RadarAttachment.fromJson(json['radar'] as Map<String, dynamic>),
+            : RadarAttachment.fromJson(json['radar']),
       );
 
   Map<String, dynamic> toJson() => {
@@ -1,3 +1,4 @@
+import 'dart:convert';
 import 'dart:io';
 
 import 'package:didvan/models/enums.dart';
@@ -89,37 +90,57 @@ class DirectState extends CoreProvier {
 
   Future<void> sendMessage() async {
     if ((text == null || text!.isEmpty) && recordedFile == null) return;
-    final body = {};
-    if (text != null) {
-      body.addAll({'text': text});
+    if (recordedFile != null) {
+      while (waveform == null) {}
+    }
     messages.insert(
       0,
       MessageData(
         id: 0,
         writedByAdmin: false,
         readed: false,
-        createdAt: DateTime.now().toString(),
+        createdAt:
+            DateTime.now().subtract(const Duration(minutes: 210)).toString(),
         text: text,
         audio: null,
         audioFile: recordedFile,
         radar: replyRadar,
         waveform: waveform,
+        audioDuration: waveform != null ? waveform!.duration.inSeconds : null,
       ),
     );
-    }
     _addToDailyGrouped();
+    final body = {};
+    if (text != null) {
+      body.addAll({'text': text});
+    }
     if (replyRadar != null) {
       body.addAll({'radarId': replyRadar!.id});
     }
+    final uploadFile = recordedFile;
     text = null;
     recordedFile = null;
     notifyListeners();
     final service =
         RequestService(RequestHelper.sendDirectMessage(typeId), body: body);
-    if (recordedFile == null) {
-      await service.post();
+    if (uploadFile == null) {
+      service.post();
     } else {
-      await service.multipart(recordedFile, 'POST');
+      body.addAll({
+        'waveform': jsonEncode({
+          'data': waveform!.data,
+          'length': waveform!.length,
+          'duration': waveform!.duration,
+        })
+      });
+      service.multipart(
+        file: uploadFile,
+        method: 'POST',
+        fieldName: 'audio',
+        fileName: 'voice-message',
+        mediaExtension: 'm4a',
+        mediaFormat: 'audio',
+      );
     }
   }
 }
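Taken together, the jsonEncode call above and the jsonDecode calls in MessageData.fromJson imply a simple contract for the waveform field: it travels as a JSON-encoded string holding 'data', 'length', and 'duration'. A minimal round-trip sketch follows; the values are illustrative only, and the duration unit is assumed to be seconds to match the int? audioDuration field.

import 'dart:convert';

void main() {
  // Producer side (sendMessage): waveform metadata is serialized to a string.
  final encoded = jsonEncode({
    'data': [0, 12, -7, 3], // raw samples, later wrapped via Int16List.fromList
    'length': 4,            // sample count, passed to Waveform(length: ...)
    'duration': 2,          // assumed seconds, read into int? audioDuration
  });

  // Consumer side (MessageData.fromJson): the same string is decoded again.
  final decoded = jsonDecode(encoded);
  print(decoded['length']);   // 4
  print(decoded['duration']); // 2
}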
@@ -59,10 +59,14 @@ class Message extends StatelessWidget {
       child: Column(
         children: [
           if (message.text != null) DidvanText(message.text!),
-          if (message.audio != null)
-            AudioVisualizer(
+          if (message.audio != null || message.audioFile != null)
+            SizedBox(
+              height: 50,
+              child: AudioVisualizer(
                 audioUrl: message.audio,
                 waveform: message.waveform,
+                backgroundColor: Colors.transparent,
+              ),
             ),
           if (message.radar != null) const DidvanDivider(),
           if (message.radar != null) const SizedBox(height: 4),
@@ -156,7 +160,7 @@ class _MessageContainer extends StatelessWidget {
   @override
   Widget build(BuildContext context) {
     return Container(
-      padding: const EdgeInsets.symmetric(vertical: 4, horizontal: 16),
+      padding: const EdgeInsets.symmetric(vertical: 8, horizontal: 16),
       decoration: BoxDecoration(
         borderRadius: DesignConfig.mediumBorderRadius.copyWith(
           bottomLeft: writedByAdmin ? Radius.zero : null,
|
|
@ -6,6 +6,7 @@ import 'package:didvan/config/theme_data.dart';
|
||||||
import 'package:didvan/constants/app_icons.dart';
|
import 'package:didvan/constants/app_icons.dart';
|
||||||
import 'package:didvan/constants/assets.dart';
|
import 'package:didvan/constants/assets.dart';
|
||||||
import 'package:didvan/pages/home/direct/direct_state.dart';
|
import 'package:didvan/pages/home/direct/direct_state.dart';
|
||||||
|
import 'package:didvan/services/media/media.dart';
|
||||||
import 'package:didvan/services/storage/storage.dart';
|
import 'package:didvan/services/storage/storage.dart';
|
||||||
import 'package:didvan/utils/date_time.dart';
|
import 'package:didvan/utils/date_time.dart';
|
||||||
import 'package:didvan/widgets/didvan/icon_button.dart';
|
import 'package:didvan/widgets/didvan/icon_button.dart';
|
||||||
|
|
@@ -13,7 +14,6 @@ import 'package:didvan/widgets/didvan/text.dart';
 import 'package:flutter/foundation.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter_svg/flutter_svg.dart';
-import 'package:just_audio/just_audio.dart';
 import 'package:just_waveform/just_waveform.dart';
 import 'package:provider/provider.dart';
 
@@ -21,12 +21,16 @@ class AudioVisualizer extends StatefulWidget {
   final File? audioFile;
   final Waveform? waveform;
   final String? audioUrl;
+  final int? duration;
+  final Color? backgroundColor;
 
   const AudioVisualizer({
     Key? key,
     this.audioFile,
     this.waveform,
     this.audioUrl,
+    this.duration,
+    this.backgroundColor,
   }) : super(key: key);
 
   @override
@@ -34,8 +38,6 @@ class AudioVisualizer extends StatefulWidget {
 }
 
 class _AudioVisualizerState extends State<AudioVisualizer> {
-  final AudioPlayer _audioPlayer = AudioPlayer();
-
   Stream<WaveformProgress>? waveDataStream;
 
   @override
@@ -47,34 +49,43 @@ class _AudioVisualizerState extends State<AudioVisualizer> {
         zoom: const WaveformZoom.pixelsPerSecond(100),
       );
     }
-    _setupAudioPlayer();
     super.initState();
   }
 
+  bool get _nowPlaying =>
+      MediaService.lastAudioPath == widget.audioFile ||
+      MediaService.lastAudioPath == widget.audioUrl;
+
   @override
   Widget build(BuildContext context) {
     return Container(
       decoration: BoxDecoration(
-        color: DesignConfig.isDark
+        color: widget.backgroundColor ??
+            (DesignConfig.isDark
                 ? Theme.of(context).colorScheme.black
-                : Theme.of(context).colorScheme.background,
+                : Theme.of(context).colorScheme.background),
         borderRadius: DesignConfig.mediumBorderRadius,
       ),
       child: Row(
         children: [
           const SizedBox(width: 12),
           StreamBuilder<Duration>(
-            stream: _audioPlayer.positionStream,
+            stream:
+                _nowPlaying ? MediaService.audioPlayer.positionStream : null,
             builder: (context, snapshot) {
               String text = '';
-              if (_audioPlayer.duration == null) {
+              if (MediaService.audioPlayer.duration == null) {
                 Future.delayed(Duration.zero, () {
+                  if (mounted) {
                     setState(() {});
+                  }
                 });
               }
               if (snapshot.data == null || snapshot.data == Duration.zero) {
                 text = DateTimeUtils.normalizeTimeDuration(
-                    _audioPlayer.duration ?? Duration.zero);
+                    MediaService.audioPlayer.duration ??
+                        widget.waveform?.duration ??
+                        Duration.zero);
               } else {
                 text = DateTimeUtils.normalizeTimeDuration(snapshot.data!);
               }
@@ -92,15 +103,12 @@ class _AudioVisualizerState extends State<AudioVisualizer> {
             if (kIsWeb) {
               return SvgPicture.asset(Assets.record);
             }
-
             if (widget.audioFile != null) {
               return StreamBuilder<WaveformProgress>(
                 stream: waveDataStream,
                 builder: (context, snapshot) {
-                  if (snapshot.data == null) {
-                    return const SizedBox();
-                  }
-                  if (snapshot.data!.waveform == null) {
+                  if (snapshot.data == null ||
+                      snapshot.data!.waveform == null) {
                     return const SizedBox();
                   }
                   final waveform = snapshot.data!.waveform!;
@@ -109,19 +117,28 @@ class _AudioVisualizerState extends State<AudioVisualizer> {
                 },
               );
             }
+            if (widget.waveform == null && waveDataStream == null) {
+              return SvgPicture.asset(Assets.record);
+            }
             return _waveWidget(widget.waveform!);
           },
         ),
       ),
       StreamBuilder<bool>(
-        stream: _audioPlayer.playingStream,
+        stream: _nowPlaying ? MediaService.audioPlayer.playingStream : null,
         builder: (context, snapshot) {
           return DidvanIconButton(
             icon: snapshot.data == true
                 ? DidvanIcons.pause_circle_solid
                 : DidvanIcons.play_circle_solid,
             color: Theme.of(context).colorScheme.focusedBorder,
-            onPressed: _playAndPouse,
+            onPressed: () {
+              MediaService.handleAudioPlayback(
+                audioSource: widget.audioFile ?? widget.audioUrl,
+                isNetworkAudio: widget.audioFile == null,
+              );
+              setState(() {});
+            },
           );
         },
       ),
@@ -130,7 +147,9 @@ class _AudioVisualizerState extends State<AudioVisualizer> {
     );
   }
 
-  Widget _waveWidget(Waveform waveform) => GestureDetector(
+  Widget _waveWidget(Waveform waveform) => IgnorePointer(
+        ignoring: !_nowPlaying,
+        child: GestureDetector(
           onHorizontalDragUpdate: _changePosition,
           onTapDown: _changePosition,
           child: SizedBox(
@@ -138,119 +157,86 @@ class _AudioVisualizerState extends State<AudioVisualizer> {
             width: double.infinity,
             child: _AudioWaveformWidget(
               waveform: waveform,
-              audioPlayer: _audioPlayer,
               start: Duration.zero,
               scale: 2,
               strokeWidth: 3,
+              nowPlaying: _nowPlaying,
               duration: waveform.duration,
               waveColor: Theme.of(context).colorScheme.focusedBorder,
             ),
           ),
+        ),
       );
 
   void _changePosition(details) {
+    if (MediaService.audioPlayer.audioSource == null) return;
     double posper =
         details.localPosition.dx / (MediaQuery.of(context).size.width - 200);
     if (posper >= 1 || posper < 0) return;
-    final position = _audioPlayer.duration!.inMilliseconds;
-    _audioPlayer.seek(
+    final position = MediaService.audioPlayer.duration!.inMilliseconds;
+    MediaService.audioPlayer.seek(
      Duration(milliseconds: (posper * position).toInt()),
    );
-    setState(() {});
-  }
-
-  Future<void> _setupAudioPlayer() async {
-    if (kIsWeb || widget.audioFile == null) {
-      await _audioPlayer.setUrl(
-        kIsWeb
-            ? widget.audioFile!.uri.path.replaceAll('%3A', ':')
-            : widget.audioUrl!,
-      );
-    } else {
-      await _audioPlayer.setFilePath(widget.audioFile!.path);
-    }
-  }
-
-  Future<void> _playAndPouse() async {
-    if (_audioPlayer.playing) {
-      _audioPlayer.pause();
-      return;
-    }
-    await _audioPlayer.play();
-  }
-
-  @override
-  void dispose() {
-    _audioPlayer.dispose();
-    super.dispose();
   }
 }
 
-class _AudioWaveformWidget extends StatefulWidget {
+class _AudioWaveformWidget extends StatelessWidget {
   final Color waveColor;
   final double scale;
   final double strokeWidth;
   final double pixelsPerStep;
   final Waveform waveform;
   final Duration start;
+  final bool nowPlaying;
   final Duration duration;
-  final AudioPlayer audioPlayer;
 
   const _AudioWaveformWidget({
     Key? key,
     required this.waveform,
     required this.start,
     required this.duration,
-    required this.audioPlayer,
+    required this.nowPlaying,
     this.waveColor = Colors.blue,
     this.scale = 1.0,
     this.strokeWidth = 5.0,
     this.pixelsPerStep = 8.0,
   }) : super(key: key);
 
-  @override
-  __AudioWaveformWidgetState createState() => __AudioWaveformWidgetState();
-}
-
-class __AudioWaveformWidgetState extends State<_AudioWaveformWidget>
-    with SingleTickerProviderStateMixin {
-  double progress = 0;
-
-  @override
-  void initState() {
-    widget.audioPlayer.positionStream.listen((event) {
-      if (widget.audioPlayer.duration == null) return;
-      setState(() {
-        progress = event.inMilliseconds /
-            widget.audioPlayer.duration!.inMilliseconds *
-            100;
-        if (progress >= 100) {
-          progress = 0;
-          widget.audioPlayer.stop();
-          widget.audioPlayer.seek(Duration.zero);
-        }
-      });
-    });
-    super.initState();
-  }
-
   @override
   Widget build(BuildContext context) {
     return ClipRect(
-      child: CustomPaint(
+      child: StreamBuilder<Duration?>(
+          stream: nowPlaying ? MediaService.audioPlayer.positionStream : null,
+          builder: (context, snapshot) {
+            double progress = 0;
+            if (snapshot.data == null ||
+                MediaService.audioPlayer.duration == null) {
+              progress = 0;
+            } else {
+              progress = snapshot.data!.inMilliseconds /
+                  MediaService.audioPlayer.duration!.inMilliseconds *
+                  100;
+            }
+            if (progress >= 100) {
+              progress = 0;
+              MediaService.audioPlayer.stop();
+              MediaService.audioPlayer.seek(Duration.zero);
+            }
+            return CustomPaint(
              painter: _AudioWaveformPainter(
-                waveColor: widget.waveColor,
-                waveform: widget.waveform,
-                start: widget.start,
-                duration: widget.duration,
-                scale: widget.scale,
-                strokeWidth: widget.strokeWidth,
-                pixelsPerStep: widget.pixelsPerStep,
+                waveColor: waveColor,
+                waveform: waveform,
+                start: start,
+                duration: duration,
+                scale: scale,
+                strokeWidth: strokeWidth,
+                pixelsPerStep: pixelsPerStep,
                progressPercentage: progress,
                progressColor: Theme.of(context).colorScheme.focusedBorder,
                color: Theme.of(context).colorScheme.border,
              ),
-      ),
+            );
+          }),
    );
  }
}
@@ -37,7 +37,14 @@ class UserProvider extends CoreProvier {
     appState = AppState.isolatedBusy;
     final RequestService service =
         RequestService(RequestHelper.updateProfilePhoto);
-    await service.multipart(file, 'PUT');
+    await service.multipart(
+      file: file,
+      method: 'PUT',
+      fileName: 'user-profile',
+      fieldName: 'photo',
+      mediaExtension: 'jpg',
+      mediaFormat: 'image',
+    );
     if (service.isSuccess) {
       user = user.copyWith(photo: service.result['photo']);
       appState = AppState.idle;
@@ -1,6 +1,41 @@
+import 'package:didvan/services/network/request.dart';
+import 'package:didvan/services/network/request_helper.dart';
+import 'package:flutter/foundation.dart';
 import 'package:image_picker/image_picker.dart';
+import 'package:just_audio/just_audio.dart';
 
 class MediaService {
+  static final AudioPlayer audioPlayer = AudioPlayer();
+  static dynamic lastAudioPath;
+
+  static Future<void> handleAudioPlayback(
+      {required dynamic audioSource, required bool isNetworkAudio}) async {
+    if (lastAudioPath == audioSource) {
+      if (audioPlayer.playing) {
+        await audioPlayer.pause();
+      } else {
+        await audioPlayer.play();
+      }
+    } else {
+      lastAudioPath = audioSource;
+      if (isNetworkAudio) {
+        await audioPlayer.setUrl(
+          RequestHelper.baseUrl +
+              audioSource +
+              '?accessToken=${RequestService.token}',
+        );
+      } else {
+        if (kIsWeb) {
+          await audioPlayer
+              .setUrl(audioSource!.uri.path.replaceAll('%3A', ':'));
+        } else {
+          await audioPlayer.setFilePath(audioSource.path);
+        }
+      }
+      await audioPlayer.play();
+    }
+  }
+
   static Future<XFile?> pickImage({required ImageSource source}) async {
     final imagePicker = ImagePicker();
     final XFile? pickedFile = await imagePicker.pickImage(source: source);
@@ -99,7 +99,14 @@ class RequestService {
     }
   }
 
-  Future<void> multipart(dynamic file, String method) async {
+  Future<void> multipart({
+    required dynamic file,
+    required String method,
+    required String fileName,
+    required String fieldName,
+    required String mediaFormat,
+    required String mediaExtension,
+  }) async {
     try {
       final request = http.MultipartRequest(method, Uri.parse(url));
       _headers.update('Content-Type', (_) => 'multipart/form-data');
@@ -112,11 +119,11 @@ class RequestService {
       }
       request.files.add(
         http.MultipartFile(
-          'photo',
+          fieldName,
           file.readAsBytes().asStream(),
           length,
-          filename: 'profile-photo',
-          contentType: parser.MediaType('image', 'jpg'),
+          filename: fileName + '.' + mediaExtension,
+          contentType: parser.MediaType(mediaFormat, mediaExtension),
         ),
       );
       final streamedResponse = await request