import 'dart:io';

import 'package:didvan/config/theme_data.dart';
import 'package:didvan/constants/app_icons.dart';
import 'package:didvan/models/requests/studio.dart';
import 'package:didvan/models/studio_details_data.dart';
import 'package:didvan/services/media/media.dart';
import 'package:didvan/views/home/media/widgets/audio_waveform_progress.dart';
import 'package:didvan/views/podcasts/studio_details/studio_details_state.dart';
import 'package:didvan/views/widgets/didvan/icon_button.dart';
import 'package:flutter/material.dart';
import 'package:just_audio/just_audio.dart';
import 'package:provider/provider.dart';

/// Inline audio player row: a play/pause button, the total duration,
/// a seekable waveform progress bar, and the current position.
class AudioWidget extends StatelessWidget {
  final String? audioUrl;
  final File? audioFile;
  final int id;
  final StudioDetailsData? audioMetaData;
  final Duration? duration;

  const AudioWidget({
    Key? key,
    this.audioUrl,
    this.audioFile,
    required this.id,
    this.audioMetaData,
    this.duration,
  }) : super(key: key);

  /// Formats a [Duration] as mm:ss.
  String _formatDuration(Duration duration) {
    String twoDigits(int n) => n.toString().padLeft(2, "0");
    String twoDigitMinutes = twoDigits(duration.inMinutes.remainder(60));
    String twoDigitSeconds = twoDigits(duration.inSeconds.remainder(60));
    return "$twoDigitMinutes:$twoDigitSeconds";
  }

  @override
  Widget build(BuildContext context) {
    // Tag identifying this widget's audio in the shared player, so several
    // AudioWidgets can coexist without fighting over playback state.
    final tag =
        audioMetaData != null ? '${audioMetaData!.type}-$id' : 'message-$id';

    return StreamBuilder(
      stream: MediaService.audioPlayer.playerStateStream,
      builder: (context, snapshot) {
        final isCurrentlyPlaying = MediaService.audioPlayerTag == tag;

        return StreamBuilder(
          stream: MediaService.audioPlayer.positionStream,
          builder: (context, posSnapshot) {
            // Prefer the metadata duration, then the explicit [duration];
            // while this widget owns the player, trust the player's own value.
            var totalDuration = audioMetaData?.duration != null
                ? Duration(seconds: audioMetaData!.duration!)
                : (duration ?? Duration.zero);
            if (isCurrentlyPlaying &&
                MediaService.audioPlayer.duration != null &&
                MediaService.audioPlayer.duration! > Duration.zero) {
              totalDuration = MediaService.audioPlayer.duration!;
            }

            final currentPosition = isCurrentlyPlaying
                ? (posSnapshot.data ?? Duration.zero)
                : Duration.zero;
            final progress = (totalDuration.inMilliseconds > 0)
                ? currentPosition.inMilliseconds / totalDuration.inMilliseconds
                : 0.0;

            return Row(
              crossAxisAlignment: CrossAxisAlignment.center,
              children: [
                _AudioControllerButton(
                  audioFile: audioFile,
                  audioUrl: audioUrl,
                  id: id,
                  audioMetaData: audioMetaData,
                ),
                const SizedBox(width: 12),
                Expanded(
                  child: Padding(
                    padding: const EdgeInsets.only(top: 4),
                    child: Row(
                      children: [
                        SizedBox(
                          width: 45,
                          child: Text(
                            _formatDuration(totalDuration),
                            style: TextStyle(
                              fontSize: 12,
                              color: Theme.of(context).colorScheme.caption,
                            ),
                            textAlign: TextAlign.center,
                          ),
                        ),
                        const SizedBox(width: 8),
                        Expanded(
                          child: AudioWaveformProgress(
                            progress: progress.clamp(0.0, 1.0),
                            isActive: isCurrentlyPlaying,
                            // Seeking only applies while this widget owns
                            // the shared player.
                            onChanged: isCurrentlyPlaying
                                ? (value) {
                                    final seekMillis =
                                        (value * totalDuration.inMilliseconds)
                                            .toInt();
                                    MediaService.audioPlayer.seek(
                                      Duration(milliseconds: seekMillis),
                                    );
                                  }
                                : null,
                          ),
                        ),
                        const SizedBox(width: 8),
                        SizedBox(
                          width: 45,
                          child: Text(
                            _formatDuration(currentPosition),
                            style: TextStyle(
                              fontSize: 12,
                              color: Theme.of(context).colorScheme.caption,
                            ),
                            textAlign: TextAlign.center,
                          ),
                        ),
                      ],
                    ),
                  ),
                ),
              ],
            );
          },
        );
      },
    );
  }
}

/// Play/pause button that drives the shared [MediaService] audio player.
class _AudioControllerButton extends StatelessWidget {
  final String? audioUrl;
  final File? audioFile;
  final int id;
  final StudioDetailsData? audioMetaData;

  const _AudioControllerButton({
    Key? key,
    this.audioUrl,
    this.audioFile,
    required this.id,
    this.audioMetaData,
  }) : super(key: key);

  /// Whether the shared player is currently bound to this widget's audio.
  bool get _nowPlaying =>
      MediaService.audioPlayerTag ==
      (audioMetaData != null ? '${audioMetaData!.type}-$id' : 'message-$id');

  @override
  Widget build(BuildContext context) {
    return StreamBuilder(
      stream: MediaService.audioPlayer.playerStateStream,
      builder: (context, snapshot) {
        // When playback finishes, pause and rewind so the button returns to
        // its "play" state.
        if (snapshot.hasData &&
            snapshot.data!.processingState == ProcessingState.completed) {
          MediaService.audioPlayer.pause();
          MediaService.audioPlayer.seek(Duration.zero);
        }

        return DidvanIconButton(
          icon: MediaService.audioPlayer.playing && _nowPlaying
              ? DidvanIcons.pause_circle_solid
              : DidvanIcons.play_circle_solid,
          gestureSize: 36,
          color: Theme.of(context).colorScheme.focusedBorder,
          onPressed: () async {
            // For podcasts, make sure studio details are loaded before
            // starting playback.
            if (audioMetaData?.type == 'podcast') {
              final state = context.read<StudioDetailsState>();
              if (MediaService.currentPodcast == null) {
                await state.getStudioDetails(
                  id,
                  args: const StudioRequestArgs(page: 0, type: 'podcast'),
                );
              }
            }
            if (snapshot.data == null &&
                _nowPlaying &&
                MediaService.audioPlayer.playing) {
              return;
            }
            if (audioMetaData != null) {
              MediaService.currentPodcast = audioMetaData;
            }
            MediaService.handleAudioPlayback(
              audioSource: audioFile?.path ?? audioUrl,
              id: id,
              isNetworkAudio: audioFile == null,
              isVoiceMessage: audioMetaData == null,
            );
          },
        );
      },
    );
  }
}
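
// Usage sketch (illustrative only; `episode` is a hypothetical model exposing
// the fields shown). The widget needs an id plus either a network URL or a
// local file; metadata and duration are optional:
//
//   AudioWidget(
//     id: episode.id,
//     audioUrl: episode.audioUrl,
//     audioMetaData: episode.studioDetails,
//     duration: Duration(seconds: episode.lengthSeconds),
//   )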