import 'dart:io';
import 'dart:math';

import 'package:didvan/config/design_config.dart';
import 'package:didvan/config/theme_data.dart';
import 'package:didvan/constants/app_icons.dart';
import 'package:didvan/constants/assets.dart';
import 'package:didvan/pages/home/direct/direct_state.dart';
import 'package:didvan/services/media/media.dart';
import 'package:didvan/services/storage/storage.dart';
import 'package:didvan/utils/date_time.dart';
import 'package:didvan/widgets/didvan/icon_button.dart';
import 'package:didvan/widgets/didvan/text.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_svg/flutter_svg.dart';
import 'package:just_waveform/just_waveform.dart';
import 'package:provider/provider.dart';
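
/// Renders a tappable/draggable audio waveform with an elapsed-time label
/// and a play/pause button. Supply a local [audioFile] (its waveform is
/// extracted on the fly), a pre-computed [waveform], or a network
/// [audioUrl]; on web a static SVG stands in for the waveform.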
class AudioVisualizer extends StatefulWidget {
  final File? audioFile;
  final Waveform? waveform;
  final String? audioUrl;
  final int? duration;
  final Color? backgroundColor;

  const AudioVisualizer({
    Key? key,
    this.audioFile,
    this.waveform,
    this.audioUrl,
    this.duration,
    this.backgroundColor,
  }) : super(key: key);

  @override
  State<AudioVisualizer> createState() => _AudioVisualizerState();
}

class _AudioVisualizerState extends State<AudioVisualizer> {
  Stream<WaveformProgress>? waveDataStream;
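
  // For local recordings, start waveform extraction once. JustWaveform
  // writes the computed waveform to a temp file and streams progress.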
  @override
  void initState() {
    super.initState();
    if (!kIsWeb && widget.audioFile != null) {
      waveDataStream = JustWaveform.extract(
        audioInFile: widget.audioFile!,
        waveOutFile: File('${StorageService.appTempsDir}/rec-wave.wave'),
        zoom: const WaveformZoom.pixelsPerSecond(100),
      );
    }
  }
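
  // Whether this clip is the one loaded in the shared player. Comparison is
  // by path, on the assumption that MediaService.lastAudioPath stores the
  // source path or URL.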
  bool get _nowPlaying =>
      MediaService.lastAudioPath == widget.audioFile?.path ||
      MediaService.lastAudioPath == widget.audioUrl;

  @override
  Widget build(BuildContext context) {
    return Container(
      decoration: BoxDecoration(
        color: widget.backgroundColor ??
            (DesignConfig.isDark
                ? Theme.of(context).colorScheme.black
                : Theme.of(context).colorScheme.background),
        borderRadius: DesignConfig.mediumBorderRadius,
      ),
      child: Row(
        children: [
          const SizedBox(width: 12),
          // Elapsed time while playing, or total duration while stopped.
          StreamBuilder<Duration>(
            stream:
                _nowPlaying ? MediaService.audioPlayer.positionStream : null,
            builder: (context, snapshot) {
              String text = '';
              if (MediaService.audioPlayer.duration == null) {
                // Duration is not known yet; schedule a rebuild for the
                // next frame so the label fills in once it is.
                Future.delayed(Duration.zero, () {
                  if (mounted) {
                    setState(() {});
                  }
                });
              }
              if (snapshot.data == null || snapshot.data == Duration.zero) {
                text = DateTimeUtils.normalizeTimeDuration(
                    MediaService.audioPlayer.duration ??
                        widget.waveform?.duration ??
                        Duration.zero);
              } else {
                text = DateTimeUtils.normalizeTimeDuration(snapshot.data!);
              }
              return DidvanText(
                text,
                color: Theme.of(context).colorScheme.focusedBorder,
                isEnglishFont: true,
              );
            },
          ),
          const SizedBox(width: 12),
          Expanded(
            child: Builder(
              builder: (context) {
                // Waveform extraction is unavailable on web; show a static
                // placeholder image instead.
                if (kIsWeb) {
                  return SvgPicture.asset(Assets.record);
                }
                if (widget.audioFile != null) {
                  return StreamBuilder<WaveformProgress>(
                    stream: waveDataStream,
                    builder: (context, snapshot) {
                      if (snapshot.data == null ||
                          snapshot.data!.waveform == null) {
                        return const SizedBox();
                      }
                      final waveform = snapshot.data!.waveform!;
                      // Share the extracted waveform with DirectState so
                      // other widgets can reuse it.
                      context.read<DirectState>().waveform = waveform;
                      return _waveWidget(waveform);
                    },
                  );
                }
                if (widget.waveform == null) {
                  return SvgPicture.asset(Assets.record);
                }
                return _waveWidget(widget.waveform!);
              },
            ),
          ),
          // Play/pause toggle; the icon tracks the player's playing stream.
          StreamBuilder<bool>(
            stream: _nowPlaying ? MediaService.audioPlayer.playingStream : null,
            builder: (context, snapshot) {
              return DidvanIconButton(
                icon: snapshot.data == true
                    ? DidvanIcons.pause_circle_solid
                    : DidvanIcons.play_circle_solid,
                color: Theme.of(context).colorScheme.focusedBorder,
                onPressed: () {
                  MediaService.handleAudioPlayback(
                    audioSource: widget.audioFile ?? widget.audioUrl,
                    isNetworkAudio: widget.audioFile == null,
                  );
                  setState(() {});
                },
              );
            },
          ),
        ],
      ),
    );
  }
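
  // The waveform itself. Seeking is only enabled while this clip is the
  // active one, so a stray tap cannot scrub a different audio source.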
  Widget _waveWidget(Waveform waveform) => IgnorePointer(
        ignoring: !_nowPlaying,
        child: GestureDetector(
          onHorizontalDragUpdate: _changePosition,
          onTapDown: _changePosition,
          child: SizedBox(
            height: double.infinity,
            width: double.infinity,
            child: _AudioWaveformWidget(
              waveform: waveform,
              start: Duration.zero,
              scale: 2,
              strokeWidth: 3,
              nowPlaying: _nowPlaying,
              duration: waveform.duration,
              waveColor: Theme.of(context).colorScheme.focusedBorder,
            ),
          ),
        ),
      );
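
  /// Seeks within the clip from a tap or horizontal drag on the waveform.
  /// [details] is a TapDownDetails or DragUpdateDetails; both expose
  /// localPosition, which is why the parameter is left untyped.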
  void _changePosition(details) {
    if (MediaService.audioPlayer.audioSource == null) return;
    // Approximate waveform width: screen width minus ~200 px taken up by
    // the timer label, play button, and padding.
    final fraction =
        details.localPosition.dx / (MediaQuery.of(context).size.width - 200);
    if (fraction >= 1 || fraction < 0) return;
    final totalMs = MediaService.audioPlayer.duration?.inMilliseconds;
    if (totalMs == null) return;
    MediaService.audioPlayer.seek(
      Duration(milliseconds: (fraction * totalMs).toInt()),
    );
  }
}
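
/// Draws [waveform] with a progress overlay driven by the shared player's
/// position stream while [nowPlaying] is true.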
class _AudioWaveformWidget extends StatelessWidget {
  final Color waveColor;
  final double scale;
  final double strokeWidth;
  final double pixelsPerStep;
  final Waveform waveform;
  final Duration start;
  final bool nowPlaying;
  final Duration duration;

  const _AudioWaveformWidget({
    Key? key,
    required this.waveform,
    required this.start,
    required this.duration,
    required this.nowPlaying,
    this.waveColor = Colors.blue,
    this.scale = 1.0,
    this.strokeWidth = 5.0,
    this.pixelsPerStep = 8.0,
  }) : super(key: key);

  @override
  Widget build(BuildContext context) {
    return ClipRect(
      child: StreamBuilder<Duration?>(
        stream: nowPlaying ? MediaService.audioPlayer.positionStream : null,
        builder: (context, snapshot) {
          double progress = 0;
          if (snapshot.data == null ||
              MediaService.audioPlayer.duration == null) {
            progress = 0;
          } else {
            progress = snapshot.data!.inMilliseconds /
                MediaService.audioPlayer.duration!.inMilliseconds *
                100;
          }
          // When the clip finishes, stop and rewind so the next tap plays
          // from the beginning.
          if (progress >= 100) {
            progress = 0;
            MediaService.audioPlayer.stop();
            MediaService.audioPlayer.seek(Duration.zero);
          }
          return CustomPaint(
            painter: _AudioWaveformPainter(
              waveform: waveform,
              start: start,
              duration: duration,
              scale: scale,
              strokeWidth: strokeWidth,
              pixelsPerStep: pixelsPerStep,
              progressPercentage: progress,
              // Played bars use waveColor; unplayed bars use the theme's
              // border color.
              progressColor: waveColor,
              color: Theme.of(context).colorScheme.border,
            ),
          );
        },
      ),
    );
  }
}
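
/// Paints min/max sample bars from the extracted [waveform], splitting the
/// stroke color at [progressPercentage] to show playback progress.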
class _AudioWaveformPainter extends CustomPainter {
  final double scale;
  final double strokeWidth;
  final double pixelsPerStep;
  final Waveform waveform;
  final Duration start;
  final Duration duration;
  final double progressPercentage;
  final Color progressColor;
  final Color color;

  _AudioWaveformPainter({
    required this.waveform,
    required this.start,
    required this.duration,
    required this.progressPercentage,
    required this.color,
    required this.progressColor,
    this.scale = 1.0,
    this.strokeWidth = 5.0,
    this.pixelsPerStep = 8.0,
  });

  @override
  void paint(Canvas canvas, Size size) {
    if (duration == Duration.zero) return;
    double width = size.width;
    double height = size.height;

    // Map waveform pixels onto device pixels, drawing one bar every
    // pixelsPerStep device pixels.
    final waveformPixelsPerWindow = waveform.positionToPixel(duration).toInt();
    final waveformPixelsPerDevicePixel = waveformPixelsPerWindow / width;
    final waveformPixelsPerStep = waveformPixelsPerDevicePixel * pixelsPerStep;
    final sampleOffset = waveform.positionToPixel(start);
    final sampleStart = -sampleOffset % waveformPixelsPerStep;
    final totalLength = waveformPixelsPerWindow;
    // Played portion.
    final wavePaintB = Paint()
      ..style = PaintingStyle.stroke
      ..strokeWidth = strokeWidth
      ..strokeCap = StrokeCap.round
      ..color = progressColor;
    // Unplayed portion.
    final wavePaintA = Paint()
      ..style = PaintingStyle.stroke
      ..strokeWidth = strokeWidth
      ..strokeCap = StrokeCap.round
      ..color = color;
    for (var i = sampleStart.toDouble();
        i <= waveformPixelsPerWindow + 1.0;
        i += waveformPixelsPerStep) {
      final sampleIdx = (sampleOffset + i).toInt();
      final x = i / waveformPixelsPerDevicePixel;
      final minY = normalise(waveform.getPixelMin(sampleIdx), height);
      final maxY = normalise(waveform.getPixelMax(sampleIdx), height);
      canvas.drawLine(
        Offset(x + strokeWidth / 2, max(strokeWidth * 0.75, minY)),
        Offset(x + strokeWidth / 2, min(height - strokeWidth * 0.75, maxY)),
        // Bars left of the playhead get the progress color.
        i / totalLength < progressPercentage / 100 ? wavePaintB : wavePaintA,
      );
    }
  }

  @override
  bool shouldRepaint(covariant _AudioWaveformPainter oldDelegate) {
    return oldDelegate.progressPercentage != progressPercentage;
  }
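
  /// Maps a signed 16-bit sample value [s] (scaled by [scale]) to a y
  /// coordinate, with zero samples landing near the vertical center.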
  double normalise(int s, double height) {
    final y = 32768 + (scale * s).clamp(-32768.0, 32767.0).toDouble();
    return height - 1 - y * height / 65536;
  }
}