// didvan-app/lib/widgets/audio_visualizer.dart
// (315 lines, 9.7 KiB, Dart)
import 'dart:async';
import 'dart:io';
import 'dart:math';

import 'package:didvan/config/design_config.dart';
import 'package:didvan/config/theme_data.dart';
import 'package:didvan/constants/app_icons.dart';
import 'package:didvan/constants/assets.dart';
import 'package:didvan/services/storage/storage.dart';
import 'package:didvan/utils/date_time.dart';
import 'package:didvan/widgets/didvan/icon_button.dart';
import 'package:didvan/widgets/didvan/text.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_svg/flutter_svg.dart';
import 'package:just_audio/just_audio.dart';
import 'package:just_waveform/just_waveform.dart';
/// An interactive playback widget for a recorded audio [File].
///
/// Shows the current/total time, a tappable waveform (extracted with
/// `just_waveform` on non-web platforms; a static placeholder asset on
/// web) and a play/pause button.
class AudioVisualizer extends StatefulWidget {
  /// The audio file to play back and visualize.
  final File audioFile;

  const AudioVisualizer({super.key, required this.audioFile});

  @override
  State<AudioVisualizer> createState() => _AudioVisualizerState();
}
class _AudioVisualizerState extends State<AudioVisualizer> {
  final AudioPlayer _audioPlayer = AudioPlayer();

  /// Emits waveform-extraction progress. Null on web, where extraction is
  /// unsupported and a placeholder image is rendered instead.
  Stream<WaveformProgress>? waveDataStream;

  @override
  void initState() {
    // Convention: call super.initState() first (the original called it last).
    super.initState();
    if (!kIsWeb) {
      waveDataStream = JustWaveform.extract(
        audioInFile: widget.audioFile,
        waveOutFile: File('${StorageService.appTempsDir}/rec-wave.wave'),
        zoom: const WaveformZoom.pixelsPerSecond(100),
      );
    }
    _setupAudioPlayer();
  }

  @override
  Widget build(BuildContext context) {
    return Container(
      decoration: BoxDecoration(
        color: DesignConfig.isDark
            ? Theme.of(context).colorScheme.black
            : Theme.of(context).colorScheme.background,
        borderRadius: DesignConfig.mediumBorderRadius,
      ),
      child: Row(
        children: [
          const SizedBox(width: 12),
          // Time label: shows the current position while playing, otherwise
          // the total duration.
          StreamBuilder<Duration>(
            stream: _audioPlayer.positionStream,
            builder: (context, snapshot) {
              String text = '';
              if (_audioPlayer.duration == null) {
                // Duration not known yet — schedule a rebuild so the label
                // refreshes once the player has loaded the media. Guard with
                // `mounted` so the delayed callback cannot touch a disposed
                // state.
                Future.delayed(Duration.zero, () {
                  if (mounted) setState(() {});
                });
              }
              if (snapshot.data == null || snapshot.data == Duration.zero) {
                text = DateTimeUtils.normalizeTimeDuration(
                    _audioPlayer.duration ?? Duration.zero);
              } else {
                text = DateTimeUtils.normalizeTimeDuration(snapshot.data!);
              }
              return DidvanText(
                text,
                color: Theme.of(context).colorScheme.focusedBorder,
                isEnglishFont: true,
              );
            },
          ),
          const SizedBox(width: 12),
          Expanded(
            child: Builder(
              builder: (context) {
                if (kIsWeb) {
                  // No waveform extraction on web; show a static asset.
                  return SvgPicture.asset(Assets.record);
                }
                return StreamBuilder<WaveformProgress>(
                  stream: waveDataStream,
                  builder: (context, snapshot) {
                    if (snapshot.data == null) {
                      return const SizedBox();
                    }
                    if (snapshot.data!.waveform == null) {
                      // Extraction still in progress.
                      return const SizedBox();
                    }
                    final waveform = snapshot.data!.waveform!;
                    return GestureDetector(
                      // Both tap and drag seek to the touched position.
                      onHorizontalDragUpdate: _changePosition,
                      onTapDown: _changePosition,
                      child: SizedBox(
                        height: double.infinity,
                        width: double.infinity,
                        child: _AudioWaveformWidget(
                          waveform: waveform,
                          audioPlayer: _audioPlayer,
                          start: Duration.zero,
                          scale: 2,
                          strokeWidth: 3,
                          duration: waveform.duration,
                          waveColor:
                              Theme.of(context).colorScheme.focusedBorder,
                        ),
                      ),
                    );
                  },
                );
              },
            ),
          ),
          // Play/pause toggle, driven by the player's playing state.
          StreamBuilder<bool>(
            stream: _audioPlayer.playingStream,
            builder: (context, snapshot) {
              return DidvanIconButton(
                icon: snapshot.data == true
                    ? DidvanIcons.pause_circle_solid
                    : DidvanIcons.play_circle_solid,
                color: Theme.of(context).colorScheme.focusedBorder,
                onPressed: _playAndPause,
              );
            },
          ),
        ],
      ),
    );
  }

  /// Seeks the player to the position corresponding to a horizontal
  /// tap/drag. [details] may be either [TapDownDetails] or
  /// [DragUpdateDetails]; both expose `localPosition`.
  ///
  /// NOTE(review): the 200px offset presumably accounts for the time label
  /// and button widths flanking the waveform — confirm against layout.
  void _changePosition(details) {
    final duration = _audioPlayer.duration;
    // The original used `duration!`, which crashed when seeking before the
    // media finished loading.
    if (duration == null) return;
    final fraction =
        details.localPosition.dx / (MediaQuery.of(context).size.width - 200);
    if (fraction >= 1 || fraction < 0) return;
    _audioPlayer.seek(
      Duration(milliseconds: (fraction * duration.inMilliseconds).toInt()),
    );
    setState(() {});
  }

  /// Loads the audio source into the player.
  ///
  /// On web the file URI path is used (with `%3A` un-escaped back to ':');
  /// elsewhere the plain file path is used.
  Future<void> _setupAudioPlayer() async {
    if (kIsWeb) {
      await _audioPlayer.setUrl(
        widget.audioFile.uri.path.replaceAll('%3A', ':'),
      );
    } else {
      await _audioPlayer.setFilePath(widget.audioFile.path);
    }
  }

  /// Toggles playback. (Renamed from the original misspelled
  /// `_playAndPouse`; private, so no external callers are affected.)
  Future<void> _playAndPause() async {
    if (_audioPlayer.playing) {
      _audioPlayer.pause();
      return;
    }
    await _audioPlayer.play();
  }

  @override
  void dispose() {
    _audioPlayer.dispose();
    super.dispose();
  }
}
/// Internal widget that renders [waveform] and listens to [audioPlayer]'s
/// position stream (in its state) to advance the progress overlay.
class _AudioWaveformWidget extends StatefulWidget {
  // NOTE(review): forwarded to the painter's `waveColor` parameter, which
  // the painter never uses — the actual draw colors come from the theme in
  // the state's build. Kept for call-site compatibility.
  final Color waveColor;
  // Amplitude multiplier applied to each sample before normalization.
  final double scale;
  // Stroke width of each vertical waveform bar.
  final double strokeWidth;
  // Horizontal spacing between bars, in device pixels.
  final double pixelsPerStep;
  // Pre-extracted waveform data to draw.
  final Waveform waveform;
  // Offset within the waveform at which rendering begins.
  final Duration start;
  // Length of the waveform window to render.
  final Duration duration;
  // Player whose position drives the progress coloring.
  final AudioPlayer audioPlayer;
  const _AudioWaveformWidget({
    Key? key,
    required this.waveform,
    required this.start,
    required this.duration,
    required this.audioPlayer,
    this.waveColor = Colors.blue,
    this.scale = 1.0,
    this.strokeWidth = 5.0,
    this.pixelsPerStep = 8.0,
  }) : super(key: key);
  @override
  __AudioWaveformWidgetState createState() => __AudioWaveformWidgetState();
}
class __AudioWaveformWidgetState extends State<_AudioWaveformWidget> {
  // (Removed unused SingleTickerProviderStateMixin — nothing in this state
  // creates a Ticker.)

  /// Playback progress in percent (0–100), consumed by the painter to split
  /// the wave into played/unplayed segments.
  double progress = 0;

  /// Held so the listener can be canceled in [dispose]; the original never
  /// canceled it, allowing setState on an unmounted state.
  StreamSubscription<Duration>? _positionSub;

  @override
  void initState() {
    super.initState();
    _positionSub = widget.audioPlayer.positionStream.listen((position) {
      final total = widget.audioPlayer.duration;
      // Guard against an unknown or zero duration (the original divided by
      // it unconditionally, risking NaN/Infinity).
      if (total == null || total == Duration.zero || !mounted) return;
      var next = position.inMilliseconds / total.inMilliseconds * 100;
      if (next >= 100) {
        // Playback finished: rewind so the next tap starts from zero.
        next = 0;
        widget.audioPlayer.stop();
        widget.audioPlayer.seek(Duration.zero);
      }
      // Keep side effects out of the setState closure; only mutate state here.
      setState(() => progress = next);
    });
  }

  @override
  void dispose() {
    _positionSub?.cancel();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return ClipRect(
      child: CustomPaint(
        painter: _AudioWaveformPainter(
          waveColor: widget.waveColor,
          waveform: widget.waveform,
          start: widget.start,
          duration: widget.duration,
          scale: widget.scale,
          strokeWidth: widget.strokeWidth,
          pixelsPerStep: widget.pixelsPerStep,
          progressPercentage: progress,
          progressColor: Theme.of(context).colorScheme.focusedBorder,
          color: Theme.of(context).colorScheme.border,
        ),
      ),
    );
  }
}
/// Paints a min/max waveform as vertical rounded strokes, coloring the part
/// before [progressPercentage] with [progressColor] and the rest with
/// [color].
class _AudioWaveformPainter extends CustomPainter {
  /// Amplitude multiplier applied to each 16-bit sample.
  final double scale;
  /// Stroke width of each vertical bar.
  final double strokeWidth;
  /// Horizontal spacing between bars, in device pixels.
  final double pixelsPerStep;
  final Waveform waveform;
  /// Offset within the waveform at which rendering begins.
  final Duration start;
  /// Length of the waveform window to render.
  final Duration duration;
  /// Playback progress, 0–100.
  final double progressPercentage;
  final Color progressColor;
  final Color color;

  _AudioWaveformPainter({
    required this.waveform,
    required this.start,
    required this.duration,
    required this.progressPercentage,
    required this.color,
    required this.progressColor,
    // NOTE(review): accepted but never used — drawing uses [color] and
    // [progressColor]. Kept so existing call sites keep compiling.
    Color waveColor = Colors.blue,
    this.scale = 1.0,
    this.strokeWidth = 5.0,
    this.pixelsPerStep = 8.0,
  });

  @override
  void paint(Canvas canvas, Size size) {
    // Nothing to draw for an empty window; a zero-width canvas would make
    // the per-device-pixel math divide by zero.
    if (duration == Duration.zero || size.width <= 0) return;
    final width = size.width;
    final height = size.height;
    final waveformPixelsPerWindow = waveform.positionToPixel(duration).toInt();
    final waveformPixelsPerDevicePixel = waveformPixelsPerWindow / width;
    final waveformPixelsPerStep = waveformPixelsPerDevicePixel * pixelsPerStep;
    final sampleOffset = waveform.positionToPixel(start);
    // Align the first bar to the step grid relative to the start offset.
    final sampleStart = -sampleOffset % waveformPixelsPerStep;
    final totalLength = waveformPixelsPerWindow;
    // Paint for the already-played portion of the wave.
    final wavePaintB = Paint()
      ..style = PaintingStyle.stroke
      ..strokeWidth = strokeWidth
      ..strokeCap = StrokeCap.round
      ..color = progressColor;
    // Paint for the not-yet-played portion.
    final wavePaintA = Paint()
      ..style = PaintingStyle.stroke
      ..strokeWidth = strokeWidth
      ..strokeCap = StrokeCap.round
      ..color = color;
    for (var i = sampleStart.toDouble();
        i <= waveformPixelsPerWindow + 1.0;
        i += waveformPixelsPerStep) {
      final sampleIdx = (sampleOffset + i).toInt();
      final x = i / waveformPixelsPerDevicePixel;
      final minY = normalise(waveform.getPixelMin(sampleIdx), height);
      final maxY = normalise(waveform.getPixelMax(sampleIdx), height);
      canvas.drawLine(
        Offset(x + strokeWidth / 2, max(strokeWidth * 0.75, minY)),
        Offset(x + strokeWidth / 2, min(height - strokeWidth * 0.75, maxY)),
        // Bars left of the progress fraction use the progress paint.
        i / totalLength < progressPercentage / 100 ? wavePaintB : wavePaintA,
      );
    }
  }

  @override
  bool shouldRepaint(covariant _AudioWaveformPainter oldDelegate) {
    // The original compared only the progress, so theme-color or waveform
    // changes were silently ignored until the next progress tick.
    return oldDelegate.progressPercentage != progressPercentage ||
        oldDelegate.progressColor != progressColor ||
        oldDelegate.color != color ||
        oldDelegate.waveform != waveform ||
        oldDelegate.duration != duration;
  }

  /// Maps a 16-bit sample [s] (scaled by [scale] and clamped to the int16
  /// range) to a y coordinate within [height], with zero amplitude at
  /// mid-height.
  double normalise(int s, double height) {
    final y = 32768 + (scale * s).clamp(-32768.0, 32767.0).toDouble();
    return height - 1 - y * height / 65536;
  }
}