// didvan-app/lib/views/widgets/ai_voice_chat_dialog.dart
//
// NOTE(review): the original lines here were web-viewer residue (file stats
// and Unicode warnings), not Dart source; replaced with this comment header.
// ignore_for_file: deprecated_member_use
import 'dart:async';
import 'dart:convert';
import 'dart:typed_data';
import 'dart:ui';
import 'dart:math' as math;
import 'dart:io';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:record/record.dart';
import 'package:flutter_sound/flutter_sound.dart' hide Codec;
import 'package:flutter_sound_platform_interface/flutter_sound_platform_interface.dart'
as fsp;
import 'package:socket_io_client/socket_io_client.dart' as IO;
import 'package:didvan/views/widgets/didvan/text.dart';
import 'package:path_provider/path_provider.dart';
import 'package:didvan/services/network/request.dart';
import 'package:audio_session/audio_session.dart';
/// Full-screen dialog hosting the realtime AI voice chat experience.
///
/// Streams microphone audio to a Socket.IO backend and plays the AI's PCM
/// audio responses back. All logic lives in the private state class.
class AiVoiceChatDialog extends StatefulWidget {
  const AiVoiceChatDialog({super.key});

  @override
  State<AiVoiceChatDialog> createState() => _AiVoiceChatDialogState();
}
class _AiVoiceChatDialogState extends State<AiVoiceChatDialog>
    with TickerProviderStateMixin {
  // --- Socket.IO Configuration ---
  final String _socketUrl = 'https://socket.houshan.ai';
  IO.Socket? _socket;
  bool _isConnected = false;

  // --- Audio Configuration ---
  static const int inputSampleRate = 16000; // mic capture rate (Hz)
  static const int geminiSampleRate = 24000; // AI response playback rate (Hz)

  // --- VAD Settings ---
  static const double vadThreshold = 0.05; // RMS above this counts as speech
  static const double speechThreshold = 0.1;

  // --- Simple, reliable barge-in approach: fixed RMS threshold ---
  // AI audio bleeding from the speaker into the mic usually has RMS around
  // 0.05-0.12; the user speaking directly into the microphone is usually
  // above 0.15.
  static const double userInterruptThreshold =
      0.25; // lower threshold -> higher sensitivity
  static const int ignoreInitialMs = 800; // ignore the first 800 ms entirely
  static const int sustainedChunksRequired =
      4; // consecutive loud chunks required to confirm an interrupt
  int _interruptChunkCount = 0; // consecutive chunks above the threshold
  DateTime? _aiPlaybackStartTime; // when AI playback/reception started
  static const int vadSustainMs = 150;
  static const int silenceTimeoutMs = 1000; // silence before speech is "over"
  Timer? _silenceTimer;
  bool _isSpeechActive = false;
  bool _ignoreAudioDuringAIPlayback = false; // gate mic input while AI talks

  // --- States ---
  bool _isRecording = false;
  bool _isAiSpeaking = false;
  String _statusText = 'در حال اتصال...';
  int _speechStartTime = 0;
  int _lastSpeechTime = 0;
  int _chunkCount = 0; // audio chunks uploaded this session

  // --- Controllers (UI Animations) ---
  late AnimationController _orbController;
  late AnimationController _rippleController;
  late AnimationController _waveController;

  // --- Audio ---
  final AudioRecorder _audioRecorder = AudioRecorder();
  final FlutterSoundPlayer _audioPlayer = FlutterSoundPlayer();
  StreamSubscription<Uint8List>? _audioStreamSubscription;
  Timer? _vadTimer;
  Timer? _speakingTimer;
  bool _isPlayerInitialized = false;

  // Buffers
  final BytesBuilder _fullResponseAccumulator = BytesBuilder();
  Timer? _playbackTriggerTimer; // debounce: fires when chunks stop arriving
  bool _isPlayingFromQueue = false;
  final List<int> _sendBuffer = [];
  static const int targetChunkSize = 5462; // bytes per uploaded audio chunk
  bool _wasStoppedByUser = false; // playback was interrupted by the user

  // --- Visualizer Data ---
  final List<double> _audioWaveHeights = List.generate(40, (_) => 0.1);
@override
void initState() {
  super.initState();
  _initAnimations();
  _initAudio(); // async, fire-and-forget: player may finish opening later
  _connectSocket();
}
/// Creates the three animation controllers driving the dialog's visuals.
///
/// - [_orbController]: slow breathing pulse of the central orb.
/// - [_rippleController]: continuously expanding ripple rings.
/// - [_waveController]: 50 ms tick animating the circular waveform by
///   smoothing each bar toward a random target while audio is active.
void _initAnimations() {
  _orbController = AnimationController(
    vsync: this,
    duration: const Duration(milliseconds: 2000),
  )..repeat(reverse: true);
  _rippleController = AnimationController(
    vsync: this,
    duration: const Duration(milliseconds: 2000),
  )..repeat();
  _waveController = AnimationController(
    vsync: this,
    duration: const Duration(milliseconds: 50),
  )..addListener(() {
      if (_isRecording || _isAiSpeaking) {
        setState(() {
          final random = math.Random();
          for (int i = 0; i < _audioWaveHeights.length; i++) {
            double target = 0.1 + (random.nextDouble() * 0.4);
            // Bigger visuals while the AI talks; bigger still while the
            // user's speech is detected.
            if (_isAiSpeaking) target *= 1.5;
            if (_isSpeechActive) target *= 2.0;
            // Low-pass smoothing: move 20% of the way toward the target.
            _audioWaveHeights[i] =
                _audioWaveHeights[i] + (target - _audioWaveHeights[i]) * 0.2;
          }
        });
      } else {
        // Decay bars toward rest; no setState here, so the decayed values
        // only become visible on the next externally-triggered repaint.
        for (int i = 0; i < _audioWaveHeights.length; i++) {
          _audioWaveHeights[i] = _audioWaveHeights[i] * 0.9;
        }
      }
    });
}
/// Configures the platform audio session and opens the playback player.
///
/// The session is set up for media-style playback while recording (not an
/// in-call route):
/// - iOS: playAndRecord category, routed to the loudspeaker, Bluetooth
///   allowed, and voiceChat mode so the device DSP performs echo
///   cancellation.
/// - Android: speech content over the voiceCommunication usage, enabling
///   the platform's conversation-mode processing.
Future<void> _initAudio() async {
  try {
    // Configure the AudioSession so output goes through MEDIA, not CALL.
    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration(
      // 1. playAndRecord: simultaneous capture and playback.
      avAudioSessionCategory: AVAudioSessionCategory.playAndRecord,
      // 2. Key options:
      //    defaultToSpeaker: force output to the loudspeaker (not earpiece).
      //    allowBluetooth: permit Bluetooth headsets.
      avAudioSessionCategoryOptions:
          AVAudioSessionCategoryOptions.allowBluetooth |
              AVAudioSessionCategoryOptions.defaultToSpeaker,
      // 3. voiceChat mode activates the phone's DSP echo cancellation.
      avAudioSessionMode: AVAudioSessionMode.voiceChat,
      androidAudioAttributes: const AndroidAudioAttributes(
        contentType: AndroidAudioContentType.speech,
        flags: AndroidAudioFlags.none,
        usage: AndroidAudioUsage
            .voiceCommunication, // conversation mode on Android too
      ),
      androidAudioFocusGainType: AndroidAudioFocusGainType.gain,
    ));
    await _audioPlayer.openPlayer();
    await _audioPlayer
        .setSubscriptionDuration(const Duration(milliseconds: 10));
    _isPlayerInitialized = true;
    debugPrint('✅ Audio player initialized with VOICE CHAT + SPEAKER');
  } catch (e) {
    debugPrint('❌ Error initializing audio player: $e');
  }
}
// ---------------------------------------------------------------------------
// Helper: Stop AI Playback (Internal Logic Only)
// ---------------------------------------------------------------------------
/// Halts any in-progress or pending AI audio playback.
///
/// Marks the stop as user-initiated so [_onPlaybackFinished] skips its
/// normal state reset, cancels the pending playback trigger, drops any
/// buffered response audio, and stops the player if it is running.
Future<void> _stopAiPlayback() async {
  final hasActivity = _isAiSpeaking ||
      _isPlayingFromQueue ||
      _fullResponseAccumulator.isNotEmpty;
  if (!hasActivity) return;

  debugPrint('🛑 Stopping AI Playback (Interrupted)');
  _wasStoppedByUser = true;
  _playbackTriggerTimer?.cancel();
  _fullResponseAccumulator.clear();

  try {
    if (_audioPlayer.isPlaying) {
      await _audioPlayer.stopPlayer();
    }
  } catch (e) {
    debugPrint("⚠️ Error stopping player: $e");
  }

  if (!mounted) return;
  setState(() {
    _isPlayingFromQueue = false;
    _isAiSpeaking = false;
    if (_isRecording) {
      _statusText = '🎤 در حال صحبت...';
    }
  });
}
// ---------------------------------------------------------------------------
// Socket.IO Connection
// ---------------------------------------------------------------------------
/// Opens the Socket.IO connection and wires up all server event handlers.
///
/// On connect, recording starts automatically. The server may deliver AI
/// audio under several event names; all of them route to
/// [_handleGeminiAudioChunk].
void _connectSocket() {
  try {
    _socket = IO.io(
      _socketUrl,
      IO.OptionBuilder()
          .setTransports(['websocket'])
          .setAuth({'token': 'Bearer ${RequestService.token}'})
          .disableAutoConnect()
          .build(),
    );
    _socket!.connect();
    _socket!.onConnect((_) {
      debugPrint('✅ Socket Connected');
      _socket!.emit('test', 'Hello from Flutter');
      if (mounted) {
        setState(() {
          _isConnected = true;
          _statusText = 'آماده گوش دادن...';
        });
        _startRecordingStream();
      }
    });
    _socket!.onDisconnect((_) {
      debugPrint('❌ Socket Disconnected');
      if (mounted) {
        setState(() {
          _isConnected = false;
          _statusText = 'اتصال قطع شد';
        });
      }
    });
    // The backend has used several event names for response audio over
    // time; accept all of them.
    _socket!
        .on('gemini_audio_chunk', (data) => _handleGeminiAudioChunk(data));
    _socket!.on('audio', (data) => _handleGeminiAudioChunk(data));
    _socket!.on('audio_chunk', (data) => _handleGeminiAudioChunk(data));
    _socket!.on('gemini_audio', (data) => _handleGeminiAudioChunk(data));
    _socket!.on('response', (data) => _handleGeminiAudioChunk(data));
    _socket!.on('error', (error) => debugPrint('❌ Server Error: $error'));
  } catch (e) {
    debugPrint('❌ Connection Error: $e');
    // FIX: guard setState with mounted (it was unguarded here, unlike the
    // onConnect/onDisconnect handlers) to avoid a crash if the dialog is
    // disposed before/while the connection attempt fails.
    if (mounted) setState(() => _statusText = 'خطا در اتصال');
  }
}
// ---------------------------------------------------------------------------
// Recording & VAD (User Input)
// ---------------------------------------------------------------------------
/// Requests mic permission and starts the 16 kHz PCM capture stream.
///
/// Any ongoing AI playback is stopped first. Captured chunks are forwarded
/// to [_processAudioChunk] for VAD, barge-in detection and upload.
Future<void> _startRecordingStream() async {
  if (!_isConnected) return;
  try {
    if (await _audioRecorder.hasPermission()) {
      await _stopAiPlayback();
      // FIX: the widget may have been disposed while awaiting above;
      // calling setState on a dead State would throw.
      if (!mounted) return;
      setState(() {
        _isRecording = true;
        _statusText = '👂 در حال گوش دادن...';
        _chunkCount = 0;
        _speechStartTime = 0;
        _lastSpeechTime = 0;
        _isAiSpeaking = false;
      });
      _sendBuffer.clear();
      _waveController.repeat();
      final stream = await _audioRecorder.startStream(
        const RecordConfig(
          encoder: AudioEncoder.pcm16bits,
          sampleRate: 16000,
          numChannels: 1,
          // Keeping these enabled is critical for echo/noise handling.
          echoCancel: true,
          noiseSuppress: true,
          autoGain: false,
        ),
      );
      _audioStreamSubscription = stream.listen((data) {
        _processAudioChunk(data);
      });
      debugPrint('🎙️ Recording started at 16000Hz');
    }
  } catch (e) {
    debugPrint('Start Recording Error: $e');
    // FIX: same mounted guard for the failure path.
    if (mounted) {
      setState(() {
        _isRecording = false;
        _statusText = 'خطا در دسترسی به میکروفون';
      });
    }
  }
}
/// Discards all pending and playing AI audio and resets playback flags.
///
/// Unlike [_stopAiPlayback] this is synchronous (player-stop errors are
/// swallowed) and it also clears the barge-in detection counters. When
/// [resetStatus] is true the status line is refreshed to match the current
/// recording state.
void _flushAiBuffers({bool resetStatus = true}) {
  _playbackTriggerTimer?.cancel();
  _fullResponseAccumulator.clear();
  _isPlayingFromQueue = false;
  _isAiSpeaking = false;
  _ignoreAudioDuringAIPlayback = false;
  _aiPlaybackStartTime = null; // restart the barge-in ignore window later
  _interruptChunkCount = 0; // drop any partial loud-chunk streak
  try {
    if (_audioPlayer.isPlaying) {
      _audioPlayer.stopPlayer().catchError((e) {
        debugPrint('⚠️ Error stopping: $e');
      });
    }
  } catch (_) {}
  if (resetStatus && mounted) {
    final nextStatus = _isRecording ? '🎤 در حال صحبت...' : 'آماده';
    setState(() => _statusText = nextStatus);
  }
}
/// Handles one raw PCM16 chunk from the microphone stream.
///
/// Pipeline: (1) compute RMS loudness, (2) while AI audio is playing, run
/// barge-in detection against [userInterruptThreshold], (3) run the normal
/// VAD state machine, (4) buffer and upload speech audio to the server as
/// base64 chunks of [targetChunkSize] bytes.
void _processAudioChunk(Uint8List chunkData) {
  if (!_isRecording || _socket == null) return;
  // 1. Compute loudness (RMS) over little-endian 16-bit samples.
  double sumSquares = 0;
  int sampleCount = 0;
  for (int i = 0; i < chunkData.length; i += 2) {
    if (i + 1 < chunkData.length) {
      int sample = chunkData[i] | (chunkData[i + 1] << 8);
      if (sample > 32767) sample -= 65536; // to signed 16-bit
      double floatSample = sample / 32768.0;
      sumSquares += floatSample * floatSample;
      sampleCount++;
    }
  }
  final rms = sampleCount > 0 ? math.sqrt(sumSquares / sampleCount) : 0;
  // 2. Barge-in check — simple, reliable fixed-threshold approach.
  if (_ignoreAudioDuringAIPlayback) {
    if (_aiPlaybackStartTime != null) {
      final elapsed =
          DateTime.now().difference(_aiPlaybackStartTime!).inMilliseconds;
      // During the initial window (ignoreInitialMs) ignore everything so
      // the AI audio can settle before detecting the user talking over it.
      if (elapsed < ignoreInitialMs) {
        return;
      }
      // Loudness above the barge-in threshold?
      if (rms > userInterruptThreshold) {
        _interruptChunkCount++;
        // Several consecutive loud chunks => the user is really speaking.
        if (_interruptChunkCount >= sustainedChunksRequired) {
          debugPrint(
              '🧯 User speaking detected! RMS: ${rms.toStringAsFixed(3)}, Chunks: $_interruptChunkCount - Interrupting AI');
          _wasStoppedByUser = true;
          _interruptChunkCount = 0;
          _flushAiBuffers();
          // Fall through so the user's speech is processed normally below.
        } else {
          return; // not enough consecutive loud chunks yet
        }
      } else {
        // Loudness dropped back down; reset the streak counter.
        _interruptChunkCount = 0;
        return;
      }
    }
  }
  // 3. Normal VAD state machine.
  double currentThreshold = vadThreshold;
  if (rms > currentThreshold) {
    if (_silenceTimer?.isActive ?? false) {
      _silenceTimer!.cancel();
    }
    if (!_isSpeechActive) {
      _isSpeechActive = true;
      setState(() => _statusText = '🎤 در حال صحبت...');
    }
  } else {
    if (_isSpeechActive &&
        (_silenceTimer == null || !_silenceTimer!.isActive)) {
      // Keep "speech active" alive through a grace period of silence
      // before declaring the utterance over.
      _silenceTimer =
          Timer(const Duration(milliseconds: silenceTimeoutMs), () {
        _isSpeechActive = false;
        _silenceTimer = null;
        setState(() => _statusText = 'درحال گوش دادن...');
      });
    }
  }
  // 4. Upload: while speech (or its grace period) is active, accumulate
  // bytes and ship fixed-size chunks; otherwise flush any remainder.
  if (_isSpeechActive || (_silenceTimer?.isActive ?? false)) {
    _sendBuffer.addAll(chunkData);
    while (_sendBuffer.length >= targetChunkSize) {
      final chunkToSend = _sendBuffer.sublist(0, targetChunkSize);
      final remaining = _sendBuffer.sublist(targetChunkSize);
      _sendBuffer.clear();
      _sendBuffer.addAll(remaining);
      final base64Audio = base64Encode(Uint8List.fromList(chunkToSend));
      _socket!.emit('audio_chunk', base64Audio);
      _chunkCount++;
    }
  } else {
    if (_sendBuffer.isNotEmpty) {
      final base64Audio = base64Encode(Uint8List.fromList(_sendBuffer));
      _socket!.emit('audio_chunk', base64Audio);
      _sendBuffer.clear();
    }
  }
}
/// Stops the microphone stream and clears all capture state.
///
/// Safe to call when not recording (no-op). The silence timer is cancelled
/// first so a pending VAD timeout cannot fire after capture has stopped.
Future<void> _stopRecordingStream() async {
  _silenceTimer?.cancel();
  _silenceTimer = null;
  if (!_isRecording) return;
  try {
    // FIX: this is reached from pop/close handlers; guard setState so a
    // late call on a disposed widget cannot throw. The flag is still
    // cleared either way so the recorder is not considered active.
    if (mounted) {
      setState(() {
        _isRecording = false;
        _statusText = 'متوقف شد';
      });
    } else {
      _isRecording = false;
    }
    _waveController.stop();
    _vadTimer?.cancel();
    await _audioStreamSubscription?.cancel();
    _audioStreamSubscription = null;
    await _audioRecorder.stop();
    _sendBuffer.clear();
  } catch (e) {
    debugPrint('Stop Recording Error: $e');
  }
}
// ---------------------------------------------------------------------------
// Receive and Play (AI Output)
// ---------------------------------------------------------------------------
/// Scales every PCM16 sample in [audioData] by [volumeFactor].
///
/// Samples are little-endian signed 16-bit; results are clamped to the
/// valid range. Returns a new buffer of the same length (a trailing odd
/// byte, if any, is left as zero — exactly like the byte-wise original).
Uint8List _reduceAudioVolume(Uint8List audioData, double volumeFactor) {
  final result = Uint8List(audioData.length);
  debugPrint(
      '🔉 Reducing volume: ${audioData.length} bytes, factor: $volumeFactor');
  int maxOriginal = 0;
  int maxReduced = 0;
  // ByteData views give us little-endian 16-bit access without manual
  // byte shuffling; only complete sample pairs are processed.
  final src = ByteData.sublistView(audioData);
  final dst = ByteData.sublistView(result);
  final evenLength = audioData.length & ~1;
  for (int i = 0; i < evenLength; i += 2) {
    final sample = src.getInt16(i, Endian.little);
    if (sample.abs() > maxOriginal) maxOriginal = sample.abs();
    // Heavy attenuation: round, then clamp into the signed 16-bit range.
    final scaled = (sample * volumeFactor).round().clamp(-32768, 32767).toInt();
    if (scaled.abs() > maxReduced) maxReduced = scaled.abs();
    dst.setInt16(i, scaled, Endian.little);
  }
  debugPrint(
      '✅ Volume reduced - Max original: $maxOriginal, Max reduced: $maxReduced');
  return result;
}
/// Handles an incoming AI audio event from the socket.
///
/// Accepts either a raw base64 string or a map with a `data` field
/// (optionally a `data:` URI). Decoded bytes are accumulated; playback is
/// triggered by [_playAccumulatedAudio] once no new chunk has arrived for
/// 800 ms (debounced via [_playbackTriggerTimer]).
void _handleGeminiAudioChunk(dynamic data) {
  try {
    // FIX: decode and validate BEFORE touching the mic-suppression flags.
    // Previously `_ignoreAudioDuringAIPlayback` was set first, so a
    // malformed or empty event could mute microphone processing
    // indefinitely — no playback would ever start to reset the flag.
    String base64String;
    if (data is String) {
      base64String = data;
    } else if (data is Map && data['data'] != null) {
      base64String = data['data'] as String;
    } else {
      return;
    }
    if (base64String.startsWith('data:')) {
      base64String = base64String.split(',').last;
    }
    final Uint8List bytes = base64Decode(base64String);
    if (bytes.isEmpty) return;

    _ignoreAudioDuringAIPlayback = true;
    // FIX: start the barge-in "ignore initial" window at the FIRST chunk
    // of a response; resetting it on every chunk kept restarting the
    // window for the whole stream. _flushAiBuffers / _onPlaybackFinished
    // null it out, re-arming this for the next response.
    _aiPlaybackStartTime ??= DateTime.now();

    _fullResponseAccumulator.add(bytes);
    if (mounted && !_isAiSpeaking) {
      setState(() {
        _isAiSpeaking = true;
        _statusText = '🔄 منتظر پاسخ AI...';
      });
    }
    // Debounce: treat 800 ms without a new chunk as end-of-response.
    _playbackTriggerTimer?.cancel();
    _playbackTriggerTimer = Timer(const Duration(milliseconds: 800), () {
      debugPrint(
          '✅ Response complete. Total size: ${_fullResponseAccumulator.length} bytes');
      _playAccumulatedAudio();
    });
  } catch (e) {
    debugPrint('❌ Error receiving chunk: $e');
  }
}
// Main playback method, with separate logic for web and mobile.
/// Plays the accumulated AI response audio.
///
/// Skipped entirely if the user is currently speaking or a previous
/// playback is still running. Audio is attenuated to 8% amplitude first so
/// speaker bleed into the microphone stays below the barge-in threshold.
/// Web plays straight from the buffer; mobile writes a temporary PCM file.
Future<void> _playAccumulatedAudio() async {
  if (_isSpeechActive) {
    debugPrint('⚠️ User is speaking, cancelling AI playback');
    _fullResponseAccumulator.clear();
    _isAiSpeaking = false;
    return;
  }
  if (_isPlayingFromQueue) {
    debugPrint('⚠️ Already playing, skipping...');
    return;
  }
  final Uint8List totalAudioData = _fullResponseAccumulator.toBytes();
  _fullResponseAccumulator.clear();
  if (totalAudioData.isEmpty) {
    debugPrint('⚠️ No audio data to play');
    return;
  }
  // Per-sample attenuation: cap output at 8% of the original amplitude.
  debugPrint('📊 Original audio size: ${totalAudioData.length} bytes');
  final reducedAudioData = _reduceAudioVolume(totalAudioData, 0.08);
  debugPrint('📊 Reduced audio size: ${reducedAudioData.length} bytes');
  _isPlayingFromQueue = true;
  try {
    if (mounted) {
      setState(() => _statusText = '🔊 AI در حال صحبت...');
    }
    // --- Web path ---
    if (kIsWeb) {
      // On the web, play directly from the in-memory buffer instead of
      // writing a file.
      await _audioPlayer.startPlayer(
        fromDataBuffer: reducedAudioData,
        codec: fsp.Codec.pcm16,
        sampleRate: geminiSampleRate,
        numChannels: 1,
        whenFinished: () {
          _onPlaybackFinished();
        },
      );
    } else {
      // Mobile path: write to a unique temporary file and play from disk.
      final tempDir = await getTemporaryDirectory();
      final tempFile = File(
          '${tempDir.path}/ai_response_${DateTime.now().millisecondsSinceEpoch}.pcm');
      await tempFile.writeAsBytes(reducedAudioData);
      await _audioPlayer.startPlayer(
        fromURI: tempFile.path,
        codec: fsp.Codec.pcm16,
        sampleRate: geminiSampleRate,
        numChannels: 1,
        whenFinished: () {
          _onPlaybackFinished(tempFile: tempFile);
        },
      );
    }
  } catch (e) {
    debugPrint('❌ Playback Error: $e');
    _isPlayingFromQueue = false;
    if (mounted) {
      setState(() {
        _isAiSpeaking = false;
        _statusText = _isRecording ? '👂 در حال گوش دادن...' : 'آماده';
      });
    }
  }
}
// Helper invoked when playback ends, for cleanup.
/// Cleanup after AI playback ends.
///
/// Deletes the temporary PCM file on mobile (the web path never creates
/// one). When playback ran to completion, re-enables microphone
/// processing and restores the idle status; when the user interrupted,
/// [_flushAiBuffers] already reset state, so only the flag is cleared.
void _onPlaybackFinished({File? tempFile}) {
  debugPrint('✅ Playback finished');
  // Mobile only: best-effort removal of the temp file.
  if (tempFile != null) {
    try {
      if (tempFile.existsSync()) tempFile.deleteSync();
    } catch (e) {
      /* ignore */
    }
  }
  if (_wasStoppedByUser) {
    // Interrupted: the flush path already reset playback state.
    _wasStoppedByUser = false;
    return;
  }
  _ignoreAudioDuringAIPlayback = false;
  _aiPlaybackStartTime = null; // re-arm the barge-in window
  if (mounted) {
    setState(() {
      _isAiSpeaking = false;
      _isPlayingFromQueue = false;
      _statusText = _isRecording ? '👂 در حال گوش دادن...' : 'آماده';
    });
  }
}
/// Releases the socket, audio resources, animation controllers and timers.
@override
void dispose() {
  _stopAiPlayback();
  _audioStreamSubscription?.cancel();
  if (_isRecording) _audioRecorder.stop();
  _vadTimer?.cancel();
  _speakingTimer?.cancel();
  // FIX: these two timers were never cancelled here. A pending silence
  // timeout would call setState on a disposed widget, and a pending
  // playback trigger would start playback on a closed player.
  _silenceTimer?.cancel();
  _playbackTriggerTimer?.cancel();
  _socket?.disconnect();
  _socket?.dispose();
  _orbController.dispose();
  _rippleController.dispose();
  _waveController.dispose();
  _audioRecorder.dispose();
  if (_isPlayerInitialized) {
    _audioPlayer.closePlayer();
  }
  super.dispose();
}
// ---------------------------------------------------------------------------
// UI Design & Theme Colors (DESIGN UPDATE ONLY)
// ---------------------------------------------------------------------------
/// Background gradient for the current state.
///
/// Priority: disconnected > AI speaking > user speaking > idle.
List<Color> _getGradientColors() {
  const disconnected = [Color(0xFF2C3E50), Color(0xFF000000)];
  const aiSpeaking = [Color(0xFF1A2980), Color(0xFF26D0CE)];
  const userSpeaking = [Color(0xFFCB356B), Color(0xFFBD3F32)];
  const idle = [Color(0xFF141E30), Color(0xFF243B55)];
  if (!_isConnected) return disconnected;
  if (_isAiSpeaking) return aiSpeaking;
  if (_isSpeechActive) return userSpeaking;
  return idle;
}
/// Accent/glow color matching the current state; mirrors the priority
/// order used by [_getGradientColors].
Color _getPrimaryGlowColor() {
  if (!_isConnected) {
    return Colors.red.shade400;
  } else if (_isAiSpeaking) {
    return const Color.fromARGB(255, 0, 170, 170);
  } else if (_isSpeechActive) {
    return const Color.fromARGB(255, 196, 4, 4);
  }
  return Colors.white.withOpacity(0.5);
}
/// Builds the full-screen, gradient-backed voice chat UI.
@override
Widget build(BuildContext context) {
  return PopScope(
    canPop: true,
    onPopInvoked: (didPop) async {
      // Ensure the mic stream is torn down when the dialog is popped.
      if (didPop) {
        await _stopRecordingStream();
      }
    },
    child: Dialog(
      backgroundColor: Colors.transparent,
      insetPadding: EdgeInsets.zero,
      child: AnimatedContainer(
        // Smoothly blend between the state-dependent gradients.
        duration: const Duration(milliseconds: 1000),
        curve: Curves.easeInOut,
        decoration: BoxDecoration(
          gradient: LinearGradient(
            begin: Alignment.topLeft,
            end: Alignment.bottomRight,
            colors: _getGradientColors(),
          ),
        ),
        child: Stack(
          children: [
            // Soft blur layer over the gradient background.
            Positioned.fill(
              child: BackdropFilter(
                filter: ImageFilter.blur(sigmaX: 30, sigmaY: 30),
                child: Container(color: Colors.transparent),
              ),
            ),
            SafeArea(
              child: Column(
                children: [
                  _buildTopBar(context),
                  const Spacer(),
                  _buildCentralVisualizer(),
                  const Spacer(),
                  _buildStatusArea(),
                  const SizedBox(height: 40),
                ],
              ),
            ),
          ],
        ),
      ),
    ),
  );
}
/// Top bar: close button (leading), assistant title chip (center), and a
/// trailing spacer that keeps the chip visually centered.
Widget _buildTopBar(BuildContext context) {
  return Padding(
    padding: const EdgeInsets.symmetric(horizontal: 20, vertical: 16),
    child: Row(
      mainAxisAlignment: MainAxisAlignment.spaceBetween,
      children: [
        IconButton(
          onPressed: () async {
            // Stop capture before dismissing the dialog.
            await _stopRecordingStream();
            if (context.mounted) Navigator.pop(context);
          },
          icon: Container(
            padding: const EdgeInsets.all(8),
            decoration: BoxDecoration(
              shape: BoxShape.circle,
              color: Colors.white.withOpacity(0.1),
            ),
            child: const Icon(Icons.close, color: Colors.white, size: 20),
          ),
        ),
        Container(
          padding: const EdgeInsets.symmetric(horizontal: 12, vertical: 6),
          decoration: BoxDecoration(
            color: Colors.black26,
            borderRadius: BorderRadius.circular(20),
            border: Border.all(color: Colors.white10),
          ),
          child: Row(
            mainAxisSize: MainAxisSize.min,
            children: [
              const Icon(Icons.auto_awesome, color: Colors.white70, size: 14),
              const SizedBox(width: 8),
              DidvanText(
                'هوشان ، دستیار صوتی دیدوان',
                color: Colors.white.withOpacity(0.9),
                fontSize: 12,
                fontWeight: FontWeight.w600,
              ),
            ],
          ),
        ),
        // Mirrors the IconButton footprint so the chip stays centered.
        const SizedBox(width: 48),
      ],
    ),
  );
}
/// Central 350x350 visualizer: ripple rings, a circular waveform, and a
/// pulsing glowing orb whose scale follows the loudest wave bar.
Widget _buildCentralVisualizer() {
  return SizedBox(
    height: 350,
    width: 350,
    child: Stack(
      alignment: Alignment.center,
      children: [
        // Outermost layer: expanding ripples.
        AnimatedBuilder(
          animation: _rippleController,
          builder: (context, child) {
            return CustomPaint(
              painter: RipplePainter(
                animationValue: _rippleController.value,
                color: _getPrimaryGlowColor(),
              ),
              size: const Size(350, 350),
            );
          },
        ),
        // Middle layer: circular waveform driven by _audioWaveHeights.
        AnimatedBuilder(
          animation: _waveController,
          builder: (context, child) {
            return CustomPaint(
              painter: ModernWavePainter(
                waveHeights: _audioWaveHeights,
                color: _getPrimaryGlowColor(),
              ),
              size: const Size(280, 280),
            );
          },
        ),
        // Innermost layer: glowing orb with a state-dependent icon.
        AnimatedBuilder(
          animation: _orbController,
          builder: (context, child) {
            // Breathe with the controller, nudged by the peak bar height.
            final scale = 1.0 +
                (_orbController.value * 0.05) +
                (math.max(0, _audioWaveHeights.reduce(math.max) * 0.1));
            return Transform.scale(
              scale: scale,
              child: Container(
                width: 140,
                height: 140,
                decoration: BoxDecoration(
                  shape: BoxShape.circle,
                  boxShadow: [
                    BoxShadow(
                      color: _getPrimaryGlowColor().withOpacity(0.6),
                      blurRadius: 50,
                      spreadRadius: 10,
                    ),
                    BoxShadow(
                      color: Colors.white.withOpacity(0.3),
                      blurRadius: 20,
                      spreadRadius: 0,
                    ),
                  ],
                  gradient: RadialGradient(
                    colors: [
                      Colors.white.withOpacity(0.95),
                      _getPrimaryGlowColor().withOpacity(0.8),
                      _getPrimaryGlowColor().withOpacity(0.2),
                    ],
                    stops: const [0.1, 0.6, 1.0],
                  ),
                ),
                child: Center(
                  child: Icon(
                    _isAiSpeaking ? Icons.graphic_eq : Icons.mic_none_rounded,
                    color: Colors.white.withOpacity(0.9),
                    size: 40,
                  ),
                ),
              ),
            );
          },
        ),
      ],
    ),
  );
}
/// Animated status line under the visualizer.
///
/// The ValueKey makes AnimatedSwitcher treat each distinct [_statusText]
/// as a new child, triggering the fade/slide transition on change.
Widget _buildStatusArea() {
  return Column(
    mainAxisSize: MainAxisSize.min,
    children: [
      AnimatedSwitcher(
        duration: const Duration(milliseconds: 300),
        transitionBuilder: (Widget child, Animation<double> animation) {
          return FadeTransition(
            opacity: animation,
            child: SlideTransition(
              position: Tween<Offset>(
                begin: const Offset(0.0, 0.2),
                end: Offset.zero,
              ).animate(animation),
              child: child,
            ),
          );
        },
        child: DidvanText(
          _statusText,
          key: ValueKey<String>(_statusText),
          fontSize: 18,
          fontWeight: FontWeight.w500,
          color: Colors.white.withOpacity(0.95),
          textAlign: TextAlign.center,
        ),
      ),
      const SizedBox(height: 12),
    ],
  );
}
}
/// Paints two concentric, closed wave rings whose radius is modulated by
/// [waveHeights], using quadratic beziers through midpoints for smoothness.
class ModernWavePainter extends CustomPainter {
  final List<double> waveHeights; // shared list, mutated in place by the state
  final Color color;

  ModernWavePainter({required this.waveHeights, required this.color});

  @override
  void paint(Canvas canvas, Size size) {
    final paint = Paint()
      ..color = color.withOpacity(0.3)
      ..style = PaintingStyle.stroke
      ..strokeWidth = 2.0
      ..strokeCap = StrokeCap.round;
    final center = Offset(size.width / 2, size.height / 2);
    final radius = size.width / 2.5;
    // Two rings: ring j samples the heights 5 bars ahead and sits 15 px
    // further out, so the rings do not overlap exactly.
    for (int j = 0; j < 2; j++) {
      final path = Path();
      final count = waveHeights.length;
      final angleStep = (2 * math.pi) / count;
      // i runs to count INCLUSIVE so the path wraps back to its start.
      for (int i = 0; i <= count; i++) {
        final index = i % count;
        final angle = i * angleStep;
        final offset = j * 5;
        final waveVal = waveHeights[(index + offset) % count];
        final r = radius + (waveVal * 30) + (j * 15);
        final x = center.dx + r * math.cos(angle);
        final y = center.dy + r * math.sin(angle);
        if (i == 0) {
          path.moveTo(x, y);
        } else {
          // Curve through the previous point toward the midpoint, keeping
          // the ring smooth rather than polygonal.
          final prevAngle = (i - 1) * angleStep;
          final prevIndex = (i - 1) % count;
          final prevWave = waveHeights[(prevIndex + offset) % count];
          final prevR = radius + (prevWave * 30) + (j * 15);
          final prevX = center.dx + prevR * math.cos(prevAngle);
          final prevY = center.dy + prevR * math.sin(prevAngle);
          final midX = (prevX + x) / 2;
          final midY = (prevY + y) / 2;
          path.quadraticBezierTo(prevX, prevY, midX, midY);
        }
      }
      path.close();
      canvas.drawPath(path, paint);
    }
  }

  // waveHeights is the SAME list instance mutated each frame, so a field
  // comparison could never detect changes; always repaint.
  @override
  bool shouldRepaint(covariant ModernWavePainter oldDelegate) => true;
}
/// Paints three staggered, expanding ripple rings that fade as they grow.
class RipplePainter extends CustomPainter {
  final double animationValue; // 0..1 phase from the driving controller
  final Color color;

  RipplePainter({required this.animationValue, required this.color});

  @override
  void paint(Canvas canvas, Size size) {
    final center = Offset(size.width / 2, size.height / 2);
    final maxRadius = size.width / 2;
    final ring = Paint()
      ..style = PaintingStyle.stroke
      ..strokeWidth = 1.0;
    for (var ripple = 0; ripple < 3; ripple++) {
      // Each ring is phase-shifted by a third of the cycle and fades out
      // as it approaches the edge.
      final progress = (animationValue + ripple / 3) % 1.0;
      final fade = (1.0 - progress).clamp(0.0, 1.0);
      ring.color = color.withOpacity(fade * 0.3);
      canvas.drawCircle(center, progress * maxRadius, ring);
    }
  }

  // Driven by a continuously repeating controller; always repaint.
  @override
  bool shouldRepaint(covariant RipplePainter oldDelegate) => true;
}