// proxybuy-flutter/lib/screens/mains/profile/help_and_support_page.dart

import 'dart:async';
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter_svg/flutter_svg.dart';
import 'package:image_picker/image_picker.dart';
import 'package:intl/intl.dart';
import 'package:lba/gen/assets.gen.dart';
import 'package:lba/res/colors.dart';
import 'package:lba/widgets/chat_message_audio_player.dart';
import 'package:lba/widgets/chat_text_input_field.dart';
import 'package:lba/widgets/profile_app_bar.dart';
import 'package:lba/widgets/recorded_audio_preview.dart';
import 'package:lba/widgets/recording_indicator.dart';
import 'package:flutter_sound/flutter_sound.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:vibration/vibration.dart';
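
/// A single chat bubble in the support conversation.
///
/// A message can carry any combination of plain [text], a local image
/// ([imagePath]), and a recorded voice note ([audioPath] with its
/// [audioDuration]); [time] is a pre-formatted HH:mm timestamp and [isUser]
/// switches the bubble between user and support styling.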
class ChatMessage {
  final String? text;
  final String? imagePath;
  final String? audioPath;
  final Duration? audioDuration;
  final String time;
  final bool isUser;

  ChatMessage({
    this.text,
    this.imagePath,
    this.audioPath,
    this.audioDuration,
    required this.time,
    required this.isUser,
  });
}

class HelpAndSupportPage extends StatefulWidget {
  const HelpAndSupportPage({super.key});

  @override
  State<HelpAndSupportPage> createState() => _HelpAndSupportPageState();
}
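
/// State for the in-app support chat screen.
///
/// Owns the message list shown in an [AnimatedList], the composer state
/// (text, attached image, recorded voice note), and the
/// [FlutterSoundRecorder] session. Replies are simulated locally with a
/// delayed canned response; no backend call is made here.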
class _HelpAndSupportPageState extends State<HelpAndSupportPage>
    with TickerProviderStateMixin {
  final TextEditingController _textController = TextEditingController();
  final List<ChatMessage> _messages = [];
  final GlobalKey<AnimatedListState> _listKey = GlobalKey<AnimatedListState>();
  late AnimationController _pageEnterAnimationController;
  final ImagePicker _picker = ImagePicker();
  final GlobalKey _attachIconKey = GlobalKey();
  XFile? _attachedImage;
  FlutterSoundRecorder? _audioRecorder;
  bool _isRecording = false;
  String? _recordedAudioPath;
  Duration _recordDuration = Duration.zero;
  Timer? _recordingTimer;
  late AnimationController _micPulseController;
  late Animation<double> _micPulseAnimation;

  @override
  void initState() {
    super.initState();
    _pageEnterAnimationController = AnimationController(
      vsync: this,
      duration: const Duration(milliseconds: 800),
    );
    _micPulseController = AnimationController(
      vsync: this,
      duration: const Duration(milliseconds: 1000),
    )..repeat(reverse: true);
    _micPulseAnimation =
        Tween<double>(begin: 1.0, end: 1.4).animate(_micPulseController);
    _initRecorder();
    _loadInitialMessages();
  }
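
  /// Opens the audio recorder session. Failures are only logged so the rest
  /// of the chat keeps working without voice notes.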
  Future<void> _initRecorder() async {
    try {
      _audioRecorder = FlutterSoundRecorder();
      await _audioRecorder!.openRecorder();
    } catch (e) {
      debugPrint('Error initializing recorder: $e');
    }
  }
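
  /// Seeds the conversation with a greeting from support after a short
  /// delay, then plays the page-enter animation for the input bar.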
  void _loadInitialMessages() {
    Future.delayed(const Duration(milliseconds: 500), () {
      // Guard against the page being disposed before the delay completes.
      if (!mounted) return;
      _addMessage(ChatMessage(
        text: "Hello! How can I assist you today?",
        time: DateFormat('HH:mm').format(DateTime.now()),
        isUser: false,
      ));
      _pageEnterAnimationController.forward();
    });
  }

  @override
  void dispose() {
    _textController.dispose();
    _pageEnterAnimationController.dispose();
    _audioRecorder?.closeRecorder();
    _recordingTimer?.cancel();
    _micPulseController.dispose();
    super.dispose();
  }
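
  /// Builds a [ChatMessage] from whatever the user has composed (text,
  /// attached image, recorded audio), appends it to the list, resets the
  /// composer, and schedules a canned acknowledgement reply.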
  void _sendMessage() {
    final text = _textController.text.trim();
    final imageFile = _attachedImage;
    final audioPath = _recordedAudioPath;
    if (text.isNotEmpty || imageFile != null || audioPath != null) {
      final message = ChatMessage(
        text: text.isNotEmpty ? text : null,
        imagePath: imageFile?.path,
        audioPath: audioPath,
        audioDuration: audioPath != null ? _recordDuration : null,
        time: DateFormat('HH:mm').format(DateTime.now()),
        isUser: true,
      );
      _addMessage(message);
      _textController.clear();
      setState(() {
        _attachedImage = null;
        _recordedAudioPath = null;
        _recordDuration = Duration.zero;
      });
      Future.delayed(const Duration(seconds: 2), () {
        // Skip the simulated reply if the page was closed in the meantime.
        if (!mounted) return;
        _addMessage(ChatMessage(
          text:
              "Thanks for reaching out! A support agent will review your message and get back to you shortly.",
          time: DateFormat('HH:mm').format(DateTime.now()),
          isUser: false,
        ));
      });
    }
  }
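
  /// Appends [message] and animates it into the [AnimatedList].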
  void _addMessage(ChatMessage message) {
    _messages.add(message);
    _listKey.currentState?.insertItem(_messages.length - 1,
        duration: const Duration(milliseconds: 400));
  }
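
  /// Shows a bottom sheet with camera and gallery attachment options.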
  void _showAttachmentOptions() {
    showModalBottomSheet(
      context: context,
      backgroundColor: Colors.transparent,
      builder: (context) => Container(
        padding: const EdgeInsets.all(16),
        decoration: BoxDecoration(
          color: AppColors.surface,
          borderRadius: BorderRadius.vertical(top: Radius.circular(20)),
        ),
        child: Column(
          mainAxisSize: MainAxisSize.min,
          children: [
            ListTile(
              leading: SvgPicture.asset(Assets.icons.camera2.path,
                  height: 20, color: AppColors.textSecondary),
              title: Text('Take a photo',
                  style: TextStyle(color: AppColors.textPrimary)),
              onTap: () {
                Navigator.of(context).pop();
                _pickImage(ImageSource.camera);
              },
            ),
            Divider(color: AppColors.divider),
            ListTile(
              leading: SvgPicture.asset(Assets.icons.galleryAdd.path,
                  height: 19, color: AppColors.textSecondary),
              title: Text('Choose from gallery',
                  style: TextStyle(color: AppColors.textPrimary)),
              onTap: () {
                Navigator.of(context).pop();
                _pickImage(ImageSource.gallery);
              },
            ),
          ],
        ),
      ),
    );
  }

  Future<void> _pickImage(ImageSource source) async {
    try {
      final XFile? pickedFile = await _picker.pickImage(source: source);
      if (pickedFile != null) {
        setState(() => _attachedImage = pickedFile);
      }
    } catch (e) {
      debugPrint("Error picking image: $e");
    }
  }

  void _removeAttachedImage() => setState(() => _attachedImage = null);
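
  /// Requests microphone permission, gives brief haptic feedback, and starts
  /// recording an AAC file into the temporary directory.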
  Future<void> _startRecording() async {
    final permission = await Permission.microphone.request();
    if (permission != PermissionStatus.granted) return;
    try {
      if (await Vibration.hasVibrator() == true) {
        Vibration.vibrate(duration: 100);
      }
      final Directory tempDir = await getTemporaryDirectory();
      final String filePath =
          '${tempDir.path}/recording_${DateTime.now().millisecondsSinceEpoch}.aac';
      await _audioRecorder!
          .startRecorder(toFile: filePath, codec: Codec.aacADTS);
      setState(() {
        _isRecording = true;
        _recordDuration = Duration.zero;
      });
      _startRecordTimer();
    } catch (e) {
      debugPrint('Error starting recording: $e');
    }
  }
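
  /// Stops the active recording and keeps the resulting file path so it can
  /// be previewed and sent.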
  Future<void> _stopRecording() async {
    if (!_isRecording) return;
    try {
      _recordingTimer?.cancel();
      final path = await _audioRecorder!.stopRecorder();
      setState(() {
        _isRecording = false;
        _recordedAudioPath = path;
      });
    } catch (e) {
      debugPrint('Error stopping recording: $e');
    }
  }

  void _startRecordTimer() {
    _recordingTimer = Timer.periodic(const Duration(seconds: 1), (timer) {
      setState(() => _recordDuration += const Duration(seconds: 1));
    });
  }

  void _deleteRecording() => setState(() {
        _recordedAudioPath = null;
        _recordDuration = Duration.zero;
      });

  String _formatDuration(Duration duration) {
    String twoDigits(int n) => n.toString().padLeft(2, "0");
    String twoDigitMinutes = twoDigits(duration.inMinutes.remainder(60));
    String twoDigitSeconds = twoDigits(duration.inSeconds.remainder(60));
    return "$twoDigitMinutes:$twoDigitSeconds";
  }
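
  /// Lays out the chat: an [AnimatedList] of messages over a patterned
  /// background, optional image/audio previews and a recording indicator,
  /// and the sliding [ChatTextInputField] composer at the bottom.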
  @override
  Widget build(BuildContext context) {
    return Scaffold(
      backgroundColor: AppColors.scaffoldBackground,
      appBar: const ProfileAppBar(
        title: 'Help & Support',
        showBackButton: true,
      ),
      body: Container(
        decoration: BoxDecoration(
          color: AppColors.scaffoldBackground,
          image: DecorationImage(
            image: AssetImage(Assets.images.seamlessPatternWithShoppingCartsGiftSalevectorBackground6780995561.path),
            fit: BoxFit.cover,
          ),
        ),
        child: Column(
          children: [
            Expanded(
              child: AnimatedList(
                key: _listKey,
                initialItemCount: _messages.length,
                padding: const EdgeInsets.all(16.0),
                itemBuilder: (context, index, animation) {
                  return _buildMessageItem(_messages[index], animation);
                },
              ),
            ),
            if (_attachedImage != null) _buildAttachedImagePreview(),
            if (_recordedAudioPath != null)
              RecordedAudioPreview(
                audioPath: _recordedAudioPath!,
                totalDuration: _recordDuration,
                onDelete: _deleteRecording,
                formatDuration: _formatDuration,
              ),
            if (_isRecording)
              RecordingIndicator(
                formattedDuration: _formatDuration(_recordDuration),
              ),
            SlideTransition(
              position: Tween<Offset>(
                begin: const Offset(0, 1),
                end: Offset.zero,
              ).animate(CurvedAnimation(
                parent: _pageEnterAnimationController,
                curve: Curves.easeOutCubic,
              )),
              child: ChatTextInputField(
                textController: _textController,
                onSendMessage: _sendMessage,
                onShowAttachmentOptions: _showAttachmentOptions,
                onStartRecording: _startRecording,
                onStopRecording: _stopRecording,
                isRecording: _isRecording,
                micPulseAnimation: _micPulseAnimation,
                attachIconKey: _attachIconKey,
                hintText: "Type your message...",
              ),
            ),
          ],
        ),
      ),
    );
  }
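
  /// Renders one chat bubble with a slide/fade entrance animation, aligned
  /// and colored per sender, with optional text, image, and audio content
  /// plus a timestamp underneath.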
  Widget _buildMessageItem(ChatMessage message, Animation<double> animation) {
    final isUser = message.isUser;
    final slideAnimation = Tween<Offset>(
      begin: isUser ? const Offset(1, 0) : const Offset(-1, 0),
      end: Offset.zero,
    ).animate(CurvedAnimation(parent: animation, curve: Curves.easeOutCubic));
    return FadeTransition(
      opacity: animation,
      child: SlideTransition(
        position: slideAnimation,
        child: Align(
          alignment: isUser ? Alignment.centerRight : Alignment.centerLeft,
          child: Column(
            crossAxisAlignment:
                isUser ? CrossAxisAlignment.end : CrossAxisAlignment.start,
            children: [
              Container(
                margin: const EdgeInsets.symmetric(vertical: 6.0),
                padding: const EdgeInsets.symmetric(
                    horizontal: 16.0, vertical: 10.0),
                constraints: BoxConstraints(
                  maxWidth: MediaQuery.of(context).size.width * 0.75,
                ),
                decoration: BoxDecoration(
                  color:
                      isUser ? AppColors.confirmPopup : AppColors.nearbyPopup,
                  borderRadius: BorderRadius.circular(20.0).subtract(
                    isUser
                        ? const BorderRadius.only(
                            bottomRight: Radius.circular(16))
                        : const BorderRadius.only(
                            bottomLeft: Radius.circular(16)),
                  ),
                  boxShadow: [
                    BoxShadow(
                      color: AppColors.shadowColor,
                      blurRadius: 10,
                      offset: const Offset(0, 4),
                    ),
                  ],
                ),
                child: Column(
                  crossAxisAlignment: isUser
                      ? CrossAxisAlignment.end
                      : CrossAxisAlignment.start,
                  children: [
                    if (message.text != null)
                      Text(
                        message.text!,
                        style: TextStyle(
                            color:
                                isUser ? Colors.white : AppColors.textPrimary,
                            fontSize: 16.0),
                      ),
                    if (message.imagePath != null)
                      Padding(
                        padding: const EdgeInsets.only(top: 4.0, bottom: 4.0),
                        child: ClipRRect(
                          borderRadius: BorderRadius.circular(12),
                          child: Image.file(
                            File(message.imagePath!),
                            width: 200,
                            fit: BoxFit.cover,
                          ),
                        ),
                      ),
                    if (message.audioPath != null)
                      SizedBox(
                        width: MediaQuery.of(context).size.width * 0.5,
                        child: ChatMessageAudioPlayer(
                          audioPath: message.audioPath!,
                          isUser: message.isUser,
                          audioDuration: message.audioDuration,
                        ),
                      ),
                  ],
                ),
              ),
              Padding(
                padding: const EdgeInsets.symmetric(horizontal: 12.0),
                child: Text(
                  message.time,
                  style: TextStyle(
                      color: AppColors.textSecondary, fontSize: 12.0),
                ),
              ),
            ],
          ),
        ),
      ),
    );
  }
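
  /// Thumbnail of the image queued for sending, with a button to remove it.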
  Widget _buildAttachedImagePreview() {
    return Padding(
      padding: const EdgeInsets.fromLTRB(16, 8, 16, 0),
      child: Align(
        alignment: Alignment.centerLeft,
        child: Stack(
          children: [
            ClipRRect(
              borderRadius: BorderRadius.circular(12.0),
              child: Image.file(
                File(_attachedImage!.path),
                height: 100,
                width: 100,
                fit: BoxFit.cover,
              ),
            ),
            Positioned(
              top: 4,
              right: 4,
              child: GestureDetector(
                onTap: _removeAttachedImage,
                child: Container(
                  decoration: BoxDecoration(
                      color: AppColors.textPrimary.withOpacity(0.7),
                      shape: BoxShape.circle),
                  child: const Icon(Icons.close, color: Colors.white, size: 18),
                ),
              ),
            ),
          ],
        ),
      ),
    );
  }
}