import 'dart:convert';
import 'dart:io';

import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:go_router/go_router.dart';
import 'package:image_picker/image_picker.dart';
import 'package:shared_preferences/shared_preferences.dart';

import '../models/message.dart';
import '../models/server_config.dart';
import '../models/session.dart';
import '../providers/providers.dart';
import '../services/audio_service.dart';
import '../services/message_store.dart';
import '../services/mqtt_service.dart';
import '../theme/app_theme.dart';
import '../widgets/command_bar.dart';
import '../widgets/input_bar.dart';
import '../widgets/message_bubble.dart';
import '../widgets/session_drawer.dart';
import '../widgets/status_dot.dart';
import '../widgets/toast_overlay.dart';
import '../widgets/typing_indicator.dart';
import 'navigate_screen.dart';

/// Main chat screen: connects to the gateway over MQTT, renders the message
/// list for the active session, and hosts text/voice/photo input plus the
/// session drawer and command bar.
class ChatScreen extends ConsumerStatefulWidget {
  const ChatScreen({super.key});

  @override
  ConsumerState<ChatScreen> createState() => _ChatScreenState();
}

class _ChatScreenState extends ConsumerState<ChatScreen>
    with WidgetsBindingObserver {
  MqttService? _ws;
  final TextEditingController _textController = TextEditingController();
  final ScrollController _scrollController = ScrollController();
  // Typed key so we can call ScaffoldState.openDrawer() from the app bar.
  final GlobalKey<ScaffoldState> _scaffoldKey = GlobalKey<ScaffoldState>();
  bool _isRecording = false;
  // ID of the voice message currently playing, for the play-button UI.
  String? _playingMessageId;
  // Highest server sequence number we have processed (persisted for catch_up).
  int _lastSeq = 0;
  // True while replaying catch_up messages, to suppress audio auto-play.
  bool _isCatchingUp = false;
  // True when the next incoming screenshot should be shown in the chat
  // (i.e. the user pressed the Screen button) rather than Navigate-only.
  bool _screenshotForChat = false;
  // Recently-seen sequence numbers, for deduplication.
  final Set<int> _seenSeqs = {};

  @override
  void initState() {
    super.initState();
    WidgetsBinding.instance.addObserver(this);
    _initAll();
    _scrollController.addListener(_onScroll);
  }

  /// Loads persisted state, wires the audio playback callback, then connects.
  Future<void> _initAll() async {
    // Load lastSeq BEFORE connecting so catch_up sends the right value
    final prefs = await SharedPreferences.getInstance();
    _lastSeq = prefs.getInt('lastSeq') ?? 0;
    if (!mounted) return;
    // Listen for playback state changes to reset play button UI
    AudioService.onPlaybackStateChanged = () {
      if (mounted) {
        setState(() {
          if (!AudioService.isPlaying) {
            _playingMessageId = null;
          }
        });
      }
    };
    _initConnection();
  }

  // Cached SharedPreferences instance for repeated lastSeq writes.
  SharedPreferences? _prefs;

  /// Persists [_lastSeq] so a restarted app can catch_up from the right point.
  Future<void> _saveLastSeq() async {
    _prefs ??= await SharedPreferences.getInstance();
    await _prefs!.setInt('lastSeq', _lastSeq);
  }

  @override
  void dispose() {
    WidgetsBinding.instance.removeObserver(this);
    _ws?.dispose();
    _textController.dispose();
    _scrollController.dispose();
    super.dispose();
  }

  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    // Reconnect when the app returns to the foreground with a dropped link.
    if (state == AppLifecycleState.resumed) {
      if (_ws != null && !_ws!.isConnected) {
        _ws!.connect();
      }
    }
  }

  /// Infinite scroll: load older messages when nearing the top of the
  /// (reversed) list.
  void _onScroll() {
    if (_scrollController.position.pixels >=
        _scrollController.position.maxScrollExtent - 100) {
      ref.read(messagesProvider.notifier).loadMore();
    }
  }

  // Helper: send a command to the gateway in the expected format
  void _sendCommand(String command, [Map<String, dynamic>? args]) {
    _ws?.send({
      'type': 'command',
      'command': command,
      if (args != null) 'args': args,
    });
  }

  /// Waits for a server config (polls up to ~3s), then opens the MQTT
  /// connection and wires all service callbacks.
  Future<void> _initConnection() async {
    ServerConfig? config = ref.read(serverConfigProvider);
    if (config == null) {
      // Config may still be loading at startup; poll briefly before giving up.
      for (var i = 0; i < 30 && config == null; i++) {
        await Future.delayed(const Duration(milliseconds: 100));
        if (!mounted) return;
        config = ref.read(serverConfigProvider);
      }
      if (config == null) return;
    }
    _ws = MqttService(config: config);
    _ws!.onStatusChanged = (status) {
      if (mounted) {
        ref.read(wsStatusProvider.notifier).state = status;
      }
    };
    _ws!.onMessage = _handleMessage;
    _ws!.onOpen = () {
      final activeId = ref.read(activeSessionIdProvider);
      _sendCommand(
          'sync', activeId != null ? {'activeSessionId': activeId} : null);
      // catch_up is still available during the transition period
      _sendCommand('catch_up', {'lastSeq': _lastSeq});
    };
    _ws!.onError = (error) {
      debugPrint('MQTT error: $error');
    };
    NavigateNotifier.instance = NavigateNotifier(
      sendKey: (key, sessionId) {
        _sendCommand('nav', {'key': key});
      },
      requestScreenshot: (sessionId) {
        _sendCommand('screenshot');
      },
    );
    await _ws!.connect();
  }

  /// Central dispatcher for every message arriving from the gateway.
  void _handleMessage(Map<String, dynamic> msg) {
    // Track sequence numbers for catch_up protocol
    final seq = msg['seq'] as int?;
    if (seq != null) {
      // Dedup: skip messages we've already processed
      if (_seenSeqs.contains(seq)) return;
      _seenSeqs.add(seq);
      // Keep set bounded: drop the oldest (smallest) seqs, keep the last 300.
      if (_seenSeqs.length > 500) {
        final sorted = _seenSeqs.toList()..sort();
        _seenSeqs.removeAll(sorted.sublist(0, sorted.length - 300));
      }
      if (seq > _lastSeq) {
        _lastSeq = seq;
        _saveLastSeq();
      }
    }
    final type = msg['type'] as String?;
    switch (type) {
      case 'sessions':
        _handleSessions(msg);
      case 'message':
      case 'text':
        _handleIncomingMessage(msg);
      case 'voice':
        _handleIncomingVoice(msg);
      case 'image':
        _handleIncomingImage(msg);
      case 'typing':
        // Gateway may send either key; absence of both means "typing started".
        final typing =
            msg['typing'] as bool? ?? msg['isTyping'] as bool? ?? true;
        ref.read(isTypingProvider.notifier).state = typing;
      case 'typing_end':
        ref.read(isTypingProvider.notifier).state = false;
      case 'screenshot':
        ref.read(latestScreenshotProvider.notifier).state =
            msg['data'] as String? ?? msg['imageBase64'] as String?;
      case 'clear':
        ref.read(messagesProvider.notifier).clearMessages();
      case 'session_switched':
        _sendCommand('sessions');
      case 'session_renamed':
        _sendCommand('sessions');
      case 'transcript':
        // Server-side transcription result for a previously sent voice note.
        final messageId = msg['messageId'] as String?;
        final content = msg['content'] as String?;
        if (messageId != null && content != null) {
          ref.read(messagesProvider.notifier).updateContent(messageId, content);
        }
      case 'unread':
        final sessionId = msg['sessionId'] as String?;
        if (sessionId != null) _incrementUnread(sessionId);
      case 'catch_up':
        final serverSeq = msg['serverSeq'] as int?;
        if (serverSeq != null && serverSeq > _lastSeq) {
          _lastSeq = serverSeq;
          _saveLastSeq();
        }
        // Merge catch_up messages: only add messages not already in local storage.
        // We check by content match against existing messages to avoid duplicates
        // while still picking up messages that arrived while the app was backgrounded.
        final catchUpMsgs = msg['messages'] as List?;
        if (catchUpMsgs != null && catchUpMsgs.isNotEmpty) {
          _isCatchingUp = true;
          final existing = ref.read(messagesProvider);
          final existingContents = existing
              .where((m) => m.role == MessageRole.assistant)
              .map((m) => m.content)
              .toSet();
          for (final m in catchUpMsgs) {
            final item = m as Map<String, dynamic>;
            final content = item['content'] as String? ?? '';
            // Skip if we already have this message locally
            if (content.isNotEmpty && existingContents.contains(content)) {
              continue;
            }
            _handleMessage(item);
            if (content.isNotEmpty) existingContents.add(content);
          }
          _isCatchingUp = false;
        }
      case 'pong':
        break; // heartbeat response, ignore
      case 'delete_message':
        final msgId = msg['messageId'] as String?;
        if (msgId != null) {
          ref.read(messagesProvider.notifier).removeMessage(msgId);
        }
      default:
        break;
    }
  }

  /// Replaces the session list and, if none is active yet, activates the
  /// server-flagged (or first) session.
  void _handleSessions(Map<String, dynamic> msg) {
    final sessionsJson = msg['sessions'] as List?;
    if (sessionsJson == null) return;
    final sessions = sessionsJson
        .map((s) => Session.fromJson(s as Map<String, dynamic>))
        .toList();
    ref.read(sessionsProvider.notifier).state = sessions;
    final activeId = ref.read(activeSessionIdProvider);
    if (activeId == null && sessions.isNotEmpty) {
      final active = sessions.firstWhere(
        (s) => s.isActive,
        orElse: () => sessions.first,
      );
      ref.read(activeSessionIdProvider.notifier).state = active.id;
      ref.read(messagesProvider.notifier).switchSession(active.id);
    }
  }

  /// Routes an incoming text message to the active chat, or stores it for a
  /// background session (with unread badge + toast).
  void _handleIncomingMessage(Map<String, dynamic> msg) {
    final sessionId = msg['sessionId'] as String?;
    final content = msg['content'] as String? ?? msg['text'] as String? ?? '';
    final message = Message.text(
      role: MessageRole.assistant,
      content: content,
      status: MessageStatus.sent,
    );
    final activeId = ref.read(activeSessionIdProvider);
    if (sessionId != null && sessionId != activeId) {
      // Store message for the other session so it's there when user switches
      _storeForSession(sessionId, message);
      _incrementUnread(sessionId);
      final sessions = ref.read(sessionsProvider);
      final session = sessions.firstWhere(
        (s) => s.id == sessionId,
        orElse: () =>
            Session(id: sessionId, index: 0, name: 'Unknown', type: 'claude'),
      );
      if (mounted) {
        ToastManager.show(
          context,
          sessionName: session.name,
          preview: content.length > 100
              ? '${content.substring(0, 100)}...'
              : content,
          onTap: () => _switchSession(sessionId),
        );
      }
    } else {
      ref.read(messagesProvider.notifier).addMessage(message);
      ref.read(isTypingProvider.notifier).state = false;
      _scrollToBottom();
    }
  }

  /// Routes an incoming voice message; auto-queues audio for playback unless
  /// backgrounded or replaying catch_up history.
  void _handleIncomingVoice(Map<String, dynamic> msg) {
    final sessionId = msg['sessionId'] as String?;
    final audioData = msg['audioBase64'] as String? ??
        msg['audio'] as String? ??
        msg['data'] as String?;
    final content = msg['content'] as String? ?? msg['text'] as String? ?? '';
    final duration = msg['duration'] as int?;
    final message = Message(
      id: msg['id'] as String? ??
          DateTime.now().millisecondsSinceEpoch.toString(),
      role: MessageRole.assistant,
      type: MessageType.voice,
      content: content,
      audioUri: audioData,
      timestamp: DateTime.now().millisecondsSinceEpoch,
      status: MessageStatus.sent,
      duration: duration,
    );
    final activeId = ref.read(activeSessionIdProvider);
    if (sessionId != null && sessionId != activeId) {
      _storeForSession(sessionId, message);
      _incrementUnread(sessionId);
      final sessions = ref.read(sessionsProvider);
      final session = sessions.firstWhere(
        (s) => s.id == sessionId,
        orElse: () =>
            Session(id: sessionId, index: 0, name: 'Unknown', type: 'claude'),
      );
      if (mounted) {
        ToastManager.show(
          context,
          sessionName: session.name,
          preview: '🎤 Voice note',
          onTap: () => _switchSession(sessionId),
        );
      }
      return;
    }
    ref.read(messagesProvider.notifier).addMessage(message);
    ref.read(isTypingProvider.notifier).state = false;
    _scrollToBottom();
    if (audioData != null && !AudioService.isBackgrounded && !_isCatchingUp) {
      // Queue incoming voice chunks — don't cancel what's already playing
      AudioService.queueBase64(audioData);
    }
  }

  /// Handles an incoming image: always refreshes the Navigate screenshot;
  /// adds to chat only for real images or explicitly requested screenshots.
  void _handleIncomingImage(Map<String, dynamic> msg) {
    final imageData = msg['imageBase64'] as String? ??
        msg['data'] as String? ??
        msg['image'] as String?;
    final content = msg['content'] as String? ?? msg['caption'] as String? ?? '';
    if (imageData == null) return;
    // Always update the Navigate screen screenshot
    ref.read(latestScreenshotProvider.notifier).state = imageData;
    final isScreenshot =
        content == 'Screenshot' || content == 'Capturing screenshot...';
    if (isScreenshot) {
      // Remove any "Capturing screenshot..." placeholder text messages
      ref.read(messagesProvider.notifier).removeWhere(
            (m) =>
                m.role == MessageRole.assistant &&
                m.content == 'Capturing screenshot...',
          );
      // Only add to chat if the Screen button requested it
      if (!_screenshotForChat) {
        ref.read(isTypingProvider.notifier).state = false;
        return;
      }
      _screenshotForChat = false;
    }
    final message = Message.image(
      role: MessageRole.assistant,
      imageBase64: imageData,
      content: content,
      status: MessageStatus.sent,
    );
    ref.read(messagesProvider.notifier).addMessage(message);
    ref.read(isTypingProvider.notifier).state = false;
    _scrollToBottom();
  }

  /// Store a message for a non-active session so it persists when the user switches to it.
  ///
  /// NOTE(review): fire-and-forget load-then-save; two rapid messages for the
  /// same session could race and drop one — confirm MessageStore semantics.
  void _storeForSession(String sessionId, Message message) {
    MessageStore.loadAll(sessionId).then((existing) {
      MessageStore.save(sessionId, [...existing, message]);
    });
  }

  /// Bumps the unread badge counter for [sessionId].
  void _incrementUnread(String sessionId) {
    final counts = Map<String, int>.from(ref.read(unreadCountsProvider));
    counts[sessionId] = (counts[sessionId] ?? 0) + 1;
    ref.read(unreadCountsProvider.notifier).state = counts;
  }

  /// Makes [sessionId] active locally and on the gateway, clearing its unread
  /// count and stopping any in-progress playback.
  Future<void> _switchSession(String sessionId) async {
    // Stop any playing audio and dismiss keyboard when switching sessions
    await AudioService.stopPlayback();
    setState(() => _playingMessageId = null);
    if (mounted) FocusScope.of(context).unfocus();
    ref.read(activeSessionIdProvider.notifier).state = sessionId;
    await ref.read(messagesProvider.notifier).switchSession(sessionId);
    final counts = Map<String, int>.from(ref.read(unreadCountsProvider));
    counts.remove(sessionId);
    ref.read(unreadCountsProvider.notifier).state = counts;
    _sendCommand('switch', {'sessionId': sessionId});
    _scrollToBottom();
  }

  /// Sends the text field's contents as a plain message to the active session.
  void _sendTextMessage() {
    final text = _textController.text.trim();
    if (text.isEmpty) return;
    final message = Message.text(
      role: MessageRole.user,
      content: text,
      status: MessageStatus.sent,
    );
    ref.read(messagesProvider.notifier).addMessage(message);
    _textController.clear();
    FocusScope.of(context).unfocus(); // dismiss keyboard
    // Send as plain text (not command) — gateway handles plain messages
    _ws?.send({
      'content': text,
      'sessionId': ref.read(activeSessionIdProvider),
    });
    _scrollToBottom();
  }

  /// Begins a voice recording; toggles the recording UI on success.
  Future<void> _startRecording() async {
    final path = await AudioService.startRecording();
    if (path != null) {
      setState(() => _isRecording = true);
    }
  }

  /// Finishes recording and sends the audio (base64) to the gateway.
  Future<void> _stopRecording() async {
    final path = await AudioService.stopRecording();
    setState(() => _isRecording = false);
    if (path == null) return;
    final file = File(path);
    if (!await file.exists()) return;
    final bytes = await file.readAsBytes();
    final b64 = base64Encode(bytes);
    final message = Message.voice(
      role: MessageRole.user,
      audioUri: path,
      status: MessageStatus.sent,
    );
    ref.read(messagesProvider.notifier).addMessage(message);
    _ws?.send({
      'type': 'voice',
      'audioBase64': b64,
      'content': '',
      'messageId': message.id,
      'sessionId': ref.read(activeSessionIdProvider),
    });
    _scrollToBottom();
  }

  /// Discards the in-progress recording.
  Future<void> _cancelRecording() async {
    await AudioService.cancelRecording();
    setState(() => _isRecording = false);
  }

  /// Replays the latest assistant voice note, or — if the newest assistant
  /// message is text — asks the gateway to TTS it.
  void _replayLast() {
    final messages = ref.read(messagesProvider);
    for (var i = messages.length - 1; i >= 0; i--) {
      final m = messages[i];
      if (m.role == MessageRole.assistant &&
          m.type == MessageType.voice &&
          m.audioUri != null) {
        _playMessage(m);
        return;
      }
    }
    for (var i = messages.length - 1; i >= 0; i--) {
      final m = messages[i];
      if (m.role == MessageRole.assistant && m.type == MessageType.text) {
        _ws?.send({
          'type': 'tts',
          'text': m.content,
          'sessionId': ref.read(activeSessionIdProvider),
        });
        return;
      }
    }
  }

  /// Plays (or toggles off) a single voice message. Local file paths start
  /// with '/'; anything else is treated as base64 audio.
  void _playMessage(Message message) {
    if (message.audioUri == null) return;
    // Toggle: if this message is already playing, stop it
    if (_playingMessageId == message.id && AudioService.isPlaying) {
      AudioService.stopPlayback();
      setState(() => _playingMessageId = null);
      return;
    }
    setState(() => _playingMessageId = message.id);
    if (message.audioUri!.startsWith('/')) {
      AudioService.playSingle(message.audioUri!);
    } else {
      AudioService.playBase64(message.audioUri!);
    }
  }

  /// Plays [message] and every consecutive assistant voice note after it,
  /// stopping at the first non-assistant message.
  void _chainPlayFrom(Message message) {
    final messages = ref.read(messagesProvider);
    final startIndex = messages.indexWhere((m) => m.id == message.id);
    if (startIndex < 0) return;
    final chain = <String>[];
    for (var i = startIndex; i < messages.length; i++) {
      final m = messages[i];
      if (m.role == MessageRole.assistant &&
          m.type == MessageType.voice &&
          m.audioUri != null) {
        chain.add(m.audioUri!);
      } else if (m.role != MessageRole.assistant) {
        break;
      }
    }
    if (chain.isNotEmpty) {
      AudioService.playChain(chain);
    }
  }

  /// Requests a screenshot destined for the chat (not just Navigate).
  void _requestScreenshot() {
    _screenshotForChat = true;
    _sendCommand('screenshot');
    if (mounted) {
      ScaffoldMessenger.of(context).showSnackBar(
        const SnackBar(
          content: Text('Capturing screenshot...'),
          duration: Duration(seconds: 2),
          behavior: SnackBarBehavior.floating,
        ),
      );
    }
  }

  /// Opens the Navigate screen with a fresh screenshot request in flight.
  void _navigateToTerminal() {
    _requestScreenshot();
    context.push('/navigate');
  }

  /// Picks photos, collects an optional text or voice caption, then sends
  /// voice first (so transcription sets the reply channel) and images after.
  Future<void> _pickPhoto() async {
    // Capture session ID now — before any async gaps (dialog, encoding)
    final targetSessionId = ref.read(activeSessionIdProvider);
    final picker = ImagePicker();
    final images = await picker.pickMultiImage(
      maxWidth: 1920,
      maxHeight: 1080,
      imageQuality: 85,
    );
    if (images.isEmpty) return;
    // Encode all images first
    final encodedImages = <String>[];
    for (final img in images) {
      final bytes = await img.readAsBytes();
      encodedImages.add(base64Encode(bytes));
    }
    final caption = await _showCaptionDialog(images.length);
    if (caption == null) return; // user cancelled
    // Handle voice caption
    String textCaption = caption;
    String? voiceB64;
    if (caption.startsWith('__voice__:')) {
      final voicePath = caption.substring('__voice__:'.length);
      final voiceFile = File(voicePath);
      if (await voiceFile.exists()) {
        final voiceBytes = await voiceFile.readAsBytes();
        voiceB64 = base64Encode(voiceBytes);
      }
      textCaption = '';
    }
    // Send voice FIRST so Whisper transcribes it and the [PAILot:voice] prefix
    // sets the reply channel. Images follow — Claude sees transcript + images together.
    if (voiceB64 != null) {
      final voiceMsg = Message.voice(
        role: MessageRole.user,
        audioUri: caption.substring('__voice__:'.length),
        status: MessageStatus.sent,
      );
      ref.read(messagesProvider.notifier).addMessage(voiceMsg);
      _ws?.send({
        'type': 'voice',
        'audioBase64': voiceB64,
        'content': '',
        'messageId': voiceMsg.id,
        'sessionId': targetSessionId,
      });
    }
    // Send images — first with text caption (if any), rest without
    for (var i = 0; i < encodedImages.length; i++) {
      final isFirst = i == 0;
      final msgCaption = isFirst ? textCaption : '';
      _ws?.send({
        'type': 'image',
        'imageBase64': encodedImages[i],
        'mimeType': 'image/jpeg',
        'caption': msgCaption,
        'sessionId': targetSessionId,
      });
    }
    // Show images in chat locally
    for (var i = 0; i < encodedImages.length; i++) {
      final message = Message.image(
        role: MessageRole.user,
        imageBase64: encodedImages[i],
        content: i == 0 ? textCaption : '',
        status: MessageStatus.sent,
      );
      ref.read(messagesProvider.notifier).addMessage(message);
    }
    _scrollToBottom();
  }

  /// Bottom sheet asking for a caption for [imageCount] images.
  ///
  /// Returns the typed caption, a `'__voice__:<path>'` marker when a voice
  /// caption was recorded, or null if the user cancelled.
  Future<String?> _showCaptionDialog(int imageCount) async {
    final captionController = TextEditingController();
    String? voicePath;
    bool isVoiceRecording = false;
    bool hasVoiceCaption = false;
    final result = await showModalBottomSheet<String>(
      context: context,
      isScrollControlled: true,
      builder: (ctx) => StatefulBuilder(
        builder: (ctx, setSheetState) => Padding(
          padding: EdgeInsets.only(
            bottom: MediaQuery.of(ctx).viewInsets.bottom,
            left: 16,
            right: 16,
            top: 16,
          ),
          child: Column(
            mainAxisSize: MainAxisSize.min,
            children: [
              Text(
                '$imageCount image${imageCount > 1 ? 's' : ''} selected',
                style: Theme.of(ctx).textTheme.titleSmall,
              ),
              const SizedBox(height: 12),
              // Text caption input
              if (!isVoiceRecording && !hasVoiceCaption)
                TextField(
                  controller: captionController,
                  decoration: const InputDecoration(
                    hintText: 'Add a text caption (optional)',
                    border: OutlineInputBorder(),
                  ),
                  autofocus: true,
                  maxLines: 3,
                ),
              // Voice recording indicator
              if (isVoiceRecording)
                Container(
                  padding: const EdgeInsets.symmetric(vertical: 20),
                  child: const Row(
                    mainAxisAlignment: MainAxisAlignment.center,
                    children: [
                      Icon(Icons.fiber_manual_record,
                          color: Colors.red, size: 16),
                      SizedBox(width: 8),
                      Text('Recording voice caption...',
                          style: TextStyle(fontSize: 16)),
                    ],
                  ),
                ),
              // Voice recorded confirmation
              if (hasVoiceCaption && !isVoiceRecording)
                Container(
                  padding: const EdgeInsets.symmetric(vertical: 20),
                  child: const Row(
                    mainAxisAlignment: MainAxisAlignment.center,
                    children: [
                      Icon(Icons.check_circle, color: Colors.green, size: 20),
                      SizedBox(width: 8),
                      Text('Voice caption recorded',
                          style: TextStyle(fontSize: 16)),
                    ],
                  ),
                ),
              const SizedBox(height: 12),
              // Action row: mic/stop + cancel + send
              Row(
                children: [
                  // Mic / Stop button — large and clear
                  if (!hasVoiceCaption)
                    IconButton.filled(
                      onPressed: () async {
                        if (isVoiceRecording) {
                          final path = await AudioService.stopRecording();
                          setSheetState(() {
                            isVoiceRecording = false;
                            if (path != null) {
                              voicePath = path;
                              hasVoiceCaption = true;
                            }
                          });
                        } else {
                          final path = await AudioService.startRecording();
                          if (path != null) {
                            setSheetState(() => isVoiceRecording = true);
                          }
                        }
                      },
                      icon: Icon(isVoiceRecording ? Icons.stop : Icons.mic),
                      style: IconButton.styleFrom(
                        backgroundColor: isVoiceRecording ? Colors.red : null,
                        foregroundColor: isVoiceRecording ? Colors.white : null,
                      ),
                    ),
                  const Spacer(),
                  TextButton(
                    onPressed: () async {
                      if (isVoiceRecording) {
                        await AudioService.cancelRecording();
                      }
                      if (ctx.mounted) Navigator.pop(ctx);
                    },
                    child: const Text('Cancel'),
                  ),
                  const SizedBox(width: 8),
                  FilledButton(
                    onPressed: isVoiceRecording
                        ? null // disable Send while recording
                        : () {
                            if (voicePath != null) {
                              Navigator.pop(ctx, '__voice__:$voicePath');
                            } else {
                              Navigator.pop(ctx, captionController.text);
                            }
                          },
                    child: const Text('Send'),
                  ),
                ],
              ),
              const SizedBox(height: 16),
            ],
          ),
        ),
      ),
    );
    // Safety net: clean up recording if sheet dismissed by swipe/tap outside
    if (isVoiceRecording) {
      await AudioService.cancelRecording();
    }
    captionController.dispose();
    return result;
  }

  /// Confirmation dialog that clears the local message history.
  void _clearChat() {
    showDialog(
      context: context,
      builder: (ctx) => AlertDialog(
        title: const Text('Clear chat?'),
        content: const Text('This will clear the message history.'),
        actions: [
          TextButton(
            onPressed: () => Navigator.pop(ctx),
            child: const Text('Cancel'),
          ),
          TextButton(
            onPressed: () {
              Navigator.pop(ctx);
              ref.read(messagesProvider.notifier).clearMessages();
            },
            child:
                const Text('Clear', style: TextStyle(color: AppColors.error)),
          ),
        ],
      ),
    );
  }

  /// Sends the '/h' help shortcut as a plain message.
  void _sendHelp() {
    _ws?.send({
      'content': '/h',
      'sessionId': ref.read(activeSessionIdProvider),
    });
  }

  /// Flips light/dark mode and persists the choice.
  Future<void> _toggleTheme() async {
    final current = ref.read(themeModeProvider);
    final next = current == ThemeMode.dark ? ThemeMode.light : ThemeMode.dark;
    ref.read(themeModeProvider.notifier).state = next;
    final prefs = await SharedPreferences.getInstance();
    await prefs.setString(
        'theme_mode', next == ThemeMode.dark ? 'dark' : 'light');
  }

  /// Animates the (reversed) list to its newest message after the next frame.
  void _scrollToBottom() {
    WidgetsBinding.instance.addPostFrameCallback((_) {
      if (_scrollController.hasClients) {
        _scrollController.animateTo(
          0, // offset 0 is the newest message because the list is reversed
          duration: const Duration(milliseconds: 200),
          curve: Curves.easeOut,
        );
      }
    });
  }

  void _handleNewSession() {
    _sendCommand('create');
  }

  /// Renames on the gateway and optimistically updates the local list.
  void _handleSessionRename(Session session, String newName) {
    _sendCommand('rename', {'sessionId': session.id, 'name': newName});
    final sessions = ref.read(sessionsProvider);
    ref.read(sessionsProvider.notifier).state = sessions
        .map((s) => s.id == session.id ? s.copyWith(name: newName) : s)
        .toList();
  }

  /// Removes on the gateway and optimistically updates the local list.
  void _handleSessionRemove(Session session) {
    _sendCommand('remove', {'sessionId': session.id});
    final sessions = ref.read(sessionsProvider);
    ref.read(sessionsProvider.notifier).state =
        sessions.where((s) => s.id != session.id).toList();
  }

  /// Reorders the drawer list locally (no gateway command is sent).
  void _handleSessionReorder(int oldIndex, int newIndex) {
    // ReorderableListView reports the target index before removal.
    if (newIndex > oldIndex) newIndex--;
    final sessions = List<Session>.from(ref.read(sessionsProvider));
    final item = sessions.removeAt(oldIndex);
    sessions.insert(newIndex, item);
    ref.read(sessionsProvider.notifier).state = sessions;
  }

  void _refreshSessions() {
    _sendCommand('sessions');
  }

  @override
  Widget build(BuildContext context) {
    final messages = ref.watch(messagesProvider);
    final wsStatus = ref.watch(wsStatusProvider);
    final isTyping = ref.watch(isTypingProvider);
    final sessions = ref.watch(sessionsProvider);
    final activeSession = ref.watch(activeSessionProvider);
    final unreadCounts = ref.watch(unreadCountsProvider);
    final inputMode = ref.watch(inputModeProvider);
    return GestureDetector(
      onTap: () => FocusScope.of(context).unfocus(),
      child: Scaffold(
        key: _scaffoldKey,
        appBar: AppBar(
          leading: IconButton(
            icon: const Icon(Icons.menu),
            onPressed: () {
              FocusScope.of(context).unfocus();
              _scaffoldKey.currentState?.openDrawer();
            },
          ),
          title: Text(
            activeSession?.name ?? 'PAILot',
            style: const TextStyle(fontSize: 16, fontWeight: FontWeight.w600),
          ),
          actions: [
            StatusDot(status: wsStatus),
            const SizedBox(width: 12),
            IconButton(
              icon: Icon(
                Theme.of(context).brightness == Brightness.dark
                    ? Icons.light_mode
                    : Icons.dark_mode,
                size: 20,
              ),
              onPressed: _toggleTheme,
            ),
            IconButton(
              icon: const Icon(Icons.settings, size: 20),
              onPressed: () => context.push('/settings'),
            ),
          ],
        ),
        drawer: SessionDrawer(
          sessions: sessions,
          activeSessionId: activeSession?.id,
          unreadCounts: unreadCounts,
          onSelect: (s) => _switchSession(s.id),
          onRemove: _handleSessionRemove,
          onRename: _handleSessionRename,
          onReorder: _handleSessionReorder,
          onNewSession: _handleNewSession,
          onRefresh: _refreshSessions,
        ),
        body: Column(
          children: [
            Expanded(
              child: ListView.builder(
                controller: _scrollController,
                reverse: true,
                padding: const EdgeInsets.only(top: 8, bottom: 8),
                // One extra slot at index 0 for the typing indicator.
                itemCount: messages.length + (isTyping ? 1 : 0),
                itemBuilder: (context, index) {
                  if (isTyping && index == 0) {
                    return const TypingIndicator();
                  }
                  // Map reversed list index to message index, shifted by one
                  // when the typing indicator occupies index 0.
                  final msgIndex = isTyping
                      ? messages.length - index
                      : messages.length - 1 - index;
                  if (msgIndex < 0 || msgIndex >= messages.length) {
                    return const SizedBox.shrink();
                  }
                  final message = messages[msgIndex];
                  return MessageBubble(
                    message: message,
                    isPlaying: _playingMessageId == message.id,
                    onPlay: message.type == MessageType.voice
                        ? () => _playMessage(message)
                        : null,
                    onChainPlay: message.type == MessageType.voice &&
                            message.role == MessageRole.assistant
                        ? () => _chainPlayFrom(message)
                        : null,
                    onDelete: () {
                      ref
                          .read(messagesProvider.notifier)
                          .removeMessage(message.id);
                    },
                  );
                },
              ),
            ),
            CommandBar(
              onScreen: _requestScreenshot,
              onNavigate: _navigateToTerminal,
              onPhoto: _pickPhoto,
              onClear: _clearChat,
              onHelp: inputMode == InputMode.text ? _sendHelp : null,
              showHelp: inputMode == InputMode.text,
            ),
            InputBar(
              mode: inputMode,
              isRecording: _isRecording,
              textController: _textController,
              onToggleMode: () {
                ref.read(inputModeProvider.notifier).state =
                    inputMode == InputMode.voice
                        ? InputMode.text
                        : InputMode.voice;
              },
              onRecordStart: _startRecording,
              onRecordStop: _stopRecording,
              onRecordCancel: _cancelRecording,
              onReplay: _replayLast,
              onSendText: _sendTextMessage,
            ),
          ],
        ),
      ),
    );
  }
}