import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:flutter/foundation.dart';
import 'package:path_provider/path_provider.dart';
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:go_router/go_router.dart';
import 'package:image_picker/image_picker.dart';
import 'package:file_picker/file_picker.dart';
import 'package:shared_preferences/shared_preferences.dart';
import '../models/message.dart';
import '../models/session.dart';
// ignore: unused_import
import '../models/server_config.dart';
import '../providers/providers.dart';
import '../services/audio_service.dart';
import '../services/message_store.dart';
import '../services/mqtt_service.dart';
import '../services/trace_service.dart';
import '../services/navigate_notifier.dart';
import '../services/push_service.dart';
import '../theme/app_theme.dart';
import '../services/purchase_service.dart';
import '../widgets/command_bar.dart';
import '../widgets/input_bar.dart';
import '../widgets/message_bubble.dart';
import '../widgets/paywall_banner.dart';
import '../widgets/session_drawer.dart';
import '../widgets/status_dot.dart';
import '../widgets/toast_overlay.dart';
import '../widgets/typing_indicator.dart';
import 'navigate_screen.dart';

/// Root chat screen of the app: message list, input bar, and session drawer.
class ChatScreen extends ConsumerStatefulWidget {
  const ChatScreen({super.key});

  @override
  ConsumerState createState() => _ChatScreenState();
}

/// Logs [msg] to the debug console and to [TraceService]; in debug builds the
/// line is also appended to `mqtt_debug.log` in the app documents directory.
Future _chatLog(String msg) async {
  debugPrint('[Chat] $msg');
  TraceService.instance.addTrace('Chat', msg);
  if (!kDebugMode) return;
  try {
    final dir = await getApplicationDocumentsDirectory();
    final file = File('${dir.path}/mqtt_debug.log');
    // HH:mm:ss slice of the ISO-8601 timestamp.
    final ts = DateTime.now().toIso8601String().substring(11, 19);
    await file.writeAsString('[$ts] $msg\n', mode: FileMode.append);
  } catch (_) {
    // Best-effort file logging; failures are deliberately ignored.
  }
}

class _ChatScreenState extends ConsumerState with WidgetsBindingObserver {
  // MQTT transport; null until _initConnection() runs.
  MqttService? _ws;
  PushService?
_push;

  final TextEditingController _textController = TextEditingController();
  final ScrollController _scrollController = ScrollController();
  final GlobalKey _scaffoldKey = GlobalKey();

  bool _isRecording = false;
  // Id of the voice message currently playing, for the bubble play/stop UI.
  String? _playingMessageId;

  // Highest server sequence number processed (catch_up protocol cursor).
  int _lastSeq = 0;
  bool _isCatchingUp = false;
  // NOTE(review): set by the catch_up handler but not read anywhere in the
  // visible code of this file — confirm before removing.
  bool _catchUpReceived = false;
  // Whether the next incoming screenshot should be shown in the chat.
  bool _screenshotForChat = false;

  // FIFO dedup queue: O(1) eviction by removing from front when over cap.
  final List _seenSeqsList = [];
  final Set _seenSeqs = {};

  // Messages are gated until the sessions list arrives after (re)connect.
  bool _sessionReady = false;
  // FIX(review): generic arguments restored — the mangled source read
  // `List>` which does not compile.
  final List<Map<String, dynamic>> _pendingMessages = [];

  // _catchUpPending removed: cross-session catch_up messages are now appended
  // synchronously via MessageStoreV2.append() in the catch_up handler.
  List? _cachedSessionOrder;
  // Auto-clears the typing indicator if typing_end never arrives.
  Timer? _typingTimer;
  bool _unreadCountsLoaded = false;

  @override
  void initState() {
    super.initState();
    WidgetsBinding.instance.addObserver(this);
    _initAll();
    _scrollController.addListener(_onScroll);
  }

  /// One-time async initialisation: message store, persisted prefs (seq
  /// cursor, unread counts, session order/active id), audio playback
  /// listener, then the MQTT connection.
  Future _initAll() async {
    // Initialize append-only message store (reads log, rebuilds index, compacts).
    await MessageStoreV2.initialize();
    // Load persisted state BEFORE connecting
    final prefs = await SharedPreferences.getInstance();
    _lastSeq = prefs.getInt('lastSeq') ?? 0;
    // Restore persisted unread counts
    final savedUnreads = prefs.getString('unreadCounts');
    if (savedUnreads != null && mounted) {
      try {
        final map = (jsonDecode(savedUnreads) as Map)
            .map((k, v) => MapEntry(k, v as int));
        ref.read(unreadCountsProvider.notifier).state = map;
      } catch (_) {
        // Corrupt persisted JSON — start with empty counts.
      }
    }
    _unreadCountsLoaded = true;
    // Restore saved session order and active session
    _cachedSessionOrder = prefs.getStringList('sessionOrder');
    final savedSessionId = prefs.getString('activeSessionId');
    if (savedSessionId != null && mounted) {
      ref.read(activeSessionIdProvider.notifier).state = savedSessionId;
      // Synchronous: no async gap between load and any arriving messages.
ref.read(messagesProvider.notifier).switchSession(savedSessionId);
    }
    if (!mounted) return;
    // Listen for playback state changes to reset play button UI
    // Use a brief delay to avoid race between queue transitions
    AudioService.onPlaybackStateChanged = () {
      if (mounted) {
        if (AudioService.isPlaying) {
          // Something started playing — keep the indicator as-is
        } else {
          // Playback stopped — clear indicator only if queue is truly empty.
          // Use a short delay since the queue transition has a brief gap.
          Future.delayed(const Duration(milliseconds: 200), () {
            if (mounted && !AudioService.isPlaying) {
              setState(() => _playingMessageId = null);
            }
          });
        }
      }
    };
    _initConnection();
  }

  SharedPreferences? _prefs;

  /// Persists [_lastSeq] so catch_up resumes from the right point on restart.
  Future _saveLastSeq() async {
    _prefs ??= await SharedPreferences.getInstance();
    await _prefs!.setInt('lastSeq', _lastSeq);
  }

  @override
  void dispose() {
    WidgetsBinding.instance.removeObserver(this);
    // FIX(review): cancel the typing auto-clear timer so its 10s callback
    // cannot fire against a disposed State.
    _typingTimer?.cancel();
    // NOTE(review): AudioService.onPlaybackStateChanged still holds a closure
    // over this State (guarded by `mounted`); consider clearing it here if
    // the callback's declared type permits null.
    _ws?.dispose();
    _textController.dispose();
    _scrollController.dispose();
    super.dispose();
  }

  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    if (state == AppLifecycleState.resumed) {
      // Reconnect MQTT if the connection dropped while suspended.
      if (_ws != null && !_ws!.isConnected) {
        _ws!.connect();
      }
      // Don't update badge here — provider might not have loaded persisted counts yet.
      // Native applicationDidBecomeActive reads correct value from UserDefaults.
    } else if (state == AppLifecycleState.paused && _unreadCountsLoaded) {
      // Set badge to total unread count when going to background
      _updateBadgeFromUnreads();
    }
  }

  /// Pushes the current unread totals to the app badge (via the persist helper,
  /// which also calls PushService.setBadge).
  void _updateBadgeFromUnreads() {
    final counts = ref.read(unreadCountsProvider);
    _persistUnreadCounts(counts);
  }

  // ignore: unused_field
  bool _isLoadingMore = false;

  void _onScroll() {
    // Pagination removed: all messages are loaded synchronously on session
    // switch via the in-memory index. Nothing to do on scroll.
  }

  // Helper: send a command to the gateway in the expected format
  void _sendCommand(String command, [Map?
args]) {
    _ws?.send({
      'type': 'command',
      'command': command,
      if (args != null) 'args': args,
    });
  }

  /// Builds the MQTT service, wires all status/message/lifecycle callbacks,
  /// connects, and then initialises push notifications on top of the live
  /// connection.
  Future _initConnection() async {
    ServerConfig? config = ref.read(serverConfigProvider);
    if (config == null) {
      // Poll briefly (up to ~3s) for the provider to be populated.
      for (var i = 0; i < 30 && config == null; i++) {
        await Future.delayed(const Duration(milliseconds: 100));
        if (!mounted) return;
        config = ref.read(serverConfigProvider);
      }
      if (config == null) return;
    }
    _ws = MqttService(config: config);
    _ws!.onStatusChanged = (status) {
      if (mounted) {
        ref.read(wsStatusProvider.notifier).state = status;
        if (status == ConnectionStatus.connected) {
          ref.read(connectionDetailProvider.notifier).state = '';
          ref.read(connectedViaProvider.notifier).state = _ws?.connectedVia ?? '';
        } else {
          ref.read(connectedViaProvider.notifier).state = '';
        }
      }
    };
    _ws!.onStatusDetail = (detail) {
      if (mounted) {
        ref.read(connectionDetailProvider.notifier).state = detail;
      }
    };
    _ws!.onMessage = _handleMessage;
    _ws!.onOpen = () {
      _sessionReady = false; // Gate messages until sessions arrive
      _pendingMessages.clear();
      // Delay sync slightly to let broker acknowledge our subscriptions first.
      // Without this, the catch_up response arrives before pailot/control/out
      // subscription is active, and the message is lost.
      Future.delayed(const Duration(milliseconds: 200), () {
        if (!mounted) return;
        final activeId = ref.read(activeSessionIdProvider);
        _sendCommand('sync', activeId != null ? {'activeSessionId': activeId} : null);
        _push?.onMqttConnected();
      });
    };
    _ws!.onResume = () {
      // App came back from background. The in-memory state already has
      // any messages received while suspended (addMessage was called).
      // Just rebuild the UI and scroll to bottom to show them.
_chatLog('onResume: rebuilding UI and sending catch_up');
      _sendCommand('catch_up', {'lastSeq': _lastSeq});
      if (mounted) {
        setState(() {});
        // Scroll after the frame rebuilds
        WidgetsBinding.instance.addPostFrameCallback((_) {
          if (mounted) _scrollToBottom();
        });
      }
    };
    _ws!.onError = (error) {
      debugPrint('MQTT error: $error');
    };
    // Expose navigation hooks (key presses / screenshot requests) to the rest
    // of the app through a provider.
    ref.read(navigateNotifierProvider.notifier).state = NavigateNotifier(
      sendKey: (key, sessionId) {
        _sendCommand('nav', {'key': key});
      },
      requestScreenshot: (sessionId) {
        _sendCommand('screenshot', {'sessionId': sessionId ?? ref.read(activeSessionIdProvider)});
      },
    );
    await _ws!.connect();
    // Attach MQTT to trace service for auto-publishing logs to server
    TraceService.instance.attachMqtt(_ws!);
    // Initialize push notifications after MQTT is set up so token can be
    // sent immediately if already connected.
    _push = PushService(mqttService: _ws!);
    _push!.onNotificationTap = (data) {
      final sessionId = data['sessionId'] as String?;
      final activeId = ref.read(activeSessionIdProvider);
      // Immediately request catch_up — don't wait for the sync flow.
      // The message is already in the server queue.
_sendCommand('catch_up', {'lastSeq': _lastSeq});
      if (sessionId != null && sessionId != activeId && mounted) {
        _switchSession(sessionId);
      }
    };
    await _push!.initialize();
  }

  /// Central dispatcher for every message arriving over MQTT.
  ///
  /// Applies seq-based dedup, advances the catch_up cursor, then routes on
  /// the message `type` field.
  void _handleMessage(Map msg) {
    final type = msg['type'] as String?;
    final msgSessionId = msg['sessionId'] as String?;
    final msgSeq = msg['seq'];
    TraceService.instance.addTrace(
      'handleMessage processing',
      'type=$type sessionId=${msgSessionId?.substring(0, msgSessionId.length.clamp(0, 8))} seq=$msgSeq',
    );
    // Track sequence numbers for catch_up protocol
    final seq = msg['seq'] as int?;
    if (seq != null) {
      // Dedup: skip messages we've already processed
      if (_seenSeqs.contains(seq)) {
        TraceService.instance.addTrace(
          'handleMessage seq deduped',
          'seq=$seq type=$type — already seen, dropping',
        );
        return;
      }
      _seenSeqs.add(seq);
      _seenSeqsList.add(seq);
      // Keep bounded at 500 with O(1) FIFO eviction (drop oldest first)
      // NOTE(review): List.removeAt(0) is actually O(n), not O(1); a Queue
      // from dart:collection would make the comment above true.
      if (_seenSeqsList.length > 500) {
        final evict = _seenSeqsList.removeAt(0);
        _seenSeqs.remove(evict);
      }
      if (seq > _lastSeq) {
        _lastSeq = seq;
        _saveLastSeq();
      }
    }
    // Dart 3 switch: no implicit fallthrough; grouped labels share a body.
    switch (type) {
      case 'sessions':
        _handleSessions(msg);
      case 'message':
      case 'text':
        _handleIncomingMessage(msg);
      case 'voice':
        _handleIncomingVoice(msg);
      case 'image':
        _handleIncomingImage(msg);
      case 'typing':
        // Accept several field spellings; default is "typing started".
        final typing = msg['typing'] as bool? ??
            msg['isTyping'] as bool? ??
            msg['active'] as bool? ??
true;
        final typingSession = msg['sessionId'] as String?;
        final activeId = ref.read(activeSessionIdProvider);
        _chatLog('TYPING: session=${typingSession?.substring(0, 8)} active=${activeId?.substring(0, 8)} typing=$typing match=${typingSession == activeId}');
        // Strict: only show typing for the ACTIVE session, ignore all others
        if (activeId != null && typingSession == activeId) {
          ref.read(isTypingProvider.notifier).state = typing;
          // Auto-clear after 10s in case typing_end is missed
          if (typing) {
            _typingTimer?.cancel();
            _typingTimer = Timer(const Duration(seconds: 10), () {
              if (mounted) ref.read(isTypingProvider.notifier).state = false;
            });
          } else {
            _typingTimer?.cancel();
          }
        }
      case 'typing_end':
        final endSession = msg['sessionId'] as String?;
        final activeEndId = ref.read(activeSessionIdProvider);
        if (activeEndId != null && endSession == activeEndId) {
          ref.read(isTypingProvider.notifier).state = false;
        }
      case 'screenshot':
        // Feed the Navigate screen's screenshot preview.
        ref.read(latestScreenshotProvider.notifier).state =
            msg['data'] as String? ?? msg['imageBase64'] as String?;
      case 'clear':
        ref.read(messagesProvider.notifier).clearMessages();
      case 'session_switched':
        _sendCommand('sessions');
      case 'session_renamed':
        _sendCommand('sessions');
      case 'transcript':
        final messageId = msg['messageId'] as String?;
        final content = msg['content'] as String?;
        if (messageId != null && content != null) {
          // Try updating in current session first
          final currentMessages = ref.read(messagesProvider);
          final inCurrent = currentMessages.any((m) => m.id == messageId);
          if (inCurrent) {
            ref.read(messagesProvider.notifier).updateContent(messageId, content);
          } else {
            // Message is in a different session (user switched after recording).
            // Load that session's messages from disk, update, and save back.
_updateTranscriptOnDisk(messageId, content);
          }
        }
      case 'unread':
        final sessionId = msg['sessionId'] as String?;
        if (sessionId != null) _incrementUnread(sessionId);
      case 'catch_up':
        _catchUpReceived = true;
        final serverSeq = msg['serverSeq'] as int?;
        if (serverSeq != null) {
          // Always sync to server's seq — if server restarted, its seq may be lower
          _lastSeq = serverSeq;
          _saveLastSeq();
        }
        // Merge catch_up messages: only add messages not already displayed.
        // Dedup by content to avoid showing messages already in the UI.
        final catchUpMsgs = msg['messages'] as List?;
        if (catchUpMsgs != null && catchUpMsgs.isNotEmpty) {
          _isCatchingUp = true;
          final activeId = ref.read(activeSessionIdProvider);
          final currentId = ref.read(messagesProvider.notifier).currentSessionId;
          final existing = ref.read(messagesProvider);
          // Contents of assistant messages already on screen, for dedup below.
          final existingContents = existing
              .where((m) => m.role == MessageRole.assistant)
              .map((m) => m.content)
              .toSet();
          // Collect cross-session sessions that received messages (for toasts).
          final crossSessionCounts = {};
          final crossSessionPreviews = {};
          for (final m in catchUpMsgs) {
            final map = m as Map;
            final msgType = map['type'] as String? ?? 'text';
            final content = map['content'] as String? ??
                map['transcript'] as String? ??
                map['caption'] as String? ??
'';
            final msgSessionId = map['sessionId'] as String?;
            final imageData = map['imageBase64'] as String?;
            // Skip empty text messages (images with no caption are OK)
            if (content.isEmpty && imageData == null) continue;
            // Dedup by content (skip images from dedup — they have unique msgIds)
            if (imageData == null && content.isNotEmpty && existingContents.contains(content)) continue;
            final Message message;
            if (msgType == 'image' && imageData != null) {
              message = Message.image(
                role: MessageRole.assistant,
                imageBase64: imageData,
                content: content,
                status: MessageStatus.sent,
              );
            } else {
              message = Message.text(
                role: MessageRole.assistant,
                content: content,
                status: MessageStatus.sent,
              );
            }
            _chatLog('catch_up msg: session=${msgSessionId?.substring(0, 8) ?? "NULL"} active=${activeId?.substring(0, 8)} content="${content.substring(0, content.length.clamp(0, 40))}"');
            if (msgSessionId == null || msgSessionId == currentId) {
              // Active session or no session: add to UI (addMessage also appends to log).
              ref.read(messagesProvider.notifier).addMessage(message);
            } else {
              // Cross-session: synchronous append — no race condition.
              MessageStoreV2.append(msgSessionId, message);
              _incrementUnread(msgSessionId);
              crossSessionCounts[msgSessionId] = (crossSessionCounts[msgSessionId] ?? 0) + 1;
              // Keep the FIRST cross-session message as the toast preview.
              crossSessionPreviews.putIfAbsent(msgSessionId, () => content);
            }
            existingContents.add(content);
          }
          _isCatchingUp = false;
          _scrollToBottom();
          // Show one toast per cross-session that received messages.
          if (crossSessionCounts.isNotEmpty && mounted) {
            final sessions = ref.read(sessionsProvider);
            for (final entry in crossSessionCounts.entries) {
              final sid = entry.key;
              final count = entry.value;
              final session = sessions.firstWhere(
                (s) => s.id == sid,
                orElse: () => Session(id: sid, index: 0, name: 'Unknown', type: 'claude'),
              );
              final preview = count == 1 ? (crossSessionPreviews[sid] ?? '') : '$count messages';
              ToastManager.show(
                context,
                sessionName: session.name,
                preview: preview.length > 100 ?
'${preview.substring(0, 100)}...' : preview,
                onTap: () => _switchSession(sid),
              );
            }
          }
          // Clear unread for active session
          if (activeId != null) {
            final counts = Map.from(ref.read(unreadCountsProvider));
            counts.remove(activeId);
            ref.read(unreadCountsProvider.notifier).state = counts;
          }
        }
      case 'pong':
        break; // heartbeat response, ignore
      case 'delete_message':
        final msgId = msg['messageId'] as String?;
        if (msgId != null) {
          ref.read(messagesProvider.notifier).removeMessage(msgId);
        }
      default:
        break;
    }
  }

  /// Handles the `sessions` list: applies custom ordering, picks/restores the
  /// active session, and — on first receipt after (re)connect — requests
  /// catch_up and drains any messages queued before the list arrived.
  void _handleSessions(Map msg) {
    final sessionsJson = msg['sessions'] as List?;
    if (sessionsJson == null) return;
    var sessions = sessionsJson
        .map((s) => Session.fromJson(s as Map))
        .toList();
    // Apply saved custom order (reordered sessions persist across updates)
    sessions = _applyCustomOrder(sessions);
    ref.read(sessionsProvider.notifier).state = sessions;
    final activeId = ref.read(activeSessionIdProvider);
    if (activeId == null && sessions.isNotEmpty) {
      final active = sessions.firstWhere(
        (s) => s.isActive,
        orElse: () => sessions.first,
      );
      ref.read(activeSessionIdProvider.notifier).state = active.id;
      // Synchronous session switch — no async gap.
      ref.read(messagesProvider.notifier).switchSession(active.id);
      SharedPreferences.getInstance().then((p) => p.setString('activeSessionId', active.id));
    }
    // Session is ready — process any pending messages that arrived before sessions list
    if (!_sessionReady) {
      _sessionReady = true;
      // Request catch_up now that session is set
      _sendCommand('catch_up', {'lastSeq': _lastSeq});
      // Drain messages that arrived before sessions list
      if (_pendingMessages.isNotEmpty) {
        // FIX(review): generic argument restored — the mangled source read
        // `List>.from(...)` which does not compile.
        // NOTE(review): nothing in the visible handler currently enqueues into
        // _pendingMessages; if gating is reinstated, beware that re-dispatched
        // messages would be dropped by the seq dedup in _handleMessage.
        final pending = List<Map<String, dynamic>>.from(_pendingMessages);
        _pendingMessages.clear();
        for (final m in pending) {
          _handleMessage(m);
        }
      }
    }
  }

  /// Routes an incoming text message either into the open chat or, for a
  /// different session, straight into its on-disk store with a toast.
  void _handleIncomingMessage(Map msg) {
    final sessionId = msg['sessionId'] as String?;
    final content = msg['content'] as String? ?? msg['text'] as String? ??
'';
    TraceService.instance.addTrace(
      'handleMessage processing type=text',
      'sessionId=${sessionId?.substring(0, sessionId.length.clamp(0, 8))}',
    );
    final message = Message.text(
      role: MessageRole.assistant,
      content: content,
      status: MessageStatus.sent,
    );
    // Use currentSessionId from notifier (what's actually loaded in the provider),
    // not activeSessionIdProvider (can be stale after background resume).
    final currentId = ref.read(messagesProvider.notifier).currentSessionId;
    if (sessionId != null && sessionId != currentId) {
      // Append directly to the log for the target session — synchronous, no race.
      TraceService.instance.addTrace(
        'message stored for session',
        'sessionId=${sessionId.substring(0, sessionId.length.clamp(0, 8))}, toast shown',
      );
      MessageStoreV2.append(sessionId, message);
      _incrementUnread(sessionId);
      final sessions = ref.read(sessionsProvider);
      final session = sessions.firstWhere(
        (s) => s.id == sessionId,
        orElse: () => Session(id: sessionId, index: 0, name: 'Unknown', type: 'claude'),
      );
      if (mounted) {
        ToastManager.show(
          context,
          sessionName: session.name,
          preview: content.length > 100 ? '${content.substring(0, 100)}...' : content,
          onTap: () => _switchSession(sessionId),
        );
      }
    } else {
      TraceService.instance.addTrace(
        'message displayed in chat',
        'sessionId=${sessionId?.substring(0, sessionId.length.clamp(0, 8)) ?? "global"} len=${content.length}',
      );
      ref.read(messagesProvider.notifier).addMessage(message);
      ref.read(isTypingProvider.notifier).state = false;
      _scrollToBottom();
    }
  }

  /// Handles an incoming voice note: persists the audio to a temp file,
  /// routes cross-session notes to the store, and auto-plays when the chat
  /// is in the foreground.
  Future _handleIncomingVoice(Map msg) async {
    final sessionId = msg['sessionId'] as String?;
    final audioData = msg['audioBase64'] as String? ?? msg['audio'] as String? ?? msg['data'] as String?;
    final content = msg['content'] as String? ??
        msg['transcript'] as String? ??
        msg['text'] as String? ??
        '';
    final duration = msg['duration'] as int?;
    final message = Message(
      id: msg['id'] as String? ??
DateTime.now().millisecondsSinceEpoch.toString(),
      role: MessageRole.assistant,
      type: MessageType.voice,
      content: content,
      audioUri: audioData,
      timestamp: DateTime.now().millisecondsSinceEpoch,
      status: MessageStatus.sent,
      duration: duration,
    );
    // Save audio to file so it survives persistence (base64 gets stripped)
    String? savedAudioPath;
    if (audioData != null) {
      try {
        final dir = await getTemporaryDirectory();
        savedAudioPath = '${dir.path}/voice_${message.id}.m4a';
        // Strip an optional data-URI prefix ("...,") before decoding.
        final bytes = base64Decode(audioData.contains(',') ? audioData.split(',').last : audioData);
        await File(savedAudioPath).writeAsBytes(bytes);
      } catch (_) {
        // Fall back to keeping the base64 payload as the audio URI.
        savedAudioPath = null;
      }
    }
    // Copy of the message pointing at the saved file (base64 fallback).
    final storedMessage = Message(
      id: message.id,
      role: message.role,
      type: message.type,
      content: content,
      audioUri: savedAudioPath ?? audioData,
      timestamp: message.timestamp,
      status: message.status,
      duration: duration,
    );
    final currentId = ref.read(messagesProvider.notifier).currentSessionId;
    _chatLog('voice: sessionId=$sessionId currentId=$currentId audioPath=$savedAudioPath content="${content.substring(0, content.length.clamp(0, 30))}"');
    if (sessionId != null && sessionId != currentId) {
      _chatLog('voice: cross-session, appending to store for $sessionId');
      // Synchronous append — no async gap, no race condition.
MessageStoreV2.append(sessionId, storedMessage);
      _chatLog('voice: appended, incrementing unread');
      _incrementUnread(sessionId);
      final sessions = ref.read(sessionsProvider);
      final session = sessions.firstWhere(
        (s) => s.id == sessionId,
        orElse: () => Session(id: sessionId, index: 0, name: 'Unknown', type: 'claude'),
      );
      if (mounted) {
        ToastManager.show(
          context,
          sessionName: session.name,
          preview: '🎤 Voice note',
          onTap: () => _switchSession(sessionId),
        );
      }
      return;
    }
    ref.read(messagesProvider.notifier).addMessage(storedMessage);
    ref.read(isTypingProvider.notifier).state = false;
    _scrollToBottom();
    // Auto-play only when foregrounded and not catching up or recording.
    if (audioData != null && !AudioService.isBackgrounded && !_isCatchingUp && !_isRecording) {
      setState(() => _playingMessageId = storedMessage.id);
      AudioService.queueBase64(audioData);
    }
  }

  /// Handles an incoming image (or screenshot): updates the Navigate screen
  /// provider, filters screenshots that were not requested for the chat, and
  /// routes the rest to the active chat or the target session's store.
  void _handleIncomingImage(Map msg) {
    final imageData = msg['imageBase64'] as String? ?? msg['data'] as String? ?? msg['image'] as String?;
    final content = msg['content'] as String? ?? msg['caption'] as String? ?? '';
    final sessionId = msg['sessionId'] as String?;
    if (imageData == null) return;
    // Always update the Navigate screen screenshot provider
    ref.read(latestScreenshotProvider.notifier).state = imageData;
    final isScreenshot = content == 'Screenshot' ||
        content == 'Capturing screenshot...' ||
        (msg['type'] == 'screenshot');
    if (isScreenshot) {
      // Remove any "Capturing screenshot..." placeholder text messages
      ref.read(messagesProvider.notifier).removeWhere(
        (m) => m.role == MessageRole.assistant && m.content == 'Capturing screenshot...',
      );
      // Only add to chat if the Screen button explicitly requested it
      if (!_screenshotForChat) {
        final activeId = ref.read(activeSessionIdProvider);
        if (sessionId == null || sessionId == activeId) {
          ref.read(isTypingProvider.notifier).state = false;
        }
        return;
      }
      _screenshotForChat = false;
    }
    final message = Message.image(
      role: MessageRole.assistant,
      imageBase64: imageData,
      content: content,
      status: MessageStatus.sent,
    );
    // Cross-session routing: append to log for target session if not currently loaded.
    final currentId = ref.read(messagesProvider.notifier).currentSessionId;
    if (sessionId != null && sessionId != currentId) {
      MessageStoreV2.append(sessionId, message);
      _incrementUnread(sessionId);
      return;
    }
    ref.read(messagesProvider.notifier).addMessage(message);
    ref.read(isTypingProvider.notifier).state = false;
    _scrollToBottom();
  }

  /// Superseded by MessageStoreV2.append() — call sites now use the synchronous
  /// append directly. Kept as dead code until all callers are confirmed removed.
  // ignore: unused_element
  void _storeForSession(String sessionId, Message message) {
    MessageStoreV2.append(sessionId, message);
  }

  /// With the append-only log, transcript updates for cross-session messages
  /// are not patched back to disk (the append-only design doesn't support
  /// in-place edits). The transcript is updated in-memory if the message is
  /// in the active session. Cross-session transcript updates are a no-op.
  Future _updateTranscriptOnDisk(String messageId, String content) async {
    _chatLog('transcript: cross-session update for messageId=$messageId — in-memory only (append-only log)');
  }

  /// Bumps the unread counter for [sessionId] and persists the new totals.
  void _incrementUnread(String sessionId) {
    final counts = Map.from(ref.read(unreadCountsProvider));
    counts[sessionId] = (counts[sessionId] ??
0) + 1;
    ref.read(unreadCountsProvider.notifier).state = counts;
    _persistUnreadCounts(counts);
  }

  /// Updates the app badge immediately and persists counts for app restart.
  void _persistUnreadCounts(Map counts) {
    final total = counts.values.fold(0, (sum, v) => sum + v);
    // Set badge immediately via platform channel (synchronous native call)
    PushService.setBadge(total);
    // Also persist to SharedPreferences for app restart
    SharedPreferences.getInstance().then((prefs) {
      prefs.setString('unreadCounts', jsonEncode(counts));
      prefs.setInt('badgeCount', total);
    });
  }

  /// Switches the UI to [sessionId]: stops audio, swaps the loaded message
  /// list, clears unread state, and tells the gateway to switch too.
  Future _switchSession(String sessionId) async {
    // Stop any playing audio, dismiss keyboard, and clear typing indicator
    await AudioService.stopPlayback();
    setState(() => _playingMessageId = null);
    if (mounted) FocusScope.of(context).unfocus();
    ref.read(isTypingProvider.notifier).state = false;
    ref.read(activeSessionIdProvider.notifier).state = sessionId;
    // Synchronous — no async gap between session switch and incoming messages.
    ref.read(messagesProvider.notifier).switchSession(sessionId);
    SharedPreferences.getInstance().then((p) => p.setString('activeSessionId', sessionId));
    final counts = Map.from(ref.read(unreadCountsProvider));
    counts.remove(sessionId);
    ref.read(unreadCountsProvider.notifier).state = counts;
    _persistUnreadCounts(counts);
    // Update badge to reflect remaining unreads
    _updateBadgeFromUnreads();
    _sendCommand('switch', {'sessionId': sessionId});
    _scrollToBottom();
  }

  /// Sends the text in the input field as a plain chat message.
  void _sendTextMessage() {
    final text = _textController.text.trim();
    if (text.isEmpty) return;
    final message = Message.text(
      role: MessageRole.user,
      content: text,
      status: MessageStatus.sent,
    );
    ref.read(messagesProvider.notifier).addMessage(message);
    _textController.clear();
    FocusScope.of(context).unfocus(); // dismiss keyboard
    // Send as plain text (not command) — gateway handles plain messages
    _ws?.send({
      'content': text,
      'sessionId': ref.read(activeSessionIdProvider),
    });
    _scrollToBottom();
  }

  String?
_recordingSessionId; // Capture session at recording start

  /// Starts a voice recording, remembering which session it targets.
  Future _startRecording() async {
    // Stop any playing audio before recording
    if (AudioService.isPlaying) {
      await AudioService.stopPlayback();
      setState(() => _playingMessageId = null);
    }
    _recordingSessionId = ref.read(activeSessionIdProvider);
    final path = await AudioService.startRecording();
    if (path != null) {
      setState(() => _isRecording = true);
    } else {
      _recordingSessionId = null;
    }
  }

  /// Stops recording and sends the captured audio to the session that was
  /// active when recording started (not necessarily the current one).
  Future _stopRecording() async {
    final targetSession = _recordingSessionId;
    _recordingSessionId = null;
    final path = await AudioService.stopRecording();
    setState(() => _isRecording = false);
    if (path == null) return;
    final file = File(path);
    if (!await file.exists()) return;
    final bytes = await file.readAsBytes();
    final b64 = base64Encode(bytes);
    final message = Message.voice(
      role: MessageRole.user,
      audioUri: path,
      status: MessageStatus.sent,
    );
    ref.read(messagesProvider.notifier).addMessage(message);
    _ws?.send({
      'type': 'voice',
      'audioBase64': b64,
      'content': '',
      'messageId': message.id,
      'sessionId': targetSession,
    });
    _scrollToBottom();
  }

  /// Discards the in-progress recording.
  Future _cancelRecording() async {
    await AudioService.cancelRecording();
    setState(() => _isRecording = false);
  }

  /// Replays the most recent assistant voice note; if none exists, asks the
  /// gateway to TTS the most recent assistant text message.
  void _replayLast() {
    final messages = ref.read(messagesProvider);
    for (var i = messages.length - 1; i >= 0; i--) {
      final m = messages[i];
      if (m.role == MessageRole.assistant && m.type == MessageType.voice && m.audioUri != null) {
        _playMessage(m);
        return;
      }
    }
    for (var i = messages.length - 1; i >= 0; i--) {
      final m = messages[i];
      if (m.role == MessageRole.assistant && m.type == MessageType.text) {
        _ws?.send({
          'type': 'tts',
          'text': m.content,
          'sessionId': ref.read(activeSessionIdProvider),
        });
        return;
      }
    }
  }

  /// Toggles playback of [message]'s audio (file path or base64 payload).
  void _playMessage(Message message) async {
    if (message.audioUri == null) return;
    // Toggle: if this message is already playing, stop it
    if (_playingMessageId == message.id) {
      await AudioService.stopPlayback();
      setState(() => _playingMessageId = null);
      return;
    }
    // Stop any current playback first, then set playing ID AFTER stop completes
    // (stopPlayback triggers onPlaybackStateChanged which clears _playingMessageId)
    await AudioService.stopPlayback();
    if (!mounted) return;
    setState(() => _playingMessageId = message.id);
    // A leading '/' means a saved file path; otherwise it is base64 audio.
    if (message.audioUri!.startsWith('/')) {
      AudioService.playSingle(message.audioUri!);
    } else {
      AudioService.playBase64(message.audioUri!);
    }
  }

  /// Queues [message] and every consecutive assistant voice note after it.
  void _chainPlayFrom(Message message) {
    final messages = ref.read(messagesProvider);
    final startIndex = messages.indexWhere((m) => m.id == message.id);
    if (startIndex < 0) return;
    // FIX(review): element type restored (lost in the mangled paste) so the
    // list is assignable to AudioService.playChain's parameter.
    final chain = <String>[];
    for (var i = startIndex; i < messages.length; i++) {
      final m = messages[i];
      if (m.role == MessageRole.assistant && m.type == MessageType.voice && m.audioUri != null) {
        chain.add(m.audioUri!);
      } else if (m.role != MessageRole.assistant) {
        break;
      }
    }
    if (chain.isNotEmpty) {
      AudioService.playChain(chain);
    }
  }

  /// Lets the user pick arbitrary files, caption them (text or voice), and
  /// sends everything to the gateway as one atomic bundle.
  Future _pickFiles(String? targetSessionId) async {
    final result = await FilePicker.platform.pickFiles(
      allowMultiple: true,
      type: FileType.any,
    );
    if (result == null || result.files.isEmpty) return;
    // Build attachments list
    // FIX(review): element type restored — the mangled source read `= >[];`
    // which does not compile.
    final attachments = <Map<String, dynamic>>[];
    for (final file in result.files) {
      if (file.path == null) continue;
      final bytes = await File(file.path!).readAsBytes();
      final b64 = base64Encode(bytes);
      final mimeType = _guessMimeType(file.name);
      attachments.add({
        'data': b64,
        'mimeType': mimeType,
        'fileName': file.name,
      });
    }
    if (attachments.isEmpty) return;
    // Show caption dialog (unused `fileNames` join removed).
    final caption = await _showCaptionDialog(result.files.length);
    if (caption == null) {
      // User cancelled — just dismiss the keyboard.
      if (mounted) FocusManager.instance.primaryFocus?.unfocus();
      return;
    }
    // Handle voice caption
    String textCaption = caption;
    String?
voiceB64;
    // A caption of the form "__voice__:<path>" means a recorded voice caption.
    if (caption.startsWith('__voice__:')) {
      final voicePath = caption.substring('__voice__:'.length);
      final voiceFile = File(voicePath);
      if (await voiceFile.exists()) {
        voiceB64 = base64Encode(await voiceFile.readAsBytes());
      }
      textCaption = '';
    }
    // Send voice first if present
    if (voiceB64 != null) {
      final voiceMsg = Message.voice(
        role: MessageRole.user,
        audioUri: caption.substring('__voice__:'.length),
        status: MessageStatus.sent,
      );
      ref.read(messagesProvider.notifier).addMessage(voiceMsg);
      _ws?.send({
        'type': 'voice',
        'audioBase64': voiceB64,
        'content': '',
        'messageId': voiceMsg.id,
        'sessionId': targetSessionId,
      });
    }
    // Send all files as one atomic bundle
    _ws?.send({
      'type': 'bundle',
      'caption': textCaption,
      'attachments': attachments,
      'sessionId': targetSessionId,
    });
    // Show in chat
    for (final att in attachments) {
      final mime = att['mimeType'] as String;
      final name = att['fileName'] as String? ?? 'file';
      if (mime.startsWith('image/')) {
        ref.read(messagesProvider.notifier).addMessage(Message.image(
          role: MessageRole.user,
          imageBase64: att['data'] as String,
          content: name,
          status: MessageStatus.sent,
        ));
      } else {
        // Non-image files render as a text bubble with a paperclip summary.
        final size = base64Decode(att['data'] as String).length;
        ref.read(messagesProvider.notifier).addMessage(Message.text(
          role: MessageRole.user,
          content: textCaption.isNotEmpty ?
'$textCaption\n📎 $name (${_formatSize(size)})' : '📎 $name (${_formatSize(size)})',
          status: MessageStatus.sent,
        ));
      }
    }
    // Dismiss keyboard after file flow completes
    if (mounted) FocusManager.instance.primaryFocus?.unfocus();
    _scrollToBottom();
  }

  /// Maps a filename extension to a MIME type (octet-stream fallback).
  String _guessMimeType(String name) {
    final ext = name.split('.').last.toLowerCase();
    const map = {
      'jpg': 'image/jpeg',
      'jpeg': 'image/jpeg',
      'png': 'image/png',
      'gif': 'image/gif',
      'webp': 'image/webp',
      'heic': 'image/heic',
      'pdf': 'application/pdf',
      'doc': 'application/msword',
      'docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
      'xls': 'application/vnd.ms-excel',
      'xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
      'txt': 'text/plain',
      'csv': 'text/csv',
      'json': 'application/json',
      'zip': 'application/zip',
      'mp3': 'audio/mpeg',
      'mp4': 'video/mp4',
    };
    return map[ext] ?? 'application/octet-stream';
  }

  /// Human-readable size: B, KB, or MB with one decimal place.
  String _formatSize(int bytes) {
    if (bytes < 1024) return '$bytes B';
    if (bytes < 1024 * 1024) return '${(bytes / 1024).toStringAsFixed(1)} KB';
    return '${(bytes / (1024 * 1024)).toStringAsFixed(1)} MB';
  }

  /// Requests a screenshot that should appear in the chat (not just on the
  /// Navigate screen), with a transient snackbar while it is captured.
  void _requestScreenshot() {
    _screenshotForChat = true;
    _sendCommand('screenshot', {'sessionId': ref.read(activeSessionIdProvider)});
    if (mounted) {
      ScaffoldMessenger.of(context).showSnackBar(
        const SnackBar(
          content: Text('Capturing screenshot...'),
          duration: Duration(seconds: 2),
          behavior: SnackBarBehavior.floating,
        ),
      );
    }
  }

  /// Opens the Navigate screen, requesting a fresh screenshot first.
  void _navigateToTerminal() {
    _requestScreenshot();
    context.push('/navigate');
  }

  /// Lets the user take/pick photos (or divert to the file picker), caption
  /// them (text or voice), and send everything as one bundle.
  Future _pickPhoto() async {
    final targetSessionId = ref.read(activeSessionIdProvider);
    // Show picker options
    final source = await showModalBottomSheet(
      context: context,
      builder: (ctx) => SafeArea(
        child: Column(
          mainAxisSize: MainAxisSize.min,
          children: [
            ListTile(
              leading: const Icon(Icons.camera_alt),
              title: const Text('Take Photo'),
              onTap: () => Navigator.pop(ctx, 'camera'),
            ),
            ListTile(
              leading: const Icon(Icons.photo_library),
              title: const Text('Photo Library'),
onTap: () => Navigator.pop(ctx, 'gallery'), ), ListTile( leading: const Icon(Icons.attach_file), title: const Text('Files'), onTap: () => Navigator.pop(ctx, 'files'), ), ], ), ), ); if (source == null) return; if (source == 'files') { await _pickFiles(targetSessionId); return; } List images; final picker = ImagePicker(); if (source == 'camera') { final photo = await picker.pickImage( source: ImageSource.camera, maxWidth: 1920, maxHeight: 1080, imageQuality: 85, ); if (photo == null) return; images = [photo]; } else { images = await picker.pickMultiImage( maxWidth: 1920, maxHeight: 1080, imageQuality: 85, ); } if (images.isEmpty) return; // Encode all images first final encodedImages = []; for (final img in images) { final bytes = await img.readAsBytes(); encodedImages.add(base64Encode(bytes)); } final caption = await _showCaptionDialog(images.length); if (caption == null) { if (mounted) FocusManager.instance.primaryFocus?.unfocus(); return; // user cancelled } // Handle voice caption String textCaption = caption; String? voiceB64; if (caption.startsWith('__voice__:')) { final voicePath = caption.substring('__voice__:'.length); final voiceFile = File(voicePath); if (await voiceFile.exists()) { final voiceBytes = await voiceFile.readAsBytes(); voiceB64 = base64Encode(voiceBytes); } textCaption = ''; } final attachments = encodedImages.map((b64) => {'data': b64, 'mimeType': 'image/jpeg'} ).toList(); // Create the first image message early so we have its ID for transcript reflection final firstImageMsg = Message.image( role: MessageRole.user, imageBase64: encodedImages[0], content: textCaption.isNotEmpty ? textCaption : (voiceB64 != null ? '🎤 ...' 
: ''), status: MessageStatus.sent, ); // Send everything as a single atomic bundle _ws?.send({ 'type': 'bundle', 'caption': textCaption, if (voiceB64 != null) 'audioBase64': voiceB64, if (voiceB64 != null) 'voiceMessageId': firstImageMsg.id, 'attachments': attachments, 'sessionId': targetSessionId, }); // Show as combined image+caption bubbles ref.read(messagesProvider.notifier).addMessage(firstImageMsg); for (var i = 1; i < encodedImages.length; i++) { final message = Message.image( role: MessageRole.user, imageBase64: encodedImages[i], content: '', status: MessageStatus.sent, ); ref.read(messagesProvider.notifier).addMessage(message); } // Dismiss keyboard after image flow completes if (mounted) FocusManager.instance.primaryFocus?.unfocus(); _scrollToBottom(); } Future _showCaptionDialog(int imageCount) async { final captionController = TextEditingController(); String? voicePath; bool isVoiceRecording = false; bool hasVoiceCaption = false; final result = await showModalBottomSheet( context: context, isScrollControlled: true, builder: (ctx) => StatefulBuilder( builder: (ctx, setSheetState) => Padding( padding: EdgeInsets.only( bottom: MediaQuery.of(ctx).viewInsets.bottom, left: 16, right: 16, top: 16, ), child: Column( mainAxisSize: MainAxisSize.min, children: [ Text( '$imageCount image${imageCount > 1 ? 
's' : ''} selected', style: Theme.of(ctx).textTheme.titleSmall, ), const SizedBox(height: 12), // Text caption input if (!isVoiceRecording && !hasVoiceCaption) TextField( controller: captionController, decoration: const InputDecoration( hintText: 'Add a text caption (optional)', border: OutlineInputBorder(), ), autofocus: true, maxLines: 3, ), // Voice recording indicator if (isVoiceRecording) Container( padding: const EdgeInsets.symmetric(vertical: 20), child: const Row( mainAxisAlignment: MainAxisAlignment.center, children: [ Icon(Icons.fiber_manual_record, color: Colors.red, size: 16), SizedBox(width: 8), Text('Recording voice caption...', style: TextStyle(fontSize: 16)), ], ), ), // Voice recorded confirmation if (hasVoiceCaption && !isVoiceRecording) Container( padding: const EdgeInsets.symmetric(vertical: 20), child: const Row( mainAxisAlignment: MainAxisAlignment.center, children: [ Icon(Icons.check_circle, color: Colors.green, size: 20), SizedBox(width: 8), Text('Voice caption recorded', style: TextStyle(fontSize: 16)), ], ), ), const SizedBox(height: 12), // Action row: mic/stop + cancel + send Row( children: [ // Mic / Stop button — large and clear if (!hasVoiceCaption) IconButton.filled( onPressed: () async { if (isVoiceRecording) { final path = await AudioService.stopRecording(); setSheetState(() { isVoiceRecording = false; if (path != null) { voicePath = path; hasVoiceCaption = true; } }); } else { final path = await AudioService.startRecording(); if (path != null) { setSheetState(() => isVoiceRecording = true); } } }, icon: Icon(isVoiceRecording ? Icons.stop : Icons.mic), style: IconButton.styleFrom( backgroundColor: isVoiceRecording ? Colors.red : null, foregroundColor: isVoiceRecording ? 
Colors.white : null, ), ), const Spacer(), TextButton( onPressed: () async { if (isVoiceRecording) { await AudioService.cancelRecording(); } if (ctx.mounted) Navigator.pop(ctx); }, child: const Text('Cancel'), ), const SizedBox(width: 8), FilledButton( onPressed: isVoiceRecording ? null // disable Send while recording : () { if (voicePath != null) { Navigator.pop(ctx, '__voice__:$voicePath'); } else { Navigator.pop(ctx, captionController.text); } }, child: const Text('Send'), ), ], ), const SizedBox(height: 16), ], ), ), ), ); // Safety net: clean up recording if sheet dismissed by swipe/tap outside if (isVoiceRecording) { await AudioService.cancelRecording(); } captionController.dispose(); return result; } void _clearChat() { showDialog( context: context, builder: (ctx) => AlertDialog( title: const Text('Clear chat?'), content: const Text('This will clear the message history.'), actions: [ TextButton( onPressed: () => Navigator.pop(ctx), child: const Text('Cancel'), ), TextButton( onPressed: () { Navigator.pop(ctx); ref.read(messagesProvider.notifier).clearMessages(); }, child: const Text('Clear', style: TextStyle(color: AppColors.error)), ), ], ), ); } void _sendHelp() { _ws?.send({ 'content': '/h', 'sessionId': ref.read(activeSessionIdProvider), }); } void _toggleTheme() async { final current = ref.read(themeModeProvider); final next = current == ThemeMode.dark ? ThemeMode.light : ThemeMode.dark; ref.read(themeModeProvider.notifier).state = next; final prefs = await SharedPreferences.getInstance(); await prefs.setString('theme_mode', next == ThemeMode.dark ? 'dark' : 'light'); } void _scrollToBottom() { WidgetsBinding.instance.addPostFrameCallback((_) { if (_scrollController.hasClients) { _scrollController.animateTo( 0, duration: const Duration(milliseconds: 200), curve: Curves.easeOut, ); } }); } void _handleNewSession() { _sendCommand('create'); } /// Called when the user taps an upgrade CTA in the drawer or paywall banner. 
  Future _handleUpgrade() async {
    await PurchaseService.instance.purchaseFullAccess();
  }

  /// Renames [session] on the server and optimistically updates local state.
  void _handleSessionRename(Session session, String newName) {
    _sendCommand('rename', {'sessionId': session.id, 'name': newName});
    final sessions = ref.read(sessionsProvider);
    ref.read(sessionsProvider.notifier).state = sessions
        .map((s) => s.id == session.id ? s.copyWith(name: newName) : s)
        .toList();
  }

  /// Removes [session] on the server and optimistically drops it locally.
  void _handleSessionRemove(Session session) {
    _sendCommand('remove', {'sessionId': session.id});
    final sessions = ref.read(sessionsProvider);
    ref.read(sessionsProvider.notifier).state =
        sessions.where((s) => s.id != session.id).toList();
  }

  /// Handles a drag-reorder from the drawer's reorderable list.
  void _handleSessionReorder(int oldIndex, int newIndex) {
    // Standard ReorderableListView index correction when moving down.
    if (newIndex > oldIndex) newIndex--;
    final sessions = List.from(ref.read(sessionsProvider));
    final item = sessions.removeAt(oldIndex);
    sessions.insert(newIndex, item);
    ref.read(sessionsProvider.notifier).state = sessions;
    // Persist custom order AND update cache so next server update preserves it
    final ids = sessions.map((s) => s.id).toList();
    _cachedSessionOrder = ids;
    _saveSessionOrder(ids);
  }

  /// Persists the custom session order (fire-and-forget; result not awaited).
  void _saveSessionOrder(List ids) {
    SharedPreferences.getInstance().then((p) => p.setStringList('sessionOrder', ids));
  }

  /// Apply saved custom order to a server-provided session list.
  /// New sessions (not in saved order) are appended at the end.
  List _applyCustomOrder(List sessions) {
    if (_cachedSessionOrder == null || _cachedSessionOrder!.isEmpty) return sessions;
    final order = _cachedSessionOrder!;
    final byId = {for (final s in sessions) s.id: s};
    final ordered = [];
    for (final id in order) {
      // remove() so leftovers in byId are exactly the unknown sessions.
      final s = byId.remove(id);
      if (s != null) ordered.add(s);
    }
    // Append any new sessions not in saved order
    ordered.addAll(byId.values);
    return ordered;
  }

  /// Asks the server for a fresh session list.
  void _refreshSessions() {
    _sendCommand('sessions');
  }

  /// Builds the chat screen: app bar with connection status, session drawer,
  /// paywall banner, reversed message list, command bar, and input bar.
  @override
  Widget build(BuildContext context) {
    final allMessages = ref.watch(messagesProvider);
    final isPro = ref.watch(isProProvider);
    // Free tier: filter out messages older than 15 minutes on display.
    // Storage is unchanged — messages reappear if the user later upgrades.
    final messages = isPro
        ? allMessages
        : allMessages.where((m) {
            final ts = DateTime.fromMillisecondsSinceEpoch(m.timestamp);
            final age = DateTime.now().difference(ts);
            return age <= kFreeTierMessageTtl;
          }).toList();
    final wsStatus = ref.watch(wsStatusProvider);
    final isTyping = ref.watch(isTypingProvider);
    final connectionDetail = ref.watch(connectionDetailProvider);
    final sessions = ref.watch(sessionsProvider);
    final activeSession = ref.watch(activeSessionProvider);
    final unreadCounts = ref.watch(unreadCountsProvider);
    final inputMode = ref.watch(inputModeProvider);
    return GestureDetector(
      // Tap anywhere outside a field to dismiss the keyboard.
      behavior: HitTestBehavior.translucent,
      onTap: () => FocusScope.of(context).unfocus(),
      child: Scaffold(
        key: _scaffoldKey,
        appBar: AppBar(
          leading: IconButton(
            icon: const Icon(Icons.menu),
            onPressed: () {
              FocusScope.of(context).unfocus();
              _scaffoldKey.currentState?.openDrawer();
            },
          ),
          title: Column(
            crossAxisAlignment: CrossAxisAlignment.center,
            mainAxisSize: MainAxisSize.min,
            children: [
              Text(
                activeSession?.name ?? 'PAILot',
                style: const TextStyle(fontSize: 16, fontWeight: FontWeight.w600),
              ),
              // Subtitle: connection detail while not connected…
              if (connectionDetail.isNotEmpty && wsStatus != ConnectionStatus.connected)
                Text(
                  connectionDetail,
                  style: TextStyle(fontSize: 11, color: Colors.grey.shade400),
                ),
              // …or the transport used once connected.
              if (wsStatus == ConnectionStatus.connected &&
                  ref.watch(connectedViaProvider).isNotEmpty)
                Text(
                  'via ${ref.watch(connectedViaProvider)}',
                  style: TextStyle(fontSize: 11, color: Colors.grey.shade500),
                ),
            ],
          ),
          actions: [
            StatusDot(status: wsStatus),
            const SizedBox(width: 12),
            IconButton(
              icon: Icon(
                Theme.of(context).brightness == Brightness.dark
                    ? Icons.light_mode
                    : Icons.dark_mode,
                size: 20,
              ),
              onPressed: _toggleTheme,
            ),
            IconButton(
              icon: const Icon(Icons.settings, size: 20),
              onPressed: () => context.push('/settings'),
            ),
          ],
        ),
        onDrawerChanged: (isOpened) {
          if (isOpened) FocusManager.instance.primaryFocus?.unfocus();
        },
        drawer: SessionDrawer(
          sessions: sessions,
          activeSessionId: activeSession?.id,
          unreadCounts: unreadCounts,
          isPro: ref.watch(isProProvider),
          onSelect: (s) => _switchSession(s.id),
          onRemove: _handleSessionRemove,
          onRename: _handleSessionRename,
          onReorder: _handleSessionReorder,
          onNewSession: _handleNewSession,
          onRefresh: _refreshSessions,
          onUpgrade: _handleUpgrade,
        ),
        body: Column(
          children: [
            const PaywallBanner(),
            Expanded(
              child: ListView.builder(
                controller: _scrollController,
                // Reversed list: index 0 is the bottom (newest) slot.
                reverse: true,
                padding: const EdgeInsets.only(top: 8, bottom: 8),
                itemCount: messages.length + (isTyping ? 1 : 0),
                itemBuilder: (context, index) {
                  // Typing indicator occupies the bottom slot while active.
                  if (isTyping && index == 0) {
                    return const TypingIndicator();
                  }
                  // Map the reversed index back into messages[], shifted by
                  // one when the typing indicator is shown.
                  final msgIndex = isTyping
                      ? messages.length - index
                      : messages.length - 1 - index;
                  if (msgIndex < 0 || msgIndex >= messages.length) {
                    return const SizedBox.shrink();
                  }
                  final message = messages[msgIndex];
                  return MessageBubble(
                    message: message,
                    isPlaying: _playingMessageId == message.id,
                    onPlay: message.type == MessageType.voice
                        ? () => _playMessage(message)
                        : null,
                    // Chain playback only for assistant voice messages.
                    onChainPlay: message.type == MessageType.voice &&
                            message.role == MessageRole.assistant
                        ? () => _chainPlayFrom(message)
                        : null,
                    onDelete: () {
                      ref.read(messagesProvider.notifier).removeMessage(message.id);
                    },
                  );
                },
              ),
            ),
            CommandBar(
              onScreen: _requestScreenshot,
              onNavigate: _navigateToTerminal,
              onPhoto: _pickPhoto,
              onClear: _clearChat,
              // Help shortcut only makes sense in text mode.
              onHelp: inputMode == InputMode.text ? _sendHelp : null,
              showHelp: inputMode == InputMode.text,
            ),
            InputBar(
              mode: inputMode,
              isRecording: _isRecording,
              textController: _textController,
              onToggleMode: () {
                ref.read(inputModeProvider.notifier).state =
                    inputMode == InputMode.voice ? InputMode.text : InputMode.voice;
              },
              onRecordStart: _startRecording,
              onRecordStop: _stopRecording,
              onRecordCancel: _cancelRecording,
              onReplay: _replayLast,
              onSendText: _sendTextMessage,
            ),
          ],
        ),
      ),
    );
  }
}