| .. | .. |
|---|
| 85 | 85 | <string>UIInterfaceOrientationLandscapeLeft</string> |
|---|
| 86 | 86 | <string>UIInterfaceOrientationLandscapeRight</string> |
|---|
| 87 | 87 | </array> |
|---|
| 88 | + <key>UIBackgroundModes</key> |
|---|
| 89 | + <array> |
|---|
| 90 | + <string>audio</string> |
|---|
| 91 | + </array> |
|---|
| 88 | 92 | </dict> |
|---|
| 89 | 93 | </plist> |
|---|
| .. | .. |
|---|
| 114 | 114 | }; |
|---|
| 115 | 115 | } |
|---|
| 116 | 116 | |
|---|
| 117 | | - /// Lightweight JSON for persistence (strips heavy binary fields). |
|---|
| 117 | + /// Lightweight JSON for persistence (strips temp audio paths, keeps images). |
|---|
| 118 | 118 | Map<String, dynamic> toJsonLight() { |
|---|
| 119 | 119 | return { |
|---|
| 120 | 120 | 'id': id, |
|---|
| .. | .. |
|---|
| 124 | 124 | 'timestamp': timestamp, |
|---|
| 125 | 125 | if (status != null) 'status': status!.name, |
|---|
| 126 | 126 | if (duration != null) 'duration': duration, |
|---|
| 127 | + // Keep imageBase64 — images are typically 50-200 KB and must survive restart. |
|---|
| 128 | + // audioUri is intentionally omitted: it is a temp file path that won't survive restart. |
|---|
| 129 | + if (imageBase64 != null) 'imageBase64': imageBase64, |
|---|
| 127 | 130 | }; |
|---|
| 128 | 131 | } |
|---|
| 129 | 132 | |
|---|
| .. | .. |
|---|
| 148 | 151 | type == MessageType.voice && |
|---|
| 149 | 152 | (audioUri == null || audioUri!.isEmpty) && |
|---|
| 150 | 153 | content.isEmpty; |
|---|
| 154 | + |
|---|
| 155 | + /// Returns true if this is a text message with no content (empty bubble). |
|---|
| 156 | + bool get isEmptyText => |
|---|
| 157 | + type == MessageType.text && |
|---|
| 158 | + content.trim().isEmpty && |
|---|
| 159 | + imageBase64 == null; |
|---|
| 151 | 160 | } |
|---|
| .. | .. |
|---|
  /// Widget lifecycle entry point: registers this State as an app-lifecycle
  /// observer, kicks off async initialization, and wires up scroll handling.
  void initState() {
    super.initState();
    // Register for app lifecycle callbacks (presumably consumed via
    // didChangeAppLifecycleState on this State — TODO confirm).
    WidgetsBinding.instance.addObserver(this);
    // Fire-and-forget: _initAll restores lastSeq, hooks the audio playback
    // callback, then opens the connection (ordering enforced inside _initAll).
    _initAll();
    // _onScroll fires on every scroll event (pagination/scroll tracking).
    _scrollController.addListener(_onScroll);
  }
|---|
| 54 | 53 | |
|---|
| 55 | | - Future<void> _loadLastSeq() async { |
|---|
| 54 | + Future<void> _initAll() async { |
|---|
| 55 | + // Load lastSeq BEFORE connecting so catch_up sends the right value |
|---|
| 56 | 56 | final prefs = await SharedPreferences.getInstance(); |
|---|
| 57 | 57 | _lastSeq = prefs.getInt('lastSeq') ?? 0; |
|---|
| 58 | + if (!mounted) return; |
|---|
| 59 | + |
|---|
| 60 | + // Listen for playback state changes to reset play button UI |
|---|
| 61 | + AudioService.onPlaybackStateChanged = () { |
|---|
| 62 | + if (mounted) { |
|---|
| 63 | + setState(() { |
|---|
| 64 | + if (!AudioService.isPlaying) { |
|---|
| 65 | + _playingMessageId = null; |
|---|
| 66 | + } |
|---|
| 67 | + }); |
|---|
| 68 | + } |
|---|
| 69 | + }; |
|---|
| 70 | + |
|---|
| 71 | + _initConnection(); |
|---|
| 58 | 72 | } |
|---|
| 59 | 73 | |
|---|
| 60 | 74 | void _saveLastSeq() { |
|---|
| .. | .. |
|---|
| 538 | 552 | textCaption = ''; |
|---|
| 539 | 553 | } |
|---|
| 540 | 554 | |
|---|
| 541 | | - // Send all images together — first with caption, rest without |
|---|
| 542 | | - for (var i = 0; i < encodedImages.length; i++) { |
|---|
| 543 | | - final isFirst = i == 0; |
|---|
| 544 | | - final msgCaption = isFirst ? textCaption : ''; |
|---|
| 545 | | - |
|---|
| 546 | | - _ws?.send({ |
|---|
| 547 | | - 'type': 'image', |
|---|
| 548 | | - 'imageBase64': encodedImages[i], |
|---|
| 549 | | - 'mimeType': 'image/jpeg', |
|---|
| 550 | | - 'caption': msgCaption, |
|---|
| 551 | | - if (isFirst && voiceB64 != null) 'audioBase64': voiceB64, |
|---|
| 552 | | - 'sessionId': ref.read(activeSessionIdProvider), |
|---|
| 553 | | - // Signal how many images follow so receiving session can wait |
|---|
| 554 | | - if (isFirst && encodedImages.length > 1) |
|---|
| 555 | | - 'totalImages': encodedImages.length, |
|---|
| 556 | | - }); |
|---|
| 557 | | - } |
|---|
| 558 | | - |
|---|
| 559 | | - // If voice caption, also send the voice message so it gets transcribed |
|---|
| 555 | + // Send voice FIRST so Whisper transcribes it and the [PAILot:voice] prefix |
|---|
| 556 | + // sets the reply channel. Images follow — Claude sees transcript + images together. |
|---|
| 560 | 557 | if (voiceB64 != null) { |
|---|
| 561 | 558 | final voiceMsg = Message.voice( |
|---|
| 562 | 559 | role: MessageRole.user, |
|---|
| .. | .. |
|---|
| 569 | 566 | 'audioBase64': voiceB64, |
|---|
| 570 | 567 | 'content': '', |
|---|
| 571 | 568 | 'messageId': voiceMsg.id, |
|---|
| 569 | + 'sessionId': ref.read(activeSessionIdProvider), |
|---|
| 570 | + }); |
|---|
| 571 | + } |
|---|
| 572 | + |
|---|
| 573 | + // Send images — first with text caption (if any), rest without |
|---|
| 574 | + for (var i = 0; i < encodedImages.length; i++) { |
|---|
| 575 | + final isFirst = i == 0; |
|---|
| 576 | + final msgCaption = isFirst ? textCaption : ''; |
|---|
| 577 | + |
|---|
| 578 | + _ws?.send({ |
|---|
| 579 | + 'type': 'image', |
|---|
| 580 | + 'imageBase64': encodedImages[i], |
|---|
| 581 | + 'mimeType': 'image/jpeg', |
|---|
| 582 | + 'caption': msgCaption, |
|---|
| 572 | 583 | 'sessionId': ref.read(activeSessionIdProvider), |
|---|
| 573 | 584 | }); |
|---|
| 574 | 585 | } |
|---|
| .. | .. |
|---|
| 591 | 602 | final captionController = TextEditingController(); |
|---|
| 592 | 603 | String? voicePath; |
|---|
| 593 | 604 | bool isVoiceRecording = false; |
|---|
| 605 | + bool hasVoiceCaption = false; |
|---|
| 594 | 606 | |
|---|
| 595 | | - return showModalBottomSheet<String>( |
|---|
| 607 | + final result = await showModalBottomSheet<String>( |
|---|
| 596 | 608 | context: context, |
|---|
| 597 | 609 | isScrollControlled: true, |
|---|
| 598 | 610 | builder: (ctx) => StatefulBuilder( |
|---|
| .. | .. |
|---|
| 611 | 623 | style: Theme.of(ctx).textTheme.titleSmall, |
|---|
| 612 | 624 | ), |
|---|
| 613 | 625 | const SizedBox(height: 12), |
|---|
| 614 | | - TextField( |
|---|
| 615 | | - controller: captionController, |
|---|
| 616 | | - decoration: InputDecoration( |
|---|
| 617 | | - hintText: 'Add a caption (optional)', |
|---|
| 618 | | - border: const OutlineInputBorder(), |
|---|
| 619 | | - suffixIcon: IconButton( |
|---|
| 620 | | - icon: Icon( |
|---|
| 621 | | - isVoiceRecording ? Icons.stop_circle : Icons.mic, |
|---|
| 622 | | - color: isVoiceRecording ? Colors.red : null, |
|---|
| 623 | | - ), |
|---|
| 624 | | - onPressed: () async { |
|---|
| 625 | | - if (isVoiceRecording) { |
|---|
| 626 | | - final path = await AudioService.stopRecording(); |
|---|
| 627 | | - setSheetState(() => isVoiceRecording = false); |
|---|
| 628 | | - if (path != null) { |
|---|
| 629 | | - voicePath = path; |
|---|
| 630 | | - captionController.text = '🎤 Voice caption recorded'; |
|---|
| 631 | | - } |
|---|
| 632 | | - } else { |
|---|
| 633 | | - final path = await AudioService.startRecording(); |
|---|
| 634 | | - if (path != null) { |
|---|
| 635 | | - setSheetState(() => isVoiceRecording = true); |
|---|
| 636 | | - } |
|---|
| 637 | | - } |
|---|
| 638 | | - }, |
|---|
| 626 | + // Text caption input |
|---|
| 627 | + if (!isVoiceRecording && !hasVoiceCaption) |
|---|
| 628 | + TextField( |
|---|
| 629 | + controller: captionController, |
|---|
| 630 | + decoration: const InputDecoration( |
|---|
| 631 | + hintText: 'Add a text caption (optional)', |
|---|
| 632 | + border: OutlineInputBorder(), |
|---|
| 633 | + ), |
|---|
| 634 | + autofocus: true, |
|---|
| 635 | + maxLines: 3, |
|---|
| 636 | + ), |
|---|
| 637 | + // Voice recording indicator |
|---|
| 638 | + if (isVoiceRecording) |
|---|
| 639 | + Container( |
|---|
| 640 | + padding: const EdgeInsets.symmetric(vertical: 20), |
|---|
| 641 | + child: const Row( |
|---|
| 642 | + mainAxisAlignment: MainAxisAlignment.center, |
|---|
| 643 | + children: [ |
|---|
| 644 | + Icon(Icons.fiber_manual_record, color: Colors.red, size: 16), |
|---|
| 645 | + SizedBox(width: 8), |
|---|
| 646 | + Text('Recording voice caption...', style: TextStyle(fontSize: 16)), |
|---|
| 647 | + ], |
|---|
| 639 | 648 | ), |
|---|
| 640 | 649 | ), |
|---|
| 641 | | - autofocus: true, |
|---|
| 642 | | - maxLines: 3, |
|---|
| 643 | | - enabled: !isVoiceRecording, |
|---|
| 644 | | - ), |
|---|
| 650 | + // Voice recorded confirmation |
|---|
| 651 | + if (hasVoiceCaption && !isVoiceRecording) |
|---|
| 652 | + Container( |
|---|
| 653 | + padding: const EdgeInsets.symmetric(vertical: 20), |
|---|
| 654 | + child: const Row( |
|---|
| 655 | + mainAxisAlignment: MainAxisAlignment.center, |
|---|
| 656 | + children: [ |
|---|
| 657 | + Icon(Icons.check_circle, color: Colors.green, size: 20), |
|---|
| 658 | + SizedBox(width: 8), |
|---|
| 659 | + Text('Voice caption recorded', style: TextStyle(fontSize: 16)), |
|---|
| 660 | + ], |
|---|
| 661 | + ), |
|---|
| 662 | + ), |
|---|
| 645 | 663 | const SizedBox(height: 12), |
|---|
| 664 | + // Action row: mic/stop + cancel + send |
|---|
| 646 | 665 | Row( |
|---|
| 647 | | - mainAxisAlignment: MainAxisAlignment.end, |
|---|
| 648 | 666 | children: [ |
|---|
| 667 | + // Mic / Stop button — large and clear |
|---|
| 668 | + if (!hasVoiceCaption) |
|---|
| 669 | + IconButton.filled( |
|---|
| 670 | + onPressed: () async { |
|---|
| 671 | + if (isVoiceRecording) { |
|---|
| 672 | + final path = await AudioService.stopRecording(); |
|---|
| 673 | + setSheetState(() { |
|---|
| 674 | + isVoiceRecording = false; |
|---|
| 675 | + if (path != null) { |
|---|
| 676 | + voicePath = path; |
|---|
| 677 | + hasVoiceCaption = true; |
|---|
| 678 | + } |
|---|
| 679 | + }); |
|---|
| 680 | + } else { |
|---|
| 681 | + final path = await AudioService.startRecording(); |
|---|
| 682 | + if (path != null) { |
|---|
| 683 | + setSheetState(() => isVoiceRecording = true); |
|---|
| 684 | + } |
|---|
| 685 | + } |
|---|
| 686 | + }, |
|---|
| 687 | + icon: Icon(isVoiceRecording ? Icons.stop : Icons.mic), |
|---|
| 688 | + style: IconButton.styleFrom( |
|---|
| 689 | + backgroundColor: isVoiceRecording ? Colors.red : null, |
|---|
| 690 | + foregroundColor: isVoiceRecording ? Colors.white : null, |
|---|
| 691 | + ), |
|---|
| 692 | + ), |
|---|
| 693 | + const Spacer(), |
|---|
| 649 | 694 | TextButton( |
|---|
| 650 | | - onPressed: () { |
|---|
| 651 | | - if (isVoiceRecording) AudioService.cancelRecording(); |
|---|
| 652 | | - Navigator.pop(ctx); |
|---|
| 695 | + onPressed: () async { |
|---|
| 696 | + if (isVoiceRecording) { |
|---|
| 697 | + await AudioService.cancelRecording(); |
|---|
| 698 | + } |
|---|
| 699 | + if (ctx.mounted) Navigator.pop(ctx); |
|---|
| 653 | 700 | }, |
|---|
| 654 | 701 | child: const Text('Cancel'), |
|---|
| 655 | 702 | ), |
|---|
| 656 | 703 | const SizedBox(width: 8), |
|---|
| 657 | 704 | FilledButton( |
|---|
| 658 | | - onPressed: () { |
|---|
| 659 | | - if (voicePath != null) { |
|---|
| 660 | | - // Voice caption: send as voice message with images |
|---|
| 661 | | - Navigator.pop(ctx, '__voice__:$voicePath'); |
|---|
| 662 | | - } else { |
|---|
| 663 | | - Navigator.pop(ctx, captionController.text); |
|---|
| 664 | | - } |
|---|
| 665 | | - }, |
|---|
| 705 | + onPressed: isVoiceRecording |
|---|
| 706 | + ? null // disable Send while recording |
|---|
| 707 | + : () { |
|---|
| 708 | + if (voicePath != null) { |
|---|
| 709 | + Navigator.pop(ctx, '__voice__:$voicePath'); |
|---|
| 710 | + } else { |
|---|
| 711 | + Navigator.pop(ctx, captionController.text); |
|---|
| 712 | + } |
|---|
| 713 | + }, |
|---|
| 666 | 714 | child: const Text('Send'), |
|---|
| 667 | 715 | ), |
|---|
| 668 | 716 | ], |
|---|
| .. | .. |
|---|
| 673 | 721 | ), |
|---|
| 674 | 722 | ), |
|---|
| 675 | 723 | ); |
|---|
| 724 | + |
|---|
| 725 | + // Safety net: clean up recording if sheet dismissed by swipe/tap outside |
|---|
| 726 | + if (isVoiceRecording) { |
|---|
| 727 | + await AudioService.cancelRecording(); |
|---|
| 728 | + } |
|---|
| 729 | + |
|---|
| 730 | + captionController.dispose(); |
|---|
| 731 | + return result; |
|---|
| 676 | 732 | } |
|---|
| 677 | 733 | |
|---|
| 678 | 734 | void _clearChat() { |
|---|
| .. | .. |
|---|
| 84 | 84 | setState(() => _isWaking = true); |
|---|
| 85 | 85 | |
|---|
| 86 | 86 | try { |
|---|
| 87 | | - await WolService.wake(mac); |
|---|
| 87 | + await WolService.wake(mac, localHost: _localHostController.text.trim()); |
|---|
| 88 | 88 | if (mounted) { |
|---|
| 89 | 89 | ScaffoldMessenger.of(context).showSnackBar( |
|---|
| 90 | 90 | const SnackBar(content: Text('Wake-on-LAN packet sent')), |
|---|
| .. | .. |
|---|
| 7 | 7 | import 'package:record/record.dart'; |
|---|
| 8 | 8 | |
|---|
| 9 | 9 | /// Manages audio recording (AAC) and playback (queue + singleton). |
|---|
| 10 | +/// |
|---|
| 11 | +/// Incoming voice chunks are queued and played sequentially. |
|---|
| 12 | +/// Manual taps play a single file (or chain from that point). |
|---|
| 10 | 13 | class AudioService { |
|---|
| 11 | 14 | AudioService._(); |
|---|
| 12 | 15 | |
|---|
| .. | .. |
|---|
| 15 | 18 | static bool _isRecording = false; |
|---|
| 16 | 19 | static String? _currentRecordingPath; |
|---|
| 17 | 20 | |
|---|
| 18 | | - // Chain playback state |
|---|
| 19 | | - static final List<String> _playbackQueue = []; |
|---|
| 20 | | - static int _queueIndex = 0; |
|---|
| 21 | | - static bool _isChainPlaying = false; |
|---|
| 21 | + // Playback queue — file paths waiting to be played |
|---|
| 22 | + static final List<String> _queue = []; |
|---|
| 23 | + static bool _isPlaying = false; |
|---|
| 24 | + |
|---|
| 25 | + // Callback when playback starts/stops — UI uses this to update play buttons |
|---|
| 26 | + static void Function()? onPlaybackStateChanged; |
|---|
| 22 | 27 | |
|---|
| 23 | 28 | // Autoplay suppression |
|---|
| 24 | 29 | static bool _isBackgrounded = false; |
|---|
| 25 | 30 | |
|---|
  /// Initialize the audio service and set up lifecycle observer.
  static void init() {
    // _LifecycleObserver presumably toggles _isBackgrounded to suppress
    // autoplay while the app is backgrounded (observer body not shown here —
    // confirm).
    WidgetsBinding.instance.addObserver(_LifecycleObserver());

    // Configure audio session for background playback.
    // NOTE(review): setAudioContext returns a Future that is not awaited; a
    // play() issued immediately after init() could race the configuration —
    // confirm this is acceptable at startup.
    _player.setAudioContext(AudioContext(
      iOS: AudioContextIOS(
        category: AVAudioSessionCategory.playback,
        options: {AVAudioSessionOptions.mixWithOthers},
      ),
      android: const AudioContextAndroid(
        isSpeakerphoneOn: false,
        audioMode: AndroidAudioMode.normal,
        audioFocus: AndroidAudioFocus.gain,
      ),
    ));

    // When a track finishes, play the next in queue (or clear playing state).
    _player.onPlayerComplete.listen((_) {
      _onTrackComplete();
    });
  }
|---|
| 37 | 53 | |
|---|
| 38 | | - /// Whether we are currently recording. |
|---|
| 39 | | - static bool get isRecording => _isRecording; |
|---|
| 54 | + static void _onTrackComplete() { |
|---|
| 55 | + if (_queue.isNotEmpty) { |
|---|
| 56 | + _playNextInQueue(); |
|---|
| 57 | + } else { |
|---|
| 58 | + _isPlaying = false; |
|---|
| 59 | + onPlaybackStateChanged?.call(); |
|---|
| 60 | + } |
|---|
| 61 | + } |
|---|
| 40 | 62 | |
|---|
| 41 | | - /// Whether the app is backgrounded (suppresses autoplay). |
|---|
| 63 | + static Future<void> _playNextInQueue() async { |
|---|
| 64 | + if (_queue.isEmpty) { |
|---|
| 65 | + _isPlaying = false; |
|---|
| 66 | + onPlaybackStateChanged?.call(); |
|---|
| 67 | + return; |
|---|
| 68 | + } |
|---|
| 69 | + |
|---|
| 70 | + final path = _queue.removeAt(0); |
|---|
| 71 | + try { |
|---|
| 72 | + await _player.play(DeviceFileSource(path)); |
|---|
| 73 | + _isPlaying = true; |
|---|
| 74 | + onPlaybackStateChanged?.call(); |
|---|
| 75 | + } catch (_) { |
|---|
| 76 | + // Skip broken file, try next |
|---|
| 77 | + _onTrackComplete(); |
|---|
| 78 | + } |
|---|
| 79 | + } |
|---|
| 80 | + |
|---|
| 81 | + // ── Recording ── |
|---|
| 82 | + |
|---|
| 83 | + static bool get isRecording => _isRecording; |
|---|
| 42 | 84 | static bool get isBackgrounded => _isBackgrounded; |
|---|
| 43 | 85 | |
|---|
| 44 | | - /// Start recording audio in AAC format. |
|---|
| 45 | | - /// Returns the file path where the recording will be saved. |
|---|
| 46 | 86 | static Future<String?> startRecording() async { |
|---|
| 47 | 87 | if (_isRecording) return null; |
|---|
| 48 | 88 | |
|---|
| .. | .. |
|---|
| 67 | 107 | return path; |
|---|
| 68 | 108 | } |
|---|
| 69 | 109 | |
|---|
| 70 | | - /// Stop recording and return the file path. |
|---|
| 71 | 110 | static Future<String?> stopRecording() async { |
|---|
| 72 | 111 | if (!_isRecording) return null; |
|---|
| 73 | 112 | |
|---|
| .. | .. |
|---|
| 77 | 116 | return path; |
|---|
| 78 | 117 | } |
|---|
| 79 | 118 | |
|---|
| 80 | | - /// Cancel the current recording and delete the file. |
|---|
| 81 | 119 | static Future<void> cancelRecording() async { |
|---|
| 82 | 120 | if (!_isRecording) return; |
|---|
| 83 | 121 | |
|---|
| .. | .. |
|---|
| 92 | 130 | } |
|---|
| 93 | 131 | } |
|---|
| 94 | 132 | |
|---|
| 95 | | - /// Play a single audio source (cancels any current playback). |
|---|
| 96 | | - static Future<void> playSingle(String source, |
|---|
| 97 | | - {bool cancelPrevious = true}) async { |
|---|
| 98 | | - if (cancelPrevious) { |
|---|
| 99 | | - await stopPlayback(); |
|---|
| 100 | | - } |
|---|
| 133 | + // ── Playback ── |
|---|
| 101 | 134 | |
|---|
| 102 | | - _isChainPlaying = false; |
|---|
| 103 | | - |
|---|
| 104 | | - if (source.startsWith('http://') || source.startsWith('https://')) { |
|---|
| 105 | | - await _player.play(UrlSource(source)); |
|---|
| 106 | | - } else if (source.startsWith('/')) { |
|---|
| 107 | | - await _player.play(DeviceFileSource(source)); |
|---|
| 108 | | - } else { |
|---|
| 109 | | - // Assume base64 data URI or asset |
|---|
| 110 | | - await _player.play(UrlSource(source)); |
|---|
| 111 | | - } |
|---|
| 112 | | - } |
|---|
| 113 | | - |
|---|
| 114 | | - /// Play a base64-encoded audio blob by writing to a temp file first. |
|---|
| 115 | | - /// Stops any current playback. |
|---|
| 116 | | - static Future<void> playBase64(String base64Audio) async { |
|---|
| 135 | + /// Play a single file. Stops current playback and clears the queue. |
|---|
| 136 | + static Future<void> playSingle(String source) async { |
|---|
| 117 | 137 | await stopPlayback(); |
|---|
| 118 | 138 | |
|---|
| 139 | + if (source.startsWith('/')) { |
|---|
| 140 | + await _player.play(DeviceFileSource(source)); |
|---|
| 141 | + } else { |
|---|
| 142 | + // base64 data — write to temp file first |
|---|
| 143 | + final path = await _base64ToFile(source); |
|---|
| 144 | + if (path == null) return; |
|---|
| 145 | + await _player.play(DeviceFileSource(path)); |
|---|
| 146 | + } |
|---|
| 147 | + _isPlaying = true; |
|---|
| 148 | + onPlaybackStateChanged?.call(); |
|---|
| 149 | + } |
|---|
| 150 | + |
|---|
| 151 | + /// Play a base64-encoded audio blob. Stops current playback. |
|---|
| 152 | + static Future<void> playBase64(String base64Audio) async { |
|---|
| 153 | + await stopPlayback(); |
|---|
| 119 | 154 | final path = await _base64ToFile(base64Audio); |
|---|
| 120 | 155 | if (path == null) return; |
|---|
| 121 | 156 | |
|---|
| 122 | 157 | await _player.play(DeviceFileSource(path)); |
|---|
| 158 | + _isPlaying = true; |
|---|
| 159 | + onPlaybackStateChanged?.call(); |
|---|
| 123 | 160 | } |
|---|
| 124 | 161 | |
|---|
| 125 | | - /// Queue a base64-encoded audio blob for playback. |
|---|
| 126 | | - /// If nothing is playing, starts immediately. If already playing, |
|---|
| 127 | | - /// adds to the chain queue so it plays after the current one finishes. |
|---|
| 162 | + /// Queue a base64-encoded audio blob for sequential playback. |
|---|
| 163 | + /// If nothing is playing, starts immediately. |
|---|
| 164 | + /// If already playing, appends to queue — plays after current finishes. |
|---|
| 128 | 165 | static Future<void> queueBase64(String base64Audio) async { |
|---|
| 129 | 166 | final path = await _base64ToFile(base64Audio); |
|---|
| 130 | 167 | if (path == null) return; |
|---|
| 131 | 168 | |
|---|
| 132 | | - if (_player.state == PlayerState.playing || _isChainPlaying) { |
|---|
| 133 | | - // Already playing — add to queue |
|---|
| 134 | | - _playbackQueue.add(path); |
|---|
| 169 | + if (_isPlaying) { |
|---|
| 170 | + // Already playing — just add to queue, it will play when current finishes |
|---|
| 171 | + _queue.add(path); |
|---|
| 135 | 172 | } else { |
|---|
| 136 | | - // Nothing playing — start chain |
|---|
| 137 | | - _playbackQueue.clear(); |
|---|
| 138 | | - _playbackQueue.add(path); |
|---|
| 139 | | - _queueIndex = 0; |
|---|
| 140 | | - _isChainPlaying = true; |
|---|
| 141 | | - await _playCurrent(); |
|---|
| 173 | + // Nothing playing — start immediately |
|---|
| 174 | + await _player.play(DeviceFileSource(path)); |
|---|
| 175 | + _isPlaying = true; |
|---|
| 176 | + onPlaybackStateChanged?.call(); |
|---|
| 142 | 177 | } |
|---|
| 143 | 178 | } |
|---|
| 179 | + |
|---|
| 180 | + /// Chain playback: play a list of sources sequentially. |
|---|
| 181 | + /// First one plays immediately, rest are queued. |
|---|
| 182 | + static Future<void> playChain(List<String> sources) async { |
|---|
| 183 | + if (sources.isEmpty) return; |
|---|
| 184 | + if (_isBackgrounded) return; |
|---|
| 185 | + |
|---|
| 186 | + await stopPlayback(); |
|---|
| 187 | + |
|---|
| 188 | + // Queue all except the first |
|---|
| 189 | + for (var i = 1; i < sources.length; i++) { |
|---|
| 190 | + _queue.add(sources[i]); |
|---|
| 191 | + } |
|---|
| 192 | + |
|---|
| 193 | + // Play the first one |
|---|
| 194 | + final first = sources[0]; |
|---|
| 195 | + if (first.startsWith('/')) { |
|---|
| 196 | + await _player.play(DeviceFileSource(first)); |
|---|
| 197 | + } else { |
|---|
| 198 | + final path = await _base64ToFile(first); |
|---|
| 199 | + if (path == null) return; |
|---|
| 200 | + await _player.play(DeviceFileSource(path)); |
|---|
| 201 | + } |
|---|
| 202 | + _isPlaying = true; |
|---|
| 203 | + onPlaybackStateChanged?.call(); |
|---|
| 204 | + } |
|---|
| 205 | + |
|---|
  /// Stop all playback and clear queue.
  ///
  /// Queue and flag are cleared BEFORE awaiting stop() so any completion
  /// event delivered during the stop sees an empty queue and cannot restart
  /// the chain. NOTE(review): confirm the player does not emit
  /// onPlayerComplete on a manual stop.
  static Future<void> stopPlayback() async {
    _queue.clear();
    _isPlaying = false;
    await _player.stop();
    // Always notify so play-button UI resets even when nothing was playing.
    onPlaybackStateChanged?.call();
  }

  /// Whether audio is currently playing.
  ///
  /// Tracked manually (not read from the player) so it stays consistent with
  /// the queue bookkeeping above.
  static bool get isPlaying => _isPlaying;
|---|
| 216 | + |
|---|
| 217 | + // ── Helpers ── |
|---|
| 144 | 218 | |
|---|
| 145 | 219 | static Future<String?> _base64ToFile(String base64Audio) async { |
|---|
| 146 | 220 | final dir = await getTemporaryDirectory(); |
|---|
| .. | .. |
|---|
| 152 | 226 | return path; |
|---|
| 153 | 227 | } |
|---|
| 154 | 228 | |
|---|
| 155 | | - /// Chain playback: play a list of audio sources sequentially. |
|---|
| 156 | | - static Future<void> playChain(List<String> sources) async { |
|---|
| 157 | | - if (sources.isEmpty) return; |
|---|
| 158 | | - if (_isBackgrounded) return; // Suppress autoplay when backgrounded |
|---|
| 159 | | - |
|---|
| 160 | | - await stopPlayback(); |
|---|
| 161 | | - |
|---|
| 162 | | - _playbackQueue.clear(); |
|---|
| 163 | | - _playbackQueue.addAll(sources); |
|---|
| 164 | | - _queueIndex = 0; |
|---|
| 165 | | - _isChainPlaying = true; |
|---|
| 166 | | - |
|---|
| 167 | | - await _playCurrent(); |
|---|
| 168 | | - } |
|---|
| 169 | | - |
|---|
| 170 | | - static Future<void> _playCurrent() async { |
|---|
| 171 | | - if (_queueIndex >= _playbackQueue.length) { |
|---|
| 172 | | - _isChainPlaying = false; |
|---|
| 173 | | - return; |
|---|
| 174 | | - } |
|---|
| 175 | | - |
|---|
| 176 | | - final source = _playbackQueue[_queueIndex]; |
|---|
| 177 | | - if (source.startsWith('/')) { |
|---|
| 178 | | - await _player.play(DeviceFileSource(source)); |
|---|
| 179 | | - } else { |
|---|
| 180 | | - await _player.play(UrlSource(source)); |
|---|
| 181 | | - } |
|---|
| 182 | | - } |
|---|
| 183 | | - |
|---|
| 184 | | - static Future<void> _playNext() async { |
|---|
| 185 | | - _queueIndex++; |
|---|
| 186 | | - if (_queueIndex < _playbackQueue.length) { |
|---|
| 187 | | - await _playCurrent(); |
|---|
| 188 | | - } else { |
|---|
| 189 | | - _isChainPlaying = false; |
|---|
| 190 | | - } |
|---|
| 191 | | - } |
|---|
| 192 | | - |
|---|
| 193 | | - /// Stop all playback. |
|---|
| 194 | | - static Future<void> stopPlayback() async { |
|---|
| 195 | | - _isChainPlaying = false; |
|---|
| 196 | | - _playbackQueue.clear(); |
|---|
| 197 | | - await _player.stop(); |
|---|
| 198 | | - } |
|---|
| 199 | | - |
|---|
| 200 | | - /// Whether audio is currently playing. |
|---|
| 201 | | - static bool get isPlaying => |
|---|
| 202 | | - _player.state == PlayerState.playing; |
|---|
| 203 | | - |
|---|
| 204 | 229 | static List<int>? _decodeBase64(String b64) { |
|---|
| 205 | 230 | try { |
|---|
| 206 | | - // Remove data URI prefix if present |
|---|
| 207 | 231 | final cleaned = b64.contains(',') ? b64.split(',').last : b64; |
|---|
| 208 | 232 | return List<int>.from( |
|---|
| 209 | 233 | Uri.parse('data:;base64,$cleaned').data!.contentAsBytes(), |
|---|
| .. | .. |
|---|
| 213 | 237 | } |
|---|
| 214 | 238 | } |
|---|
| 215 | 239 | |
|---|
| 216 | | - /// Dispose resources. |
|---|
| 217 | 240 | static Future<void> dispose() async { |
|---|
| 218 | 241 | await cancelRecording(); |
|---|
| 219 | 242 | await stopPlayback(); |
|---|
| .. | .. |
|---|
| 82 | 82 | final jsonStr = await file.readAsString(); |
|---|
| 83 | 83 | final List<dynamic> jsonList = jsonDecode(jsonStr) as List<dynamic>; |
|---|
| 84 | 84 | final allMessages = jsonList |
|---|
| 85 | | - .map((j) => Message.fromJson(j as Map<String, dynamic>)) |
|---|
| 86 | | - .where((m) => !m.isEmptyVoice) // Filter out voice msgs with no content |
|---|
| 85 | + .map((j) => _messageFromJson(j as Map<String, dynamic>)) |
|---|
| 86 | + .where((m) => !m.isEmptyVoice && !m.isEmptyText) |
|---|
| 87 | 87 | .toList(); |
|---|
| 88 | 88 | |
|---|
| 89 | 89 | // Paginate from the end (newest messages first in storage) |
|---|
| .. | .. |
|---|
| 106 | 106 | final jsonStr = await file.readAsString(); |
|---|
| 107 | 107 | final List<dynamic> jsonList = jsonDecode(jsonStr) as List<dynamic>; |
|---|
| 108 | 108 | return jsonList |
|---|
| 109 | | - .map((j) => Message.fromJson(j as Map<String, dynamic>)) |
|---|
| 110 | | - .where((m) => !m.isEmptyVoice) |
|---|
| 109 | + .map((j) => _messageFromJson(j as Map<String, dynamic>)) |
|---|
| 110 | + .where((m) => !m.isEmptyVoice && !m.isEmptyText) |
|---|
| 111 | 111 | .toList(); |
|---|
| 112 | 112 | } catch (e) { |
|---|
| 113 | 113 | return []; |
|---|
| 114 | 114 | } |
|---|
| 115 | 115 | } |
|---|
| 116 | 116 | |
|---|
| 117 | + /// Deserialize a message from JSON, applying migration rules: |
|---|
| 118 | + /// - Voice messages without audioUri are downgraded to text (transcript only). |
|---|
| 119 | + /// This handles messages saved before a restart, where the temp audio file |
|---|
| 120 | + /// is no longer available. The transcript (content) is preserved. |
|---|
| 121 | + static Message _messageFromJson(Map<String, dynamic> json) { |
|---|
| 122 | + final raw = Message.fromJson(json); |
|---|
| 123 | + if (raw.type == MessageType.voice && |
|---|
| 124 | + (raw.audioUri == null || raw.audioUri!.isEmpty)) { |
|---|
| 125 | + // Downgrade to text so the bubble shows the transcript instead of a |
|---|
| 126 | + // broken play button. |
|---|
| 127 | + return Message( |
|---|
| 128 | + id: raw.id, |
|---|
| 129 | + role: raw.role, |
|---|
| 130 | + type: MessageType.text, |
|---|
| 131 | + content: raw.content, |
|---|
| 132 | + timestamp: raw.timestamp, |
|---|
| 133 | + status: raw.status, |
|---|
| 134 | + duration: raw.duration, |
|---|
| 135 | + ); |
|---|
| 136 | + } |
|---|
| 137 | + return raw; |
|---|
| 138 | + } |
|---|
| 139 | + |
|---|
| 117 | 140 | /// Delete stored messages for a session. |
|---|
| 118 | 141 | static Future<void> delete(String sessionId) async { |
|---|
| 119 | 142 | try { |
|---|
| .. | .. |
|---|
| 60 | 60 | // Send Wake-on-LAN if MAC configured |
|---|
| 61 | 61 | if (config.macAddress != null && config.macAddress!.isNotEmpty) { |
|---|
| 62 | 62 | try { |
|---|
| 63 | | - await WolService.wake(config.macAddress!); |
|---|
| 63 | + await WolService.wake(config.macAddress!, localHost: config.localHost); |
|---|
| 64 | 64 | } catch (_) {} |
|---|
| 65 | 65 | } |
|---|
| 66 | 66 | |
|---|
| .. | .. |
|---|
| 32 | 32 | return packet.toBytes(); |
|---|
| 33 | 33 | } |
|---|
| 34 | 34 | |
|---|
| 35 | + /// Derive subnet broadcast from an IP address (e.g., 192.168.1.100 → 192.168.1.255). |
|---|
| 36 | + static String? _subnetBroadcast(String? ip) { |
|---|
| 37 | + if (ip == null || ip.isEmpty) return null; |
|---|
| 38 | + final parts = ip.split('.'); |
|---|
| 39 | + if (parts.length != 4) return null; |
|---|
| 40 | + return '${parts[0]}.${parts[1]}.${parts[2]}.255'; |
|---|
| 41 | + } |
|---|
| 42 | + |
|---|
| 35 | 43 | /// Send a Wake-on-LAN packet for the given MAC address. |
|---|
| 36 | | - /// Broadcasts to 255.255.255.255:9 and optionally to a subnet broadcast. |
|---|
| 37 | | - static Future<void> wake(String macAddress, {String? subnetBroadcast}) async { |
|---|
| 44 | + /// Broadcasts to 255.255.255.255 and subnet broadcast derived from localHost. |
|---|
| 45 | + /// Sends on ports 7 and 9 for maximum compatibility. |
|---|
| 46 | + static Future<void> wake(String macAddress, {String? localHost}) async { |
|---|
| 38 | 47 | final macBytes = _parseMac(macAddress); |
|---|
| 39 | 48 | if (macBytes == null) { |
|---|
| 40 | 49 | throw ArgumentError('Invalid MAC address: $macAddress'); |
|---|
| .. | .. |
|---|
| 48 | 57 | ); |
|---|
| 49 | 58 | socket.broadcastEnabled = true; |
|---|
| 50 | 59 | |
|---|
| 51 | | - // Send to broadcast address |
|---|
| 52 | | - final broadcastAddr = InternetAddress('255.255.255.255'); |
|---|
| 53 | | - socket.send(packet, broadcastAddr, 9); |
|---|
| 60 | + final targets = <InternetAddress>[ |
|---|
| 61 | + InternetAddress('255.255.255.255'), |
|---|
| 62 | + ]; |
|---|
| 54 | 63 | |
|---|
| 55 | | - // Also send to subnet broadcast if provided |
|---|
| 56 | | - if (subnetBroadcast != null && subnetBroadcast.isNotEmpty) { |
|---|
| 64 | + // Add subnet broadcast derived from localHost |
|---|
| 65 | + final subnet = _subnetBroadcast(localHost); |
|---|
| 66 | + if (subnet != null) { |
|---|
| 57 | 67 | try { |
|---|
| 58 | | - final subnetAddr = InternetAddress(subnetBroadcast); |
|---|
| 59 | | - socket.send(packet, subnetAddr, 9); |
|---|
| 60 | | - } catch (_) { |
|---|
| 61 | | - // Ignore invalid subnet broadcast address |
|---|
| 62 | | - } |
|---|
| 68 | + targets.add(InternetAddress(subnet)); |
|---|
| 69 | + } catch (_) {} |
|---|
| 63 | 70 | } |
|---|
| 64 | 71 | |
|---|
| 65 | | - // Send a few extra packets for reliability |
|---|
| 66 | | - await Future.delayed(const Duration(milliseconds: 100)); |
|---|
| 67 | | - socket.send(packet, broadcastAddr, 9); |
|---|
| 68 | | - await Future.delayed(const Duration(milliseconds: 100)); |
|---|
| 69 | | - socket.send(packet, broadcastAddr, 9); |
|---|
| 72 | + // Send to all targets on both common WoL ports |
|---|
| 73 | + for (final addr in targets) { |
|---|
| 74 | + socket.send(packet, addr, 9); |
|---|
| 75 | + socket.send(packet, addr, 7); |
|---|
| 76 | + } |
|---|
| 77 | + |
|---|
| 78 | + // Repeat for reliability |
|---|
| 79 | + for (var i = 0; i < 3; i++) { |
|---|
| 80 | + await Future.delayed(const Duration(milliseconds: 100)); |
|---|
| 81 | + for (final addr in targets) { |
|---|
| 82 | + socket.send(packet, addr, 9); |
|---|
| 83 | + } |
|---|
| 84 | + } |
|---|
| 70 | 85 | |
|---|
| 71 | 86 | socket.close(); |
|---|
| 72 | 87 | } |
|---|
| .. | .. |
|---|
| 214 | 214 | : message.imageBase64!, |
|---|
| 215 | 215 | ); |
|---|
| 216 | 216 | |
|---|
| 217 | | - return GestureDetector( |
|---|
| 218 | | - onTap: () { |
|---|
| 219 | | - Navigator.of(context).push( |
|---|
| 220 | | - MaterialPageRoute( |
|---|
| 221 | | - builder: (_) => ImageViewer(imageBytes: bytes), |
|---|
| 222 | | - ), |
|---|
| 223 | | - ); |
|---|
| 224 | | - }, |
|---|
| 225 | | - child: ClipRRect( |
|---|
| 226 | | - borderRadius: BorderRadius.circular(8), |
|---|
| 227 | | - child: Image.memory( |
|---|
| 228 | | - bytes, |
|---|
| 229 | | - width: 260, |
|---|
| 230 | | - height: 180, |
|---|
| 231 | | - fit: BoxFit.cover, |
|---|
| 232 | | - errorBuilder: (_, e, st) => const SizedBox( |
|---|
| 233 | | - width: 260, |
|---|
| 234 | | - height: 60, |
|---|
| 235 | | - child: Center(child: Text('Image decode error')), |
|---|
| 217 | + return Column( |
|---|
| 218 | + crossAxisAlignment: CrossAxisAlignment.start, |
|---|
| 219 | + children: [ |
|---|
| 220 | + GestureDetector( |
|---|
| 221 | + onTap: () { |
|---|
| 222 | + Navigator.of(context).push( |
|---|
| 223 | + MaterialPageRoute( |
|---|
| 224 | + builder: (_) => ImageViewer(imageBytes: bytes), |
|---|
| 225 | + ), |
|---|
| 226 | + ); |
|---|
| 227 | + }, |
|---|
| 228 | + child: ClipRRect( |
|---|
| 229 | + borderRadius: BorderRadius.circular(8), |
|---|
| 230 | + child: Image.memory( |
|---|
| 231 | + bytes, |
|---|
| 232 | + width: 260, |
|---|
| 233 | + height: 180, |
|---|
| 234 | + fit: BoxFit.cover, |
|---|
| 235 | + errorBuilder: (_, e, st) => const SizedBox( |
|---|
| 236 | + width: 260, |
|---|
| 237 | + height: 60, |
|---|
| 238 | + child: Center(child: Text('Image decode error')), |
|---|
| 239 | + ), |
|---|
| 240 | + ), |
|---|
| 236 | 241 | ), |
|---|
| 237 | 242 | ), |
|---|
| 238 | | - ), |
|---|
| 243 | + if (message.content.isNotEmpty) ...[ |
|---|
| 244 | + const SizedBox(height: 6), |
|---|
| 245 | + Text( |
|---|
| 246 | + message.content, |
|---|
| 247 | + style: TextStyle( |
|---|
| 248 | + fontSize: 14, |
|---|
| 249 | + color: _isUser ? Colors.white.withAlpha(220) : null, |
|---|
| 250 | + height: 1.3, |
|---|
| 251 | + ), |
|---|
| 252 | + ), |
|---|
| 253 | + ], |
|---|
| 254 | + ], |
|---|
| 239 | 255 | ); |
|---|
| 240 | 256 | } |
|---|
| 241 | 257 | |
|---|