From 25119a9b148a291ba0af4f9f70801d12f2309147 Mon Sep 17 00:00:00 2001
From: Matthias Nott <mnott@mnsoft.org>
Date: Sat, 21 Mar 2026 21:04:28 +0100
Subject: [PATCH] fix: audio chain playback, empty bubbles, playback state reset

---
 lib/services/message_store.dart |    4 
 lib/services/audio_service.dart |  211 +++++++++++++++++++++++++++-------------------------
 lib/models/message.dart         |    6 +
 lib/screens/chat_screen.dart    |   12 +++
 4 files changed, 130 insertions(+), 103 deletions(-)

diff --git a/lib/models/message.dart b/lib/models/message.dart
index 6ea436b..48dc9c0 100644
--- a/lib/models/message.dart
+++ b/lib/models/message.dart
@@ -151,4 +151,10 @@
       type == MessageType.voice &&
       (audioUri == null || audioUri!.isEmpty) &&
       content.isEmpty;
+
+  /// Returns true if this is a text message with no content (empty bubble).
+  bool get isEmptyText =>
+      type == MessageType.text &&
+      content.trim().isEmpty &&
+      imageBase64 == null;
 }
diff --git a/lib/screens/chat_screen.dart b/lib/screens/chat_screen.dart
index 0d6cc0f..f1d4436 100644
--- a/lib/screens/chat_screen.dart
+++ b/lib/screens/chat_screen.dart
@@ -56,6 +56,18 @@
     final prefs = await SharedPreferences.getInstance();
     _lastSeq = prefs.getInt('lastSeq') ?? 0;
     if (!mounted) return;
+
+    // Reset play-button UI when playback stops. NOTE(review): static callback is never cleared — consider nulling it in dispose() so this State isn't retained.
+    AudioService.onPlaybackStateChanged = () {
+      if (mounted) {
+        setState(() {
+          if (!AudioService.isPlaying) {
+            _playingMessageId = null;
+          }
+        });
+      }
+    };
+
     _initConnection();
   }
 
diff --git a/lib/services/audio_service.dart b/lib/services/audio_service.dart
index 8e4db7c..680a0fd 100644
--- a/lib/services/audio_service.dart
+++ b/lib/services/audio_service.dart
@@ -7,6 +7,9 @@
 import 'package:record/record.dart';
 
 /// Manages audio recording (AAC) and playback (queue + singleton).
+///
+/// Incoming voice chunks are queued and played sequentially.
+/// Manual taps play a single file (or chain from that point).
 class AudioService {
   AudioService._();
 
@@ -15,21 +18,21 @@
   static bool _isRecording = false;
   static String? _currentRecordingPath;
 
-  // Chain playback state
-  static final List<String> _playbackQueue = [];
-  static int _queueIndex = 0;
-  static bool _isChainPlaying = false;
+  // Playback queue — file paths waiting to be played
+  static final List<String> _queue = [];
+  static bool _isPlaying = false;
+
+  // Callback when playback starts/stops — UI uses this to update play buttons
+  static void Function()? onPlaybackStateChanged;
 
   // Autoplay suppression
   static bool _isBackgrounded = false;
 
   /// Initialize the audio service and set up lifecycle observer.
   static void init() {
-    // Listen for app lifecycle changes to suppress autoplay when backgrounded
     WidgetsBinding.instance.addObserver(_LifecycleObserver());
 
-    // Configure audio session for playback — allows audio to continue
-    // when screen locks or app goes to background
+    // Configure audio session for background playback
     _player.setAudioContext(AudioContext(
       iOS: AudioContextIOS(
         category: AVAudioSessionCategory.playback,
@@ -42,21 +45,44 @@
       ),
     ));
 
+    // When a track finishes, play the next in queue
     _player.onPlayerComplete.listen((_) {
-      if (_isChainPlaying) {
-        _playNext();
-      }
+      _onTrackComplete();
     });
   }
 
-  /// Whether we are currently recording.
-  static bool get isRecording => _isRecording;
+  static void _onTrackComplete() {
+    if (_queue.isNotEmpty) {
+      _playNextInQueue();
+    } else {
+      _isPlaying = false;
+      onPlaybackStateChanged?.call();
+    }
+  }
 
-  /// Whether the app is backgrounded (suppresses autoplay).
+  static Future<void> _playNextInQueue() async {
+    if (_queue.isEmpty) {
+      _isPlaying = false;
+      onPlaybackStateChanged?.call();
+      return;
+    }
+
+    final path = _queue.removeAt(0);
+    try {
+      await _player.play(DeviceFileSource(path));
+      _isPlaying = true;
+      onPlaybackStateChanged?.call();
+    } catch (_) {
+      // Skip broken file, try next
+      _onTrackComplete();
+    }
+  }
+
+  // ── Recording ──
+
+  static bool get isRecording => _isRecording;
   static bool get isBackgrounded => _isBackgrounded;
 
-  /// Start recording audio in AAC format.
-  /// Returns the file path where the recording will be saved.
   static Future<String?> startRecording() async {
     if (_isRecording) return null;
 
@@ -81,7 +107,6 @@
     return path;
   }
 
-  /// Stop recording and return the file path.
   static Future<String?> stopRecording() async {
     if (!_isRecording) return null;
 
@@ -91,7 +116,6 @@
     return path;
   }
 
-  /// Cancel the current recording and delete the file.
   static Future<void> cancelRecording() async {
     if (!_isRecording) return;
 
@@ -106,55 +130,91 @@
     }
   }
 
-  /// Play a single audio source (cancels any current playback).
-  static Future<void> playSingle(String source,
-      {bool cancelPrevious = true}) async {
-    if (cancelPrevious) {
-      await stopPlayback();
-    }
+  // ── Playback ──
 
-    _isChainPlaying = false;
-
-    if (source.startsWith('http://') || source.startsWith('https://')) {
-      await _player.play(UrlSource(source));
-    } else if (source.startsWith('/')) {
-      await _player.play(DeviceFileSource(source));
-    } else {
-      // Assume base64 data URI or asset
-      await _player.play(UrlSource(source));
-    }
-  }
-
-  /// Play a base64-encoded audio blob by writing to a temp file first.
-  /// Stops any current playback.
-  static Future<void> playBase64(String base64Audio) async {
+  /// Play a single file. Stops current playback and clears the queue.
+  static Future<void> playSingle(String source) async {
     await stopPlayback();
 
+    if (source.startsWith('/')) {
+      await _player.play(DeviceFileSource(source));
+    } else {
+      // base64 data — write to temp file first. NOTE(review): pre-refactor playSingle also accepted http(s) URLs via UrlSource — confirm no caller still passes URLs here.
+      final path = await _base64ToFile(source);
+      if (path == null) return;
+      await _player.play(DeviceFileSource(path));
+    }
+    _isPlaying = true;
+    onPlaybackStateChanged?.call();
+  }
+
+  /// Play a base64-encoded audio blob. Stops current playback.
+  static Future<void> playBase64(String base64Audio) async {
+    await stopPlayback();
     final path = await _base64ToFile(base64Audio);
     if (path == null) return;
 
     await _player.play(DeviceFileSource(path));
+    _isPlaying = true;
+    onPlaybackStateChanged?.call();
   }
 
-  /// Queue a base64-encoded audio blob for playback.
-  /// If nothing is playing, starts immediately. If already playing,
-  /// adds to the chain queue so it plays after the current one finishes.
+  /// Queue a base64-encoded audio blob for sequential playback.
+  /// If nothing is playing, starts immediately.
+  /// If already playing, appends to queue — plays after current finishes.
   static Future<void> queueBase64(String base64Audio) async {
     final path = await _base64ToFile(base64Audio);
     if (path == null) return;
 
-    if (_player.state == PlayerState.playing || _isChainPlaying) {
-      // Already playing — add to queue
-      _playbackQueue.add(path);
+    if (_isPlaying) {
+      // Already playing — just add to queue, it will play when current finishes
+      _queue.add(path);
     } else {
-      // Nothing playing — start chain
-      _playbackQueue.clear();
-      _playbackQueue.add(path);
-      _queueIndex = 0;
-      _isChainPlaying = true;
-      await _playCurrent();
+      // Nothing playing — start immediately
+      await _player.play(DeviceFileSource(path));
+      _isPlaying = true;
+      onPlaybackStateChanged?.call();
     }
   }
+
+  /// Chain playback: play a list of sources sequentially.
+  /// First one plays immediately, rest are queued.
+  static Future<void> playChain(List<String> sources) async {
+    if (sources.isEmpty) return;
+    if (_isBackgrounded) return;
+
+    await stopPlayback();
+
+    // Queue the rest as paths (decode base64 now: _playNextInQueue plays files only)
+    for (final s in sources.skip(1)) {
+      _queue.add(s.startsWith('/') ? s : (await _base64ToFile(s)) ?? s);
+    }
+
+    // Play the first one
+    final first = sources[0];
+    if (first.startsWith('/')) {
+      await _player.play(DeviceFileSource(first));
+    } else {
+      final path = await _base64ToFile(first);
+      if (path == null) return _playNextInQueue(); // bad first item: try queue
+      await _player.play(DeviceFileSource(path));
+    }
+    _isPlaying = true;
+    onPlaybackStateChanged?.call();
+  }
+
+  /// Stop all playback and clear queue.
+  static Future<void> stopPlayback() async {
+    _queue.clear();
+    _isPlaying = false;
+    await _player.stop();
+    onPlaybackStateChanged?.call();
+  }
+
+  /// Whether audio is currently playing.
+  static bool get isPlaying => _isPlaying;
+
+  // ── Helpers ──
 
   static Future<String?> _base64ToFile(String base64Audio) async {
     final dir = await getTemporaryDirectory();
@@ -166,58 +226,8 @@
     return path;
   }
 
-  /// Chain playback: play a list of audio sources sequentially.
-  static Future<void> playChain(List<String> sources) async {
-    if (sources.isEmpty) return;
-    if (_isBackgrounded) return; // Suppress autoplay when backgrounded
-
-    await stopPlayback();
-
-    _playbackQueue.clear();
-    _playbackQueue.addAll(sources);
-    _queueIndex = 0;
-    _isChainPlaying = true;
-
-    await _playCurrent();
-  }
-
-  static Future<void> _playCurrent() async {
-    if (_queueIndex >= _playbackQueue.length) {
-      _isChainPlaying = false;
-      return;
-    }
-
-    final source = _playbackQueue[_queueIndex];
-    if (source.startsWith('/')) {
-      await _player.play(DeviceFileSource(source));
-    } else {
-      await _player.play(UrlSource(source));
-    }
-  }
-
-  static Future<void> _playNext() async {
-    _queueIndex++;
-    if (_queueIndex < _playbackQueue.length) {
-      await _playCurrent();
-    } else {
-      _isChainPlaying = false;
-    }
-  }
-
-  /// Stop all playback.
-  static Future<void> stopPlayback() async {
-    _isChainPlaying = false;
-    _playbackQueue.clear();
-    await _player.stop();
-  }
-
-  /// Whether audio is currently playing.
-  static bool get isPlaying =>
-      _player.state == PlayerState.playing;
-
   static List<int>? _decodeBase64(String b64) {
     try {
-      // Remove data URI prefix if present
       final cleaned = b64.contains(',') ? b64.split(',').last : b64;
       return List<int>.from(
         Uri.parse('data:;base64,$cleaned').data!.contentAsBytes(),
@@ -227,7 +237,6 @@
     }
   }
 
-  /// Dispose resources.
   static Future<void> dispose() async {
     await cancelRecording();
     await stopPlayback();
diff --git a/lib/services/message_store.dart b/lib/services/message_store.dart
index 424d73a..aa51fc8 100644
--- a/lib/services/message_store.dart
+++ b/lib/services/message_store.dart
@@ -83,7 +83,7 @@
       final List<dynamic> jsonList = jsonDecode(jsonStr) as List<dynamic>;
       final allMessages = jsonList
           .map((j) => _messageFromJson(j as Map<String, dynamic>))
-          .where((m) => !m.isEmptyVoice) // Filter out voice msgs with no content
+          .where((m) => !m.isEmptyVoice && !m.isEmptyText)
           .toList();
 
       // Paginate from the end (newest messages first in storage)
@@ -107,7 +107,7 @@
       final List<dynamic> jsonList = jsonDecode(jsonStr) as List<dynamic>;
       return jsonList
           .map((j) => _messageFromJson(j as Map<String, dynamic>))
-          .where((m) => !m.isEmptyVoice)
+          .where((m) => !m.isEmptyVoice && !m.isEmptyText)
           .toList();
     } catch (e) {
       return [];

--
Gitblit v1.3.1