Matthias Nott
2026-03-23 07ad99d7c4f8c52930442a34d316e634435bd75a
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
import 'dart:async';
import 'dart:convert';
import 'dart:io';

import 'package:audioplayers/audioplayers.dart';
import 'package:flutter/widgets.dart';
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';
/// Manages audio recording (AAC) and playback (queue + singleton).
///
/// Incoming voice chunks are queued and played sequentially.
/// Manual taps play a single file (or chain from that point).
class AudioService {
  AudioService._();

  static final AudioRecorder _recorder = AudioRecorder();
  static final AudioPlayer _player = AudioPlayer();

  static bool _isRecording = false;
  static String? _currentRecordingPath;

  // Playback queue — file paths waiting to be played.
  // Invariant: every entry is a local file path. Base64 sources are
  // materialized to temp files *before* they are queued, because
  // [_playNextInQueue] plays entries via [DeviceFileSource] directly.
  static final List<String> _queue = [];
  static bool _isPlaying = false;

  // Callback when playback starts/stops — UI uses this to update play buttons.
  static void Function()? onPlaybackStateChanged;

  // Autoplay suppression while the app is backgrounded
  // (set by _LifecycleObserver).
  static bool _isBackgrounded = false;

  // Monotonic suffix so temp-file names stay unique even when several
  // chunks arrive within the same millisecond.
  static int _fileCounter = 0;

  /// Initialize the audio service and set up lifecycle observer.
  static void init() {
    WidgetsBinding.instance.addObserver(_LifecycleObserver());
    // Configure audio session for background playback.
    _player.setAudioContext(AudioContext(
      iOS: AudioContextIOS(
        category: AVAudioSessionCategory.playback,
        options: {AVAudioSessionOptions.mixWithOthers},
      ),
      android: const AudioContextAndroid(
        isSpeakerphoneOn: false,
        audioMode: AndroidAudioMode.normal,
        audioFocus: AndroidAudioFocus.gain,
      ),
    ));
    // When a track finishes, play the next in queue.
    _player.onPlayerComplete.listen((_) {
      _onTrackComplete();
    });
  }

  /// Advances to the next queued track, or marks playback stopped.
  static void _onTrackComplete() {
    if (_queue.isNotEmpty) {
      _playNextInQueue();
    } else {
      _isPlaying = false;
      onPlaybackStateChanged?.call();
    }
  }

  /// Pops and plays the head of [_queue]; broken files are skipped.
  static Future<void> _playNextInQueue() async {
    if (_queue.isEmpty) {
      _isPlaying = false;
      onPlaybackStateChanged?.call();
      return;
    }
    final path = _queue.removeAt(0);
    try {
      await _player.play(DeviceFileSource(path));
      _isPlaying = true;
      onPlaybackStateChanged?.call();
    } catch (_) {
      // Skip broken file, try next.
      _onTrackComplete();
    }
  }

  // ── Recording ──

  /// Whether a recording is currently in progress.
  static bool get isRecording => _isRecording;

  /// Whether the app is currently backgrounded (autoplay is suppressed).
  static bool get isBackgrounded => _isBackgrounded;

  /// Starts an AAC recording into a temp file.
  ///
  /// Returns the target path, or null if already recording or the
  /// microphone permission is missing.
  static Future<String?> startRecording() async {
    if (_isRecording) return null;
    final hasPermission = await _recorder.hasPermission();
    if (!hasPermission) return null;
    final dir = await getTemporaryDirectory();
    final path =
        '${dir.path}/recording_${DateTime.now().millisecondsSinceEpoch}.m4a';
    await _recorder.start(
      const RecordConfig(
        encoder: AudioEncoder.aacLc,
        bitRate: 128000,
        sampleRate: 44100,
      ),
      path: path,
    );
    _isRecording = true;
    _currentRecordingPath = path;
    return path;
  }

  /// Stops the active recording and returns the recorded file's path,
  /// or null if nothing was being recorded.
  static Future<String?> stopRecording() async {
    if (!_isRecording) return null;
    final path = await _recorder.stop();
    _isRecording = false;
    _currentRecordingPath = null;
    return path;
  }

  /// Aborts the active recording and deletes the partial file (best-effort).
  static Future<void> cancelRecording() async {
    if (!_isRecording) return;
    await _recorder.stop();
    _isRecording = false;
    if (_currentRecordingPath != null) {
      try {
        await File(_currentRecordingPath!).delete();
      } catch (_) {
        // Best-effort cleanup; a leftover temp file is harmless.
      }
      _currentRecordingPath = null;
    }
  }

  // ── Playback ──

  /// Play a single source (file path or base64 blob).
  /// Stops current playback and clears the queue.
  static Future<void> playSingle(String source) async {
    await stopPlayback();
    final path = await _resolveToFile(source);
    if (path == null) return;
    await _player.play(DeviceFileSource(path));
    _isPlaying = true;
    onPlaybackStateChanged?.call();
  }

  /// Play a base64-encoded audio blob. Stops current playback.
  static Future<void> playBase64(String base64Audio) async {
    await stopPlayback();
    final path = await _base64ToFile(base64Audio);
    if (path == null) return;
    await _player.play(DeviceFileSource(path));
    _isPlaying = true;
    onPlaybackStateChanged?.call();
  }

  /// Queue a base64-encoded audio blob for sequential playback.
  /// If nothing is playing, starts immediately.
  /// If already playing, appends to queue — plays after current finishes.
  static Future<void> queueBase64(String base64Audio) async {
    final path = await _base64ToFile(base64Audio);
    if (path == null) return;
    if (_isPlaying) {
      // Already playing — just add to queue, it will play when current finishes.
      _queue.add(path);
    } else {
      // Nothing playing — start immediately.
      await _player.play(DeviceFileSource(path));
      _isPlaying = true;
      onPlaybackStateChanged?.call();
    }
  }

  /// Chain playback: play a list of sources sequentially.
  /// First one plays immediately, rest are queued.
  ///
  /// Sources may be file paths or base64 blobs; undecodable entries are
  /// skipped. No-op while the app is backgrounded.
  static Future<void> playChain(List<String> sources) async {
    if (sources.isEmpty) return;
    if (_isBackgrounded) return;
    await stopPlayback();
    // Materialize every source to a file path up front: the queue only
    // holds file paths, so base64 entries must be decoded *before* they
    // are queued — queueing raw base64 would fail in _playNextInQueue.
    final paths = <String>[];
    for (final source in sources) {
      final path = await _resolveToFile(source);
      if (path != null) paths.add(path);
    }
    if (paths.isEmpty) return;
    // Queue all except the first, then play the first one.
    _queue.addAll(paths.sublist(1));
    await _player.play(DeviceFileSource(paths.first));
    _isPlaying = true;
    onPlaybackStateChanged?.call();
  }

  /// Stop all playback and clear queue.
  static Future<void> stopPlayback() async {
    _queue.clear();
    _isPlaying = false;
    await _player.stop();
    onPlaybackStateChanged?.call();
  }

  /// Whether audio is currently playing.
  static bool get isPlaying => _isPlaying;

  // ── Helpers ──

  /// Resolves [source] to a playable local file path.
  ///
  /// Absolute paths (starting with '/') are returned as-is; anything else
  /// is treated as base64 audio and written to a temp file. Returns null
  /// if the base64 payload cannot be decoded.
  static Future<String?> _resolveToFile(String source) async {
    if (source.startsWith('/')) return source;
    return _base64ToFile(source);
  }

  /// Decodes [base64Audio] and writes it to a uniquely-named temp file.
  ///
  /// Returns the file path, or null if decoding fails. Decode is attempted
  /// before touching the filesystem so bad input leaves no empty files.
  static Future<String?> _base64ToFile(String base64Audio) async {
    final bytes = _decodeBase64(base64Audio);
    if (bytes == null) return null;
    final dir = await getTemporaryDirectory();
    // Counter suffix keeps names unique within the same millisecond.
    final path =
        '${dir.path}/playback_${DateTime.now().millisecondsSinceEpoch}_${_fileCounter++}.m4a';
    await File(path).writeAsBytes(bytes);
    return path;
  }

  /// Decodes a base64 payload, tolerating data-URI prefixes
  /// ("data:...;base64,<payload>") and missing padding. Returns null on
  /// malformed input.
  static List<int>? _decodeBase64(String b64) {
    try {
      final cleaned = b64.contains(',') ? b64.split(',').last : b64;
      // base64.normalize fixes padding/url-safe characters before decoding.
      return base64Decode(base64.normalize(cleaned));
    } catch (_) {
      return null;
    }
  }

  /// Release all audio resources. Call on app teardown.
  static Future<void> dispose() async {
    await cancelRecording();
    await stopPlayback();
    // Await disposal so errors surface instead of being silently dropped.
    await _recorder.dispose();
    await _player.dispose();
  }
}
/// Mirrors app lifecycle transitions into [AudioService._isBackgrounded]
/// so autoplay can be suppressed while the app is not in the foreground.
class _LifecycleObserver extends WidgetsBindingObserver {
  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    if (state == AppLifecycleState.resumed) {
      AudioService._isBackgrounded = false;
    } else if (state == AppLifecycleState.paused ||
        state == AppLifecycleState.inactive ||
        state == AppLifecycleState.detached) {
      AudioService._isBackgrounded = true;
    }
    // Any other state (e.g. hidden): leave the flag unchanged.
  }
}