Matthias Nott
2026-03-21 fa34201bc07e5312ff0c6825933cd02ce7900254
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
import 'dart:async';
import 'dart:convert';
import 'dart:io';

import 'package:audioplayers/audioplayers.dart';
import 'package:flutter/widgets.dart';
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';
/// Manages audio recording (AAC) and playback (queue + singleton).
///
/// All state is static: the service owns a single [AudioRecorder] and a
/// single [AudioPlayer], so at most one recording and one playback are
/// active at a time. Call [init] once after the widgets binding is ready
/// and [dispose] on shutdown.
class AudioService {
  AudioService._(); // Not instantiable — static API only.

  static final AudioRecorder _recorder = AudioRecorder();
  static final AudioPlayer _player = AudioPlayer();
  static bool _isRecording = false;
  static String? _currentRecordingPath;

  // Chain playback state: a queue of sources played back-to-back. The
  // onPlayerComplete listener installed in [init] advances the queue.
  // _queueIndex == -1 means "the sound currently playing is not in the
  // queue" (a playSingle/playBase64 sound that a chain was attached to).
  static final List<String> _playbackQueue = [];
  static int _queueIndex = 0;
  static bool _isChainPlaying = false;

  // Autoplay suppression while the app is not in the foreground.
  static bool _isBackgrounded = false;

  /// Initialize the audio service and set up lifecycle observer.
  static void init() {
    // Listen for app lifecycle changes to suppress autoplay when backgrounded.
    WidgetsBinding.instance.addObserver(_LifecycleObserver());
    // Configure audio session for playback — allows audio to continue
    // when screen locks or app goes to background. Deliberately not
    // awaited: init() is sync and a failure here is non-fatal.
    unawaited(_player.setAudioContext(AudioContext(
      iOS: AudioContextIOS(
        category: AVAudioSessionCategory.playback,
        options: {AVAudioSessionOptions.mixWithOthers},
      ),
      android: const AudioContextAndroid(
        isSpeakerphoneOn: false,
        audioMode: AndroidAudioMode.normal,
        audioFocus: AndroidAudioFocus.gain,
      ),
    )));
    // Advance the chain queue whenever one source finishes.
    _player.onPlayerComplete.listen((_) {
      if (_isChainPlaying) {
        _playNext();
      }
    });
  }

  /// Whether we are currently recording.
  static bool get isRecording => _isRecording;

  /// Whether the app is backgrounded (suppresses autoplay).
  static bool get isBackgrounded => _isBackgrounded;

  /// Start recording audio in AAC format.
  ///
  /// Returns the file path where the recording will be saved, or `null`
  /// when a recording is already in progress or the microphone permission
  /// is denied.
  static Future<String?> startRecording() async {
    if (_isRecording) return null;
    final hasPermission = await _recorder.hasPermission();
    if (!hasPermission) return null;
    final dir = await getTemporaryDirectory();
    // Timestamped name avoids clobbering earlier recordings.
    final path =
        '${dir.path}/recording_${DateTime.now().millisecondsSinceEpoch}.m4a';
    await _recorder.start(
      const RecordConfig(
        encoder: AudioEncoder.aacLc,
        bitRate: 128000,
        sampleRate: 44100,
      ),
      path: path,
    );
    _isRecording = true;
    _currentRecordingPath = path;
    return path;
  }

  /// Stop recording and return the file path.
  ///
  /// Returns `null` when no recording is in progress.
  static Future<String?> stopRecording() async {
    if (!_isRecording) return null;
    final path = await _recorder.stop();
    _isRecording = false;
    _currentRecordingPath = null;
    return path;
  }

  /// Cancel the current recording and delete the file.
  static Future<void> cancelRecording() async {
    if (!_isRecording) return;
    await _recorder.stop();
    _isRecording = false;
    if (_currentRecordingPath != null) {
      try {
        await File(_currentRecordingPath!).delete();
      } catch (_) {
        // Best-effort cleanup: a leftover temp file is harmless.
      }
      _currentRecordingPath = null;
    }
  }

  /// Play a single audio source (cancels any current playback).
  ///
  /// [source] may be an http(s) URL, an absolute device file path, or a
  /// base64 data URI / asset reference (played via [UrlSource]).
  static Future<void> playSingle(String source,
      {bool cancelPrevious = true}) async {
    if (cancelPrevious) {
      await stopPlayback();
    }
    _isChainPlaying = false;
    if (source.startsWith('http://') || source.startsWith('https://')) {
      await _player.play(UrlSource(source));
    } else if (source.startsWith('/')) {
      await _player.play(DeviceFileSource(source));
    } else {
      // Assume base64 data URI or asset.
      await _player.play(UrlSource(source));
    }
  }

  /// Play a base64-encoded audio blob by writing to a temp file first.
  ///
  /// Stops any current playback. Silently does nothing when the payload
  /// is not valid base64.
  static Future<void> playBase64(String base64Audio) async {
    await stopPlayback();
    final path = await _base64ToFile(base64Audio);
    if (path == null) return;
    await _player.play(DeviceFileSource(path));
  }

  /// Queue a base64-encoded audio blob for playback.
  ///
  /// If nothing is playing, starts immediately. If already playing,
  /// adds to the chain queue so it plays after the current one finishes.
  static Future<void> queueBase64(String base64Audio) async {
    final path = await _base64ToFile(base64Audio);
    if (path == null) return;
    if (_player.state == PlayerState.playing || _isChainPlaying) {
      if (!_isChainPlaying) {
        // A non-chain sound (playSingle/playBase64) is playing. Attach a
        // fresh chain to it: the current sound is conceptually index -1,
        // so _playNext advances to index 0 when it completes. Without
        // this, the completion listener would never drain the queue.
        _playbackQueue.clear();
        _queueIndex = -1;
        _isChainPlaying = true;
      }
      _playbackQueue.add(path);
    } else {
      // Nothing playing — start a new chain.
      _playbackQueue.clear();
      _playbackQueue.add(path);
      _queueIndex = 0;
      _isChainPlaying = true;
      await _playCurrent();
    }
  }

  /// Writes the decoded payload to a timestamped temp file.
  ///
  /// Returns the file path, or `null` when decoding fails.
  /// NOTE(review): playback temp files are never deleted; the OS reclaims
  /// the temp directory eventually, but consider explicit cleanup.
  static Future<String?> _base64ToFile(String base64Audio) async {
    final dir = await getTemporaryDirectory();
    final path =
        '${dir.path}/playback_${DateTime.now().millisecondsSinceEpoch}.m4a';
    final bytes = _decodeBase64(base64Audio);
    if (bytes == null) return null;
    await File(path).writeAsBytes(bytes);
    return path;
  }

  /// Chain playback: play a list of audio sources sequentially.
  ///
  /// No-op when [sources] is empty or the app is backgrounded.
  static Future<void> playChain(List<String> sources) async {
    if (sources.isEmpty) return;
    if (_isBackgrounded) return; // Suppress autoplay when backgrounded.
    await stopPlayback();
    _playbackQueue.clear();
    _playbackQueue.addAll(sources);
    _queueIndex = 0;
    _isChainPlaying = true;
    await _playCurrent();
  }

  /// Plays the source at [_queueIndex], ending the chain past the end.
  static Future<void> _playCurrent() async {
    if (_queueIndex < 0 || _queueIndex >= _playbackQueue.length) {
      _isChainPlaying = false;
      return;
    }
    final source = _playbackQueue[_queueIndex];
    if (source.startsWith('/')) {
      await _player.play(DeviceFileSource(source));
    } else {
      await _player.play(UrlSource(source));
    }
  }

  /// Advances the chain to the next queued source.
  static Future<void> _playNext() async {
    _queueIndex++;
    if (_queueIndex < _playbackQueue.length) {
      await _playCurrent();
    } else {
      _isChainPlaying = false;
    }
  }

  /// Stop all playback and discard any queued chain.
  static Future<void> stopPlayback() async {
    _isChainPlaying = false;
    _playbackQueue.clear();
    _queueIndex = 0; // Reset so a later chain never starts mid-queue.
    await _player.stop();
  }

  /// Whether audio is currently playing.
  static bool get isPlaying => _player.state == PlayerState.playing;

  /// Decodes [b64], tolerating a `data:…;base64,` prefix, surrounding
  /// whitespace, and missing padding.
  ///
  /// Returns `null` when the payload is not valid base64.
  static List<int>? _decodeBase64(String b64) {
    try {
      // Strip a data-URI prefix if present.
      final cleaned = b64.contains(',') ? b64.split(',').last : b64;
      // base64.normalize repairs missing '=' padding before decoding.
      return base64Decode(base64.normalize(cleaned.trim()));
    } on FormatException {
      return null;
    }
  }

  /// Dispose resources. Call once on app shutdown.
  static Future<void> dispose() async {
    await cancelRecording();
    await stopPlayback();
    // Await so platform resources are actually released before returning.
    await _recorder.dispose();
    await _player.dispose();
  }
}
/// Mirrors app lifecycle transitions into [AudioService._isBackgrounded].
class _LifecycleObserver extends WidgetsBindingObserver {
  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    if (state == AppLifecycleState.resumed) {
      AudioService._isBackgrounded = false;
    } else if (state == AppLifecycleState.paused ||
        state == AppLifecycleState.inactive ||
        state == AppLifecycleState.detached) {
      AudioService._isBackgrounded = true;
    }
    // Any other state (e.g. `hidden` on newer Flutter versions) leaves the
    // flag unchanged, matching the original default branch.
  }
}