import 'dart:async';
import 'dart:convert';
import 'dart:io';

import 'package:audioplayers/audioplayers.dart';
import 'package:flutter/widgets.dart';
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';

/// Manages audio recording (AAC) and playback (queue + singleton).
///
/// All state is static: one shared [AudioRecorder] and one shared
/// [AudioPlayer]. Chain (sequential) playback is driven by the player's
/// `onPlayerComplete` stream; `_playbackQueue[_queueIndex]` is the item
/// currently playing whenever [_isChainPlaying] is true.
class AudioService {
  AudioService._();

  static final AudioRecorder _recorder = AudioRecorder();
  static final AudioPlayer _player = AudioPlayer();

  static bool _isRecording = false;
  static String? _currentRecordingPath;

  // Chain playback state.
  static final List<String> _playbackQueue = [];
  static int _queueIndex = 0;
  static bool _isChainPlaying = false;

  // Autoplay suppression while the app is not in the foreground.
  static bool _isBackgrounded = false;

  /// Initialize the audio service and set up the lifecycle observer.
  ///
  /// Must be called once after the Flutter binding is initialized. The
  /// completion listener advances the chain queue only while chain mode
  /// is active, so single-clip playback is unaffected by it.
  static void init() {
    // Listen for app lifecycle changes to suppress autoplay when backgrounded.
    WidgetsBinding.instance.addObserver(_LifecycleObserver());
    _player.onPlayerComplete.listen((_) {
      if (_isChainPlaying) {
        _playNext();
      }
    });
  }

  /// Whether we are currently recording.
  static bool get isRecording => _isRecording;

  /// Whether the app is backgrounded (suppresses autoplay).
  static bool get isBackgrounded => _isBackgrounded;

  /// Start recording audio in AAC format.
  ///
  /// Returns the file path where the recording will be saved, or `null`
  /// if already recording or microphone permission is denied.
  static Future<String?> startRecording() async {
    if (_isRecording) return null;
    final hasPermission = await _recorder.hasPermission();
    if (!hasPermission) return null;

    final dir = await getTemporaryDirectory();
    final path =
        '${dir.path}/recording_${DateTime.now().millisecondsSinceEpoch}.m4a';
    await _recorder.start(
      const RecordConfig(
        encoder: AudioEncoder.aacLc,
        bitRate: 128000,
        sampleRate: 44100,
      ),
      path: path,
    );
    _isRecording = true;
    _currentRecordingPath = path;
    return path;
  }

  /// Stop recording and return the file path (or `null` if not recording).
  static Future<String?> stopRecording() async {
    if (!_isRecording) return null;
    final path = await _recorder.stop();
    _isRecording = false;
    _currentRecordingPath = null;
    return path;
  }

  /// Cancel the current recording and delete the partial file.
  static Future<void> cancelRecording() async {
    if (!_isRecording) return;
    await _recorder.stop();
    _isRecording = false;
    if (_currentRecordingPath != null) {
      // Best effort: the file may never have been created.
      try {
        await File(_currentRecordingPath!).delete();
      } catch (_) {}
      _currentRecordingPath = null;
    }
  }

  /// Play a single audio source (cancels any current playback by default).
  ///
  /// [source] may be an http(s) URL, an absolute device file path, or a
  /// data URI; anything else is passed through as a URL source.
  static Future<void> playSingle(
    String source, {
    bool cancelPrevious = true,
  }) async {
    if (cancelPrevious) {
      await stopPlayback();
    }
    _isChainPlaying = false;
    if (source.startsWith('http://') || source.startsWith('https://')) {
      await _player.play(UrlSource(source));
    } else if (source.startsWith('/')) {
      await _player.play(DeviceFileSource(source));
    } else {
      // Assume base64 data URI or asset.
      await _player.play(UrlSource(source));
    }
  }

  /// Play a base64-encoded audio blob by writing to a temp file first.
  ///
  /// Stops any current playback. Silently does nothing if the payload
  /// cannot be decoded.
  static Future<void> playBase64(String base64Audio) async {
    await stopPlayback();
    final path = await _base64ToFile(base64Audio);
    if (path == null) return;
    await _player.play(DeviceFileSource(path));
  }

  /// Queue a base64-encoded audio blob for playback.
  ///
  /// If nothing is playing, starts immediately. If already playing,
  /// adds to the chain queue so it plays after the current one finishes.
  static Future<void> queueBase64(String base64Audio) async {
    final path = await _base64ToFile(base64Audio);
    if (path == null) return;

    if (_isChainPlaying) {
      // Chain already running — append; _playNext() will reach it.
      _playbackQueue.add(path);
    } else if (_player.state == PlayerState.playing) {
      // A single (non-chain) clip is playing. Enter chain mode so the
      // onPlayerComplete listener advances into the queue when it ends.
      // _playNext() increments before playing, so park the index one
      // slot before the new entry.
      _playbackQueue
        ..clear()
        ..add(path);
      _queueIndex = -1;
      _isChainPlaying = true;
    } else {
      // Nothing playing — start a fresh chain immediately.
      _playbackQueue
        ..clear()
        ..add(path);
      _queueIndex = 0;
      _isChainPlaying = true;
      await _playCurrent();
    }
  }

  /// Decode [base64Audio] to a temp `.m4a` file and return its path,
  /// or `null` if decoding fails.
  static Future<String?> _base64ToFile(String base64Audio) async {
    final dir = await getTemporaryDirectory();
    final path =
        '${dir.path}/playback_${DateTime.now().millisecondsSinceEpoch}.m4a';
    final bytes = _decodeBase64(base64Audio);
    if (bytes == null) return null;
    await File(path).writeAsBytes(bytes);
    return path;
  }

  /// Chain playback: play a list of audio sources sequentially.
  ///
  /// No-op on an empty list, or when the app is backgrounded (autoplay
  /// suppression). Elements must be strings (URLs or device file paths).
  static Future<void> playChain(List sources) async {
    if (sources.isEmpty) return;
    if (_isBackgrounded) return; // Suppress autoplay when backgrounded.
    await stopPlayback();
    _playbackQueue
      ..clear()
      ..addAll(sources.cast<String>());
    _queueIndex = 0;
    _isChainPlaying = true;
    await _playCurrent();
  }

  /// Play the queue entry at [_queueIndex]; ends chain mode when the
  /// index is out of range.
  static Future<void> _playCurrent() async {
    if (_queueIndex < 0 || _queueIndex >= _playbackQueue.length) {
      _isChainPlaying = false;
      return;
    }
    final source = _playbackQueue[_queueIndex];
    if (source.startsWith('/')) {
      await _player.play(DeviceFileSource(source));
    } else {
      await _player.play(UrlSource(source));
    }
  }

  /// Advance to the next queued item, or end chain mode at the tail.
  static Future<void> _playNext() async {
    _queueIndex++;
    if (_queueIndex < _playbackQueue.length) {
      await _playCurrent();
    } else {
      _isChainPlaying = false;
    }
  }

  /// Stop all playback and clear the chain queue.
  static Future<void> stopPlayback() async {
    _isChainPlaying = false;
    _playbackQueue.clear();
    await _player.stop();
  }

  /// Whether audio is currently playing.
  static bool get isPlaying => _player.state == PlayerState.playing;

  /// Decode a base64 payload, tolerating a `data:` URI prefix and
  /// missing padding. Returns `null` on malformed input.
  static List<int>? _decodeBase64(String b64) {
    try {
      // Remove data URI prefix ("data:...;base64,") if present.
      final cleaned = b64.contains(',') ? b64.split(',').last : b64;
      return base64Decode(base64.normalize(cleaned));
    } on FormatException {
      return null;
    }
  }

  /// Dispose all resources (recorder, player, pending temp recording).
  static Future<void> dispose() async {
    await cancelRecording();
    await stopPlayback();
    await _recorder.dispose();
    await _player.dispose();
  }
}

/// Flips [AudioService._isBackgrounded] as the app moves in and out of
/// the foreground, so chain autoplay is suppressed while backgrounded.
class _LifecycleObserver extends WidgetsBindingObserver {
  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    switch (state) {
      case AppLifecycleState.paused:
      case AppLifecycleState.inactive:
      case AppLifecycleState.detached:
        AudioService._isBackgrounded = true;
      case AppLifecycleState.resumed:
        AudioService._isBackgrounded = false;
      default:
        // Covers AppLifecycleState.hidden on newer Flutter versions.
        break;
    }
  }
}