import 'dart:async';
import 'dart:convert';
import 'dart:io';

import 'package:audioplayers/audioplayers.dart';
import 'package:flutter/widgets.dart';
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';

/// Manages audio recording (AAC) and playback (queue + singleton).
///
/// Incoming voice chunks are queued and played sequentially.
/// Manual taps play a single file (or chain from that point).
class AudioService {
  AudioService._();

  static final AudioRecorder _recorder = AudioRecorder();
  static final AudioPlayer _player = AudioPlayer();

  static bool _isRecording = false;
  static String? _currentRecordingPath;

  // Playback queue — sources waiting to be played. Entries are either
  // absolute file paths (start with '/') or base64-encoded audio blobs;
  // _playNextInQueue resolves both forms.
  static final List<String> _queue = [];
  static bool _isPlaying = false;

  /// Callback invoked when playback starts/stops — UI uses this to update
  /// play buttons.
  static void Function()? onPlaybackStateChanged;

  // Autoplay suppression while the app is not in the foreground.
  static bool _isBackgrounded = false;

  /// Initializes the audio service and sets up the lifecycle observer.
  ///
  /// Must be called once, after the widgets binding is initialized.
  static void init() {
    WidgetsBinding.instance.addObserver(_LifecycleObserver());

    // Configure the audio session for background playback.
    _player.setAudioContext(AudioContext(
      iOS: AudioContextIOS(
        category: AVAudioSessionCategory.playback,
        options: {AVAudioSessionOptions.mixWithOthers},
      ),
      android: const AudioContextAndroid(
        isSpeakerphoneOn: false,
        audioMode: AndroidAudioMode.normal,
        audioFocus: AndroidAudioFocus.gain,
      ),
    ));

    // When a track finishes, play the next in queue.
    _player.onPlayerComplete.listen((_) {
      _onTrackComplete();
    });
  }

  /// Advances the queue when a track finishes (or fails to start).
  static void _onTrackComplete() {
    if (_queue.isNotEmpty) {
      // Fire-and-forget: errors are handled inside _playNextInQueue.
      unawaited(_playNextInQueue());
    } else {
      _isPlaying = false;
      onPlaybackStateChanged?.call();
    }
  }

  /// Pops the next queued source and plays it, skipping broken entries.
  static Future<void> _playNextInQueue() async {
    if (_queue.isEmpty) {
      _isPlaying = false;
      onPlaybackStateChanged?.call();
      return;
    }
    final source = _queue.removeAt(0);
    try {
      // Queue entries may be raw file paths (from playChain) or base64
      // blobs (from queueBase64 callers) — resolve either to a local file.
      final path = await _resolveSource(source);
      if (path == null) {
        // Undecodable entry — skip it and try the next one.
        _onTrackComplete();
        return;
      }
      await _player.play(DeviceFileSource(path));
      _isPlaying = true;
      onPlaybackStateChanged?.call();
    } catch (_) {
      // Skip broken file, try next.
      _onTrackComplete();
    }
  }

  // ── Recording ──

  /// Whether a recording is currently in progress.
  static bool get isRecording => _isRecording;

  /// Whether the app is backgrounded (autoplay via [playChain] is suppressed).
  static bool get isBackgrounded => _isBackgrounded;

  /// Starts an AAC-LC recording into a temp file.
  ///
  /// Returns the recording's file path, or null if a recording is already in
  /// progress or the microphone permission is denied.
  static Future<String?> startRecording() async {
    if (_isRecording) return null;
    final hasPermission = await _recorder.hasPermission();
    if (!hasPermission) return null;

    final dir = await getTemporaryDirectory();
    final path =
        '${dir.path}/recording_${DateTime.now().millisecondsSinceEpoch}.m4a';
    await _recorder.start(
      const RecordConfig(
        encoder: AudioEncoder.aacLc,
        bitRate: 128000,
        sampleRate: 44100,
      ),
      path: path,
    );
    _isRecording = true;
    _currentRecordingPath = path;
    return path;
  }

  /// Stops the current recording and returns its file path (null if no
  /// recording was in progress).
  static Future<String?> stopRecording() async {
    if (!_isRecording) return null;
    final path = await _recorder.stop();
    _isRecording = false;
    _currentRecordingPath = null;
    return path;
  }

  /// Cancels the current recording and deletes the partial file.
  static Future<void> cancelRecording() async {
    if (!_isRecording) return;
    await _recorder.stop();
    _isRecording = false;
    if (_currentRecordingPath != null) {
      try {
        await File(_currentRecordingPath!).delete();
      } catch (_) {
        // Best-effort cleanup; a leftover temp file is harmless.
      }
      _currentRecordingPath = null;
    }
  }

  // ── Playback ──

  /// Plays a single source. Stops current playback and clears the queue.
  ///
  /// [source] is an absolute file path (starts with '/') or base64 audio.
  static Future<void> playSingle(String source) async {
    await stopPlayback();
    final path = await _resolveSource(source);
    if (path == null) return;
    await _player.play(DeviceFileSource(path));
    _isPlaying = true;
    onPlaybackStateChanged?.call();
  }

  /// Plays a base64-encoded audio blob. Stops current playback.
  static Future<void> playBase64(String base64Audio) async {
    await stopPlayback();
    final path = await _base64ToFile(base64Audio);
    if (path == null) return;
    await _player.play(DeviceFileSource(path));
    _isPlaying = true;
    onPlaybackStateChanged?.call();
  }

  /// Queues a base64-encoded audio blob for sequential playback.
  ///
  /// If nothing is playing, starts immediately. If already playing, appends
  /// to the queue — plays after the current track finishes.
  static Future<void> queueBase64(String base64Audio) async {
    final path = await _base64ToFile(base64Audio);
    if (path == null) return;
    if (_isPlaying) {
      // Already playing — it will be picked up when the current finishes.
      _queue.add(path);
    } else {
      // Nothing playing — start immediately.
      await _player.play(DeviceFileSource(path));
      _isPlaying = true;
      onPlaybackStateChanged?.call();
    }
  }

  /// Chains playback: plays [sources] (file paths or base64) sequentially.
  ///
  /// The first source plays immediately; the rest are queued. No-op while
  /// the app is backgrounded or [sources] is empty.
  static Future<void> playChain(List<String> sources) async {
    if (sources.isEmpty) return;
    if (_isBackgrounded) return;
    await stopPlayback();

    // Queue all except the first; _playNextInQueue resolves base64 entries
    // to temp files as they come up.
    _queue.addAll(sources.skip(1));

    final path = await _resolveSource(sources.first);
    if (path == null) return;
    await _player.play(DeviceFileSource(path));
    _isPlaying = true;
    onPlaybackStateChanged?.call();
  }

  /// Stops all playback and clears the queue.
  static Future<void> stopPlayback() async {
    _queue.clear();
    _isPlaying = false;
    await _player.stop();
    onPlaybackStateChanged?.call();
  }

  /// Whether audio is currently playing.
  static bool get isPlaying => _isPlaying;

  // ── Helpers ──

  /// Resolves [source] to a playable local file path.
  ///
  /// Absolute paths (starting with '/') pass through unchanged; anything
  /// else is treated as base64 audio and written to a temp file. Returns
  /// null if the base64 payload cannot be decoded.
  static Future<String?> _resolveSource(String source) async {
    if (source.startsWith('/')) return source;
    return _base64ToFile(source);
  }

  /// Writes [base64Audio] to a temp .m4a file and returns its path, or
  /// null on a decode failure.
  static Future<String?> _base64ToFile(String base64Audio) async {
    // Decode first so a bad payload never leaves an empty temp file behind.
    final bytes = _decodeBase64(base64Audio);
    if (bytes == null) return null;
    final dir = await getTemporaryDirectory();
    final path =
        '${dir.path}/playback_${DateTime.now().millisecondsSinceEpoch}.m4a';
    await File(path).writeAsBytes(bytes);
    return path;
  }

  /// Decodes [b64], tolerating a data-URI prefix ('data:...;base64,xxx')
  /// and missing padding. Returns null if the input is not valid base64.
  static List<int>? _decodeBase64(String b64) {
    try {
      final cleaned = b64.contains(',') ? b64.split(',').last : b64;
      // base64.normalize repairs padding/URL-safe characters before decoding.
      return base64Decode(base64.normalize(cleaned));
    } catch (_) {
      return null;
    }
  }

  /// Releases recorder and player resources. Call when the service is no
  /// longer needed.
  static Future<void> dispose() async {
    await cancelRecording();
    await stopPlayback();
    await _recorder.dispose();
    await _player.dispose();
  }
}

/// Tracks app lifecycle transitions and flips [AudioService._isBackgrounded]
/// so chained autoplay is suppressed while the app is not in the foreground.
class _LifecycleObserver extends WidgetsBindingObserver {
  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    switch (state) {
      case AppLifecycleState.paused:
      case AppLifecycleState.inactive:
      case AppLifecycleState.detached:
        AudioService._isBackgrounded = true;
      case AppLifecycleState.resumed:
        AudioService._isBackgrounded = false;
      default:
        // Covers AppLifecycleState.hidden and any future states.
        break;
    }
  }
}