From c23dfe16e95713e7058137308bdbc28419609a39 Mon Sep 17 00:00:00 2001
From: Matthias Nott <mnott@mnsoft.org>
Date: Sat, 07 Mar 2026 11:54:15 +0100
Subject: [PATCH] feat: typing indicator, message deletion, chain playback, autoplay guard
---
 components/SessionDrawer.tsx        | 41 +++---
 types/index.ts                      |  6 +
 components/chat/TypingIndicator.tsx | 78 +++++++++++
 services/audio.ts                   | 44 ++++++-
 app/chat.tsx                        | 36 ++++-
 components/chat/MessageBubble.tsx   | 40 +++++-
 components/chat/MessageList.tsx     | 52 +++++++-
 contexts/ChatContext.tsx            | 36 +++++
8 files changed, 283 insertions(+), 50 deletions(-)
diff --git a/app/chat.tsx b/app/chat.tsx
index 8d4e95e..665d0f6 100644
--- a/app/chat.tsx
+++ b/app/chat.tsx
@@ -11,7 +11,7 @@
import { ImageCaptionModal } from "../components/chat/ImageCaptionModal";
import { StatusDot } from "../components/ui/StatusDot";
import { SessionDrawer } from "../components/SessionDrawer";
-import { playSingle, stopPlayback, isPlaying, onPlayingChange } from "../services/audio";
+import { playAudio, stopPlayback, isPlaying, onPlayingChange } from "../services/audio";
interface StagedImage {
base64: string;
@@ -20,7 +20,7 @@
}
export default function ChatScreen() {
- const { messages, sendTextMessage, sendVoiceMessage, sendImageMessage, clearMessages, requestScreenshot, sessions } =
+ const { messages, sendTextMessage, sendVoiceMessage, sendImageMessage, deleteMessage, clearMessages, isTyping, requestScreenshot, sessions } =
useChat();
const { status } = useConnection();
const { colors, mode, cycleMode } = useTheme();
@@ -130,17 +130,37 @@
[stagedImage, sendImageMessage],
);
- const handleReplay = useCallback(() => {
+ const handleReplay = useCallback(async () => {
if (isPlaying()) {
stopPlayback();
return;
}
+ // Find the last assistant voice message, then walk back to the first chunk in that group
+ let lastIdx = -1;
for (let i = messages.length - 1; i >= 0; i--) {
- const msg = messages[i];
- if (msg.role === "assistant" && msg.audioUri) {
- playSingle(msg.audioUri).catch(() => {});
- return;
+ if (messages[i].role === "assistant" && messages[i].type === "voice" && messages[i].audioUri) {
+ lastIdx = i;
+ break;
}
+ }
+ if (lastIdx === -1) return;
+
+ // Walk back to find the start of this chunk group
+ let startIdx = lastIdx;
+ while (startIdx > 0) {
+ const prev = messages[startIdx - 1];
+ if (prev.role === "assistant" && prev.type === "voice" && prev.audioUri) {
+ startIdx--;
+ } else {
+ break;
+ }
+ }
+
+ // Queue all chunks from start to last
+ await stopPlayback();
+ for (let i = startIdx; i <= lastIdx; i++) {
+ const m = messages[i];
+ if (m.audioUri) playAudio(m.audioUri);
}
}, [messages]);
@@ -267,7 +287,7 @@
</View>
</View>
) : (
- <MessageList messages={messages} />
+ <MessageList messages={messages} isTyping={isTyping} onDeleteMessage={deleteMessage} />
)}
</View>
diff --git a/components/SessionDrawer.tsx b/components/SessionDrawer.tsx
index 07b59fd..6195fbe 100644
--- a/components/SessionDrawer.tsx
+++ b/components/SessionDrawer.tsx
@@ -482,8 +482,7 @@
>
Sessions
</Text>
- <View style={{ flexDirection: "row", alignItems: "center", gap: 8 }}>
- <Pressable
+ <Pressable
onPress={() => requestSessions()}
hitSlop={{ top: 8, bottom: 8, left: 8, right: 8 }}
style={({ pressed }) => ({
@@ -497,23 +496,6 @@
Refresh
</Text>
</Pressable>
- <Pressable
- onPress={handleNewSession}
- hitSlop={{ top: 8, bottom: 8, left: 8, right: 8 }}
- style={({ pressed }) => ({
- width: 30,
- height: 30,
- borderRadius: 15,
- alignItems: "center",
- justifyContent: "center",
- backgroundColor: pressed ? colors.accent + "CC" : colors.accent,
- })}
- >
- <Text style={{ color: "#FFF", fontSize: 20, fontWeight: "600", marginTop: -1 }}>
- +
- </Text>
- </Pressable>
- </View>
</View>
</View>
@@ -537,10 +519,29 @@
/>
)}
+ {/* New session FAB */}
+ <View style={{ alignItems: "center", paddingVertical: 12 }}>
+ <Pressable
+ onPress={handleNewSession}
+ style={({ pressed }) => ({
+ flexDirection: "row",
+ alignItems: "center",
+ gap: 8,
+ paddingHorizontal: 20,
+ paddingVertical: 12,
+ borderRadius: 24,
+ backgroundColor: pressed ? colors.accent + "CC" : colors.accent,
+ })}
+ >
+ <Text style={{ color: "#FFF", fontSize: 20, fontWeight: "600", marginTop: -1 }}>+</Text>
+ <Text style={{ color: "#FFF", fontSize: 15, fontWeight: "600" }}>New Session</Text>
+ </Pressable>
+ </View>
+
{/* Footer */}
<View
style={{
- paddingVertical: 12,
+ paddingVertical: 8,
paddingHorizontal: 20,
borderTopWidth: 1,
borderTopColor: colors.border,
diff --git a/components/chat/MessageBubble.tsx b/components/chat/MessageBubble.tsx
index 5edf2f8..e7ad9fb 100644
--- a/components/chat/MessageBubble.tsx
+++ b/components/chat/MessageBubble.tsx
@@ -1,5 +1,5 @@
import React, { useCallback, useEffect, useState } from "react";
-import { Image, Pressable, Text, View } from "react-native";
+import { ActionSheetIOS, Alert, Image, Platform, Pressable, Text, View } from "react-native";
import { Message } from "../../types";
import { playSingle, stopPlayback, onPlayingChange } from "../../services/audio";
import { ImageViewer } from "./ImageViewer";
@@ -7,6 +7,8 @@
interface MessageBubbleProps {
message: Message;
+ onDelete?: (id: string) => void;
+ onPlayVoice?: (id: string) => void;
}
function formatDuration(ms?: number): string {
@@ -22,10 +24,31 @@
return d.toLocaleTimeString([], { hour: "2-digit", minute: "2-digit" });
}
-export function MessageBubble({ message }: MessageBubbleProps) {
+export function MessageBubble({ message, onDelete, onPlayVoice }: MessageBubbleProps) {
const [isPlaying, setIsPlaying] = useState(false);
const [showViewer, setShowViewer] = useState(false);
const { colors, isDark } = useTheme();
+
+ const handleLongPress = useCallback(() => {
+ if (!onDelete) return;
+ if (Platform.OS === "ios") {
+ ActionSheetIOS.showActionSheetWithOptions(
+ {
+ options: ["Cancel", "Delete Message"],
+ destructiveButtonIndex: 1,
+ cancelButtonIndex: 0,
+ },
+ (index) => {
+ if (index === 1) onDelete(message.id);
+ },
+ );
+ } else {
+ Alert.alert("Delete Message", "Remove this message?", [
+ { text: "Cancel", style: "cancel" },
+ { text: "Delete", style: "destructive", onPress: () => onDelete(message.id) },
+ ]);
+ }
+ }, [onDelete, message.id]);
// Track whether THIS bubble's audio is playing via the singleton URI
useEffect(() => {
@@ -41,13 +64,14 @@
if (!message.audioUri) return;
if (isPlaying) {
- // This bubble is playing — stop it
await stopPlayback();
+ } else if (onPlayVoice) {
+ // Let parent handle chain playback (plays this + subsequent chunks)
+ onPlayVoice(message.id);
} else {
- // Play this bubble (stops anything else automatically)
await playSingle(message.audioUri, () => {});
}
- }, [isPlaying, message.audioUri]);
+ }, [isPlaying, message.audioUri, onPlayVoice, message.id]);
if (isSystem) {
return (
@@ -65,7 +89,9 @@
: { borderTopLeftRadius: 4 };
return (
- <View
+ <Pressable
+ onLongPress={handleLongPress}
+ delayLongPress={500}
style={{
flexDirection: "row",
marginVertical: 4,
@@ -213,6 +239,6 @@
)}
</View>
</View>
- </View>
+ </Pressable>
);
}
diff --git a/components/chat/MessageList.tsx b/components/chat/MessageList.tsx
index 178c46d..0076e5c 100644
--- a/components/chat/MessageList.tsx
+++ b/components/chat/MessageList.tsx
@@ -1,36 +1,76 @@
-import React, { useEffect, useRef } from "react";
+import React, { useCallback, useEffect, useRef } from "react";
import { FlatList, View } from "react-native";
import { Message } from "../../types";
import { MessageBubble } from "./MessageBubble";
+import { TypingIndicator } from "./TypingIndicator";
+import { stopPlayback, playAudio } from "../../services/audio";
interface MessageListProps {
messages: Message[];
+ isTyping?: boolean;
+ onDeleteMessage?: (id: string) => void;
}
-export function MessageList({ messages }: MessageListProps) {
+export function MessageList({ messages, isTyping, onDeleteMessage }: MessageListProps) {
const listRef = useRef<FlatList<Message>>(null);
useEffect(() => {
if (messages.length > 0) {
- // Small delay to allow layout to complete
setTimeout(() => {
listRef.current?.scrollToEnd({ animated: true });
}, 50);
}
- }, [messages.length]);
+ }, [messages.length, isTyping]);
+
+ // Play from a voice message and auto-chain all consecutive assistant voice messages after it
+ const handlePlayVoice = useCallback(async (messageId: string) => {
+ const idx = messages.findIndex((m) => m.id === messageId);
+ if (idx === -1) return;
+
+ // Collect this message + all consecutive assistant voice messages after it
+ const chain: Message[] = [];
+ for (let i = idx; i < messages.length; i++) {
+ const m = messages[i];
+ if (m.role === "assistant" && m.type === "voice" && m.audioUri) {
+ chain.push(m);
+ } else if (i > idx) {
+ // Stop at the first non-voice or non-assistant message
+ break;
+ }
+ }
+
+ if (chain.length === 0) return;
+
+ // Stop current playback, then queue all chunks
+ await stopPlayback();
+ for (const m of chain) {
+ playAudio(m.audioUri!);
+ }
+ }, [messages]);
return (
<FlatList
ref={listRef}
data={messages}
keyExtractor={(item) => item.id}
- renderItem={({ item }) => <MessageBubble message={item} />}
+ renderItem={({ item }) => (
+ <MessageBubble
+ message={item}
+ onDelete={onDeleteMessage}
+ onPlayVoice={handlePlayVoice}
+ />
+ )}
contentContainerStyle={{ paddingVertical: 12 }}
onContentSizeChange={() => {
listRef.current?.scrollToEnd({ animated: false });
}}
showsVerticalScrollIndicator={false}
- ListFooterComponent={<View style={{ height: 4 }} />}
+ ListFooterComponent={
+ <>
+ {isTyping && <TypingIndicator />}
+ <View style={{ height: 4 }} />
+ </>
+ }
/>
);
}
diff --git a/components/chat/TypingIndicator.tsx b/components/chat/TypingIndicator.tsx
new file mode 100644
index 0000000..a19a8e5
--- /dev/null
+++ b/components/chat/TypingIndicator.tsx
@@ -0,0 +1,78 @@
+import React, { useEffect, useRef } from "react";
+import { Animated, View } from "react-native";
+import { useTheme } from "../../contexts/ThemeContext";
+
+export function TypingIndicator() {
+ const { colors, isDark } = useTheme();
+ const dot1 = useRef(new Animated.Value(0)).current;
+ const dot2 = useRef(new Animated.Value(0)).current;
+ const dot3 = useRef(new Animated.Value(0)).current;
+
+ useEffect(() => {
+ const animate = (dot: Animated.Value, delay: number) =>
+ Animated.loop(
+ Animated.sequence([
+ Animated.delay(delay),
+ Animated.timing(dot, { toValue: 1, duration: 300, useNativeDriver: true }),
+ Animated.timing(dot, { toValue: 0, duration: 300, useNativeDriver: true }),
+ ])
+ );
+
+ const a1 = animate(dot1, 0);
+ const a2 = animate(dot2, 200);
+ const a3 = animate(dot3, 400);
+ a1.start();
+ a2.start();
+ a3.start();
+
+ return () => { a1.stop(); a2.stop(); a3.stop(); };
+ }, [dot1, dot2, dot3]);
+
+ const bubbleBg = isDark ? "#252538" : colors.bgSecondary;
+ const dotColor = colors.textMuted;
+
+ return (
+ <View
+ style={{
+ flexDirection: "row",
+ marginVertical: 4,
+ paddingHorizontal: 12,
+ justifyContent: "flex-start",
+ }}
+ >
+ <View
+ style={{
+ borderRadius: 16,
+ borderTopLeftRadius: 4,
+ paddingHorizontal: 16,
+ paddingVertical: 14,
+ backgroundColor: bubbleBg,
+ flexDirection: "row",
+ gap: 4,
+ alignItems: "center",
+ }}
+ >
+ {[dot1, dot2, dot3].map((dot, i) => (
+ <Animated.View
+ key={i}
+ style={{
+ width: 8,
+ height: 8,
+ borderRadius: 4,
+ backgroundColor: dotColor,
+ opacity: dot.interpolate({ inputRange: [0, 1], outputRange: [0.3, 1] }),
+ transform: [
+ {
+ translateY: dot.interpolate({
+ inputRange: [0, 1],
+ outputRange: [0, -4],
+ }),
+ },
+ ],
+ }}
+ />
+ ))}
+ </View>
+ </View>
+ );
+}
diff --git a/contexts/ChatContext.tsx b/contexts/ChatContext.tsx
index 151f6f7..788bb31 100644
--- a/contexts/ChatContext.tsx
+++ b/contexts/ChatContext.tsx
@@ -8,7 +8,7 @@
} from "react";
import { Message, WsIncoming, WsSession } from "../types";
import { useConnection } from "./ConnectionContext";
-import { playAudio, encodeAudioToBase64, saveBase64Audio } from "../services/audio";
+import { playAudio, encodeAudioToBase64, saveBase64Audio, canAutoplay } from "../services/audio";
import { requestNotificationPermissions, notifyIncomingMessage } from "../services/notifications";
function generateId(): string {
@@ -119,7 +119,9 @@
sendTextMessage: (text: string) => void;
sendVoiceMessage: (audioUri: string, durationMs?: number) => void;
sendImageMessage: (imageBase64: string, caption: string, mimeType: string) => void;
+ deleteMessage: (id: string) => void;
clearMessages: () => void;
+ isTyping: boolean;
sessions: WsSession[];
activeSessionId: string | null;
requestSessions: () => void;
@@ -147,6 +149,8 @@
const [messages, setMessages] = useState<Message[]>([]);
// Unread counts for non-active sessions
const [unreadCounts, setUnreadCounts] = useState<Record<string, number>>({});
+ // Typing indicator from server
+ const [isTyping, setIsTyping] = useState(false);
const {
status,
@@ -197,6 +201,8 @@
if (status === "connected") {
needsSync.current = true;
sendCommand("sync", activeSessionId ? { activeSessionId } : undefined);
+ } else if (status === "disconnected") {
+ setIsTyping(false);
}
// eslint-disable-next-line react-hooks/exhaustive-deps — only fire on status change
}, [status, sendCommand]);
@@ -270,6 +276,7 @@
onMessageReceived.current = async (data: WsIncoming) => {
switch (data.type) {
case "text": {
+ setIsTyping(false);
const msg: Message = {
id: generateId(),
role: "assistant",
@@ -283,6 +290,7 @@
break;
}
case "voice": {
+ setIsTyping(false);
let audioUri: string | undefined;
if (data.audioBase64) {
try {
@@ -302,7 +310,7 @@
};
addMessageToActive(msg);
notifyIncomingMessage("PAILot", data.content ?? "Voice message");
- if (msg.audioUri) {
+ if (msg.audioUri && canAutoplay()) {
playAudio(msg.audioUri).catch(() => {});
}
break;
@@ -356,6 +364,14 @@
case "transcript": {
// Voice → text reflection: replace voice bubble with transcribed text
updateMessageContent(data.messageId, data.content);
+ break;
+ }
+ case "typing": {
+ setIsTyping(data.typing);
+ break;
+ }
+ case "status": {
+ // Connection status update — ignore for now
break;
}
case "error": {
@@ -440,6 +456,20 @@
[wsImageSend, addMessageToActive, updateMessageStatus]
);
+ const deleteMessage = useCallback((id: string) => {
+ setMessages((prev) => {
+ const next = prev.filter((m) => m.id !== id);
+ setActiveSessionId((sessId) => {
+ if (sessId) {
+ messagesMapRef.current[sessId] = next;
+ debouncedSave(messagesMapRef.current);
+ }
+ return sessId;
+ });
+ return next;
+ });
+ }, []);
+
const clearMessages = useCallback(() => {
setMessages([]);
setActiveSessionId((id) => {
@@ -515,7 +545,9 @@
sendTextMessage,
sendVoiceMessage,
sendImageMessage,
+ deleteMessage,
clearMessages,
+ isTyping,
sessions,
activeSessionId,
requestSessions,
diff --git a/services/audio.ts b/services/audio.ts
index ea43236..188164e 100644
--- a/services/audio.ts
+++ b/services/audio.ts
@@ -4,10 +4,32 @@
setAudioModeAsync,
} from "expo-audio";
import * as LegacyFileSystem from "expo-file-system/legacy";
+import { AppState } from "react-native";
export interface RecordingResult {
uri: string;
durationMs: number;
+}
+
+// --- Autoplay suppression ---
+// Don't autoplay voice messages when the app is in the background
+// or when the user is on a phone call (detected via audio interruption).
+let _autoplayEnabled = true;
+let _audioInterrupted = false;
+
+// Track app state — suppress autoplay when backgrounded
+AppState.addEventListener("change", (state) => {
+ _autoplayEnabled = state === "active";
+});
+
+/** Check if autoplay is safe right now (app in foreground, no interruption). */
+export function canAutoplay(): boolean {
+ return _autoplayEnabled && !_audioInterrupted;
+}
+
+/** Called externally to signal audio interruption (e.g., phone call started/ended). */
+export function setAudioInterrupted(interrupted: boolean): void {
+ _audioInterrupted = interrupted;
}
// --- Singleton audio player ---
@@ -94,13 +116,13 @@
while (audioQueue.length > 0) {
const item = audioQueue.shift()!;
- await playOneAudio(item.uri, item.onFinish);
+ await playOneAudio(item.uri, item.onFinish, false);
}
processingQueue = false;
}
-function playOneAudio(uri: string, onFinish?: () => void): Promise<void> {
+function playOneAudio(uri: string, onFinish?: () => void, cancelPrevious = true): Promise<void> {
return new Promise<void>(async (resolve) => {
let settled = false;
const finish = () => {
@@ -118,8 +140,8 @@
resolve();
};
- // Stop any currently playing audio first
- if (cancelCurrent) {
+ // Stop any currently playing audio first (only for non-queued calls)
+ if (cancelPrevious && cancelCurrent) {
cancelCurrent();
}
@@ -138,9 +160,17 @@
notifyListeners(uri);
player.addListener("playbackStatusUpdate", (status) => {
- if (!status.playing && status.currentTime > 0 &&
- (status.duration <= 0 || status.currentTime >= status.duration)) {
- finish();
+ if (!status.playing && status.currentTime > 0) {
+ if (status.duration <= 0 || status.currentTime >= status.duration) {
+ // Playback finished naturally
+ finish();
+ } else {
+ // Paused mid-playback — likely audio interruption (phone call)
+ setAudioInterrupted(true);
+ }
+ } else if (status.playing && _audioInterrupted) {
+ // Resumed after interruption
+ setAudioInterrupted(false);
}
});
diff --git a/types/index.ts b/types/index.ts
index 57d1765..0c97f86 100644
--- a/types/index.ts
+++ b/types/index.ts
@@ -102,6 +102,11 @@
content: string;
}
+export interface WsIncomingTyping {
+ type: "typing";
+ typing: boolean;
+}
+
export interface WsIncomingError {
type: "error";
message: string;
@@ -120,5 +125,6 @@
| WsIncomingSessionSwitched
| WsIncomingSessionRenamed
| WsIncomingTranscript
+ | WsIncomingTyping
| WsIncomingError
| WsIncomingStatus;
--
Gitblit v1.3.1