coze-middleschool/src/components/Provider/RealtimeClientProvider.tsx

430 lines
13 KiB
TypeScript
Raw Blame History

This file contains ambiguous Unicode characters!

This file contains ambiguous Unicode characters that may be confused with others in your current locale. If your use case is intentional and legitimate, you can safely ignore this warning. Use the Escape button to highlight these characters.

import { ChatEventType, RoleType } from "@coze/api";
import {
EventNames,
RealtimeAPIError,
RealtimeClient,
RealtimeUtils,
} from "@coze/realtime-api";
import {
createContext,
ReactNode,
useCallback,
useContext,
useRef,
useState,
} from "react";
import { useToast } from "@/hooks/use-toast";
import { FileParser, FileParseStatus } from "./FileParser";
// Connection parameters for the realtime audio room; populated from the
// CONNECTED event payload (see setupEventListeners below).
type RoomInfo = {
  appId: string;
  roomId: string;
  token: string;
  uid: string;
};
// Metadata for a user-uploaded file the assistant is asked to analyze.
// NOTE(review): tableName/provinceName/subjectClaim are not read anywhere in
// this file — presumably consumed by message-rendering components; confirm.
export type FileInfo = {
  type: string;
  url: string;
  tableName: string;
  provinceName: string;
  subjectClaim: string;
};
/**
 * Context carrying the realtime voice client, its connection/audio state,
 * the chat transcript, and the session control actions.
 * Consume through useRealtimeClient() rather than useContext directly.
 */
export const RealtimeClientContext = createContext<{
  client: RealtimeClient | null;
  isConnecting: boolean;
  isConnected: boolean;
  audioEnabled: boolean;
  isSupportVideo: boolean;
  // Chat transcript. `event` retains the raw server event that produced the
  // entry; fileInfo/fileParseStatus track document-analysis placeholders.
  messageList: {
    content: string;
    role: RoleType;
    event?: any;
    fileInfo?: FileInfo;
    fileParseStatus?: number;
  }[];
  isAiTalking: boolean;
  roomInfo: RoomInfo | null;
  initClient: (opts: { initMessage?: string; fileInfo?: FileInfo }) => void;
  handleConnect: (opts: { initMessage?: string; fileInfo?: FileInfo }) => Promise<void>;
  handleInterrupt: () => void;
  handleDisconnect: () => void;
  toggleMicrophone: () => void;
  // NOTE(review): the runtime default below is actually null despite the
  // non-null type (`null!`); this is only safe because useRealtimeClient()
  // throws on a null value. Direct useContext callers would not be guarded.
}>(/* default value omitted */ null!);
/**
 * Accessor hook for the realtime-client context. Throws when called outside
 * a <RealtimeClientProvider>, so callers can rely on a non-null value.
 */
export const useRealtimeClient = () => {
  const contextValue = useContext(RealtimeClientContext);
  if (contextValue) {
    return contextValue;
  }
  throw new Error("useRealtimeClient 必须在 RealtimeClientProvider 内部使用");
};
export const RealtimeClientProvider = ({ children }: { children: ReactNode }) => {
  // Coze credentials/IDs injected at build time via Vite env variables.
  const token = import.meta.env.VITE_COZE_TOKEN;
  const botId = import.meta.env.VITE_COZE_BOT_ID;
  const voiceId = import.meta.env.VITE_COZE_VOICE_ID;
  const connectorId = "1024";
  // Live client instance kept in a ref so async event handlers always see the
  // current client without triggering re-renders.
  const clientRef = useRef<RealtimeClient | null>(null);
  // Re-entrancy guard for handleConnect.
  const connectingLockRef = useRef(false);
  const [messageList, setMessageList] = useState<{
    content: string;
    role: RoleType;
    event?: any;
    fileInfo?: FileInfo;
    fileParseStatus?: number;
  }[]>([]);
  const [isConnecting, setIsConnecting] = useState(false);
  const [isConnected, setIsConnected] = useState(false);
  const [audioEnabled, setAudioEnabled] = useState(true);
  const [isSupportVideo] = useState(false);
  const [isAiTalking, setIsAiTalking] = useState(false);
  const [roomInfo, setRoomInfo] = useState<RoomInfo | null>(null);
  // File-parsing state machine. -1 appears to mean "no parse in progress";
  // 0/1/2 map to the status messages below — confirm against FileParser.
  const fileParseStatusRef = useRef<FileParseStatus>(-1);
  // NOTE(review): the FileParser argument is re-evaluated (a new instance
  // allocated) on every render even though useRef keeps only the first one;
  // a lazy-init pattern would avoid the wasted allocations.
  // The callback closes over helpers declared further down; that is safe
  // because it only runs after this component body has finished executing.
  const fileParserRef = useRef(
    new FileParser((newStatus:any) => {
      fileParseStatusRef.current = newStatus;
      // Mirror each parse-status transition into the visible message list.
      if (newStatus === 0) {
        appendAssistantMessage("AI正在解析您的文档...");
      } else if (newStatus === 1) {
        replaceLastAssistantMessage("AI正在调用插件");
      } else if (newStatus === 2) {
        replaceLastAssistantMessage("文档解析完成");
      }
    })
  );
  const { toast } = useToast();
  /** Helpers */
const appendAssistantMessage = (content: string) => {
setMessageList(prev => {
return [
...prev,
{ content, role: RoleType.Assistant }
]
});
};
const replaceLastAssistantMessage = (content: string) => {
setMessageList(prev => {
return [
...prev.slice(0, -1),
{ content, role: RoleType.Assistant }
]
});
};
  /** Create the RealtimeClient and attach all event listeners. */
  const initClient = async ({
    initMessage,
    fileInfo,
  }: {
    initMessage?: string;
    fileInfo?: FileInfo;
  }) => {
    // Verify microphone availability before constructing the client;
    // failures are surfaced both as a toast and as a thrown error.
    // (The `false` argument presumably skips the video permission check —
    // confirm against RealtimeUtils docs.)
    const perm = await RealtimeUtils.checkDevicePermission(false);
    const device = await RealtimeUtils.getAudioDevices();
    if (!perm.audio) {
      toast({ title: "连接错误", description: "需要麦克风访问权限" });
      throw new Error("需要麦克风访问权限");
    }
    if (device.audioInputs.length === 0) {
      toast({ title: "连接错误", description: "没有麦克风设备" });
      throw new Error("没有麦克风设备");
    }
    const client = new RealtimeClient({
      accessToken: token,
      botId,
      voiceId,
      connectorId,
      // NOTE(review): this exposes a personal access token to the browser —
      // fine for demos, but confirm it is intended for production builds.
      allowPersonalAccessTokenInBrowser: true,
      suppressStationaryNoise: true,
      suppressNonStationaryNoise: true,
      debug: true,
    });
    clientRef.current = client;
    // Listener setup order: connection/speech state first, then the message
    // stream, then the first-turn (session.create / bot.join) handler.
    setupEventListeners(client);
    setupMessageEventListeners(client, { initMessage, fileInfo });
    setupInitMessageEventListener(client, { initMessage, fileInfo });
  };
  /** Connect to the realtime room, creating the client on first use. */
  const handleConnect = async ({
    initMessage,
    fileInfo,
  }: {
    initMessage?: string;
    fileInfo?: FileInfo;
  }) => {
    // Ref-based lock rejects overlapping connect attempts immediately.
    if (connectingLockRef.current) return;
    connectingLockRef.current = true;
    // NOTE(review): isConnected/isConnecting are read from the render
    // closure and may be stale at this point — verify a double connect
    // cannot slip through between state update and re-render.
    if (isConnected || isConnecting) {
      connectingLockRef.current = false;
      return;
    }
    try {
      if (!clientRef.current) {
        await initClient({ initMessage, fileInfo });
      }
      await clientRef.current!.connect();
      // Join muted; the mic is re-enabled by the speech-stopped handler.
      await clientRef.current!.setAudioEnable(false);
      setAudioEnabled(false);
    } catch (error: any) {
      // Errors are logged only; no toast here (unlike initClient).
      if (error instanceof RealtimeAPIError) {
        console.error(`连接错误 (${error.code}): ${error.message}`);
      } else {
        console.error("连接错误:" + error);
      }
    } finally {
      connectingLockRef.current = false;
    }
  };
const handleInterrupt = () => {
clientRef.current?.interrupt();
};
const handleDisconnect = async () => {
setIsAiTalking(false);
setMessageList([]);
await clientRef.current?.setAudioEnable(false);
setAudioEnabled(false);
await clientRef.current?.disconnect();
clientRef.current?.clearEventHandlers();
clientRef.current = null;
setIsConnected(false);
};
const toggleMicrophone = async () => {
await clientRef.current?.setAudioEnable(!audioEnabled);
setAudioEnabled(!audioEnabled);
};
  /** First-turn setup: session config on session.created, opening message on bot.join. */
  const setupInitMessageEventListener = useCallback(
    (
      client: RealtimeClient,
      { initMessage, fileInfo }: { initMessage?: string; fileInfo?: FileInfo }
    ) => {
      client.on(EventNames.ALL_SERVER, async (eventName, event: any) => {
        if (eventName === "server.session.created") {
          // Configure the session: voice interruption disabled, and 2s of
          // silence treated as end-of-turn.
          await client.sendMessage({
            id: "",
            event_type: "session.update",
            data: {
              chat_config: { allow_voice_interrupt: false },
              turn_detection: { silence_duration_ms: 2000 },
            },
          });
        }
        if (eventName === "server.bot.join") {
          if (initMessage) {
            // Kick off the conversation with the caller-provided text.
            await clientRef.current!.sendMessage({
              id: "",
              event_type: "conversation.message.create",
              data: {
                role: "user",
                content_type: "text",
                content: initMessage,
              },
            });
          } else if (fileInfo) {
            // Send the uploaded file (as an image URL) along with a fixed
            // prompt asking the bot to analyze it and give advice.
            await clientRef.current!.sendMessage({
              id: "",
              event_type: "conversation.message.create",
              data: {
                role: "user",
                content_type: "object_string",
                content: JSON.stringify([
                  { type: "text", text: "帮我解读这个文件,结合当下的专业行情以及对该专业未来的发展趋势,简介的给出志愿建议" },
                  { type: "image", file_url: fileInfo.url },
                ]),
              },
            });
          }
        }
      });
    },
    []
  );
  /** Message-stream handling plus file-parse event forwarding. */
  const setupMessageEventListeners = (
    client: RealtimeClient,
    opts: { initMessage?: string; fileInfo?: FileInfo }
  ) => {
    client.on(EventNames.ALL, (_eventName, event: any) => {
      // Let the file-parse state machine inspect every event first.
      fileParserRef.current.handleEvent(_eventName, event);
      // Everything below handles the ordinary message stream.
      if (
        event.event_type !== ChatEventType.CONVERSATION_MESSAGE_DELTA &&
        event.event_type !== ChatEventType.CONVERSATION_MESSAGE_COMPLETED
      ) {
        // conversation.created: show the bot's prologue, but only when the
        // session did not start with an init message or an uploaded file.
        if (event.event_type === "conversation.created" && !opts.initMessage && !opts.fileInfo) {
          setMessageList(prev => [
            ...prev,
            {
              content: event.data.prologue,
              role: RoleType.Assistant,
              event
            }
          ]);
        }
        return;
      }
      // Skip assistant COMPLETED events of type verbose/answer — their text
      // has already been accumulated from the preceding delta events.
      if (
        (event.data.role === "assistant" && event.event_type === ChatEventType.CONVERSATION_MESSAGE_COMPLETED &&event.data.type === "verbose" ) ||
        event.data.type === "answer" && event.event_type === ChatEventType.CONVERSATION_MESSAGE_COMPLETED
      ) {
        return;
      }
      // Without an uploaded file, drop function_call/tool_response noise.
      if (!opts.fileInfo && (event.data.type === "function_call" || event.data.type === "tool_response")) {
        return;
      }
      const content = event.data.content;
      setMessageList(prev => {
        // Merge streaming deltas into the transcript.
        console.log("合并增量",prev); // debug logging left in place
        console.log("信息",content); // debug logging left in place
        // Tool-callback payloads arrive JSON-encoded in `content`.
        try {
          const parsedContent = JSON.parse(content);
          if (parsedContent.msg_type === "time_capsule_recall" ||
            (parsedContent.name && parsedContent.arguments)) {
            // Without a fileInfo there is nothing to attach the placeholder
            // to — do not fabricate a message.
            if (!opts.fileInfo) {
              return prev;
            }
            // Look for an existing placeholder for this file's tool callback.
            const existingToolMessageIndex = prev.findIndex(msg =>
              msg.content === "正在处理您的请求..." &&
              msg.fileInfo === opts.fileInfo
            );
            if (existingToolMessageIndex !== -1) {
              // Placeholder exists: only update its fileParseStatus.
              // (0 = recall in progress, 2 = done — mirrors FileParser.)
              const newStatus = parsedContent.msg_type === "time_capsule_recall" ? 0 : 2;
              return [
                ...prev.slice(0, existingToolMessageIndex),
                {
                  ...prev[existingToolMessageIndex],
                  fileParseStatus: newStatus
                },
                ...prev.slice(existingToolMessageIndex + 1)
              ];
            } else {
              // First tool callback for this file: create the placeholder.
              return [
                ...prev,
                {
                  content: "正在处理您的请求...",
                  role: event.data.role,
                  event,
                  fileInfo: opts.fileInfo,
                  fileParseStatus: parsedContent.msg_type === "time_capsule_recall" ? 0 : 2
                },
              ];
            }
          }
        } catch (e) {
          // Content is not JSON — fall through to normal text handling.
        }
        // Consecutive deltas of the same answer (matching answer_id) are
        // concatenated onto the previous entry instead of starting a new one.
        if (
          prev.length > 0 &&
          prev[prev.length - 1].event?.event_type ===
            ChatEventType.CONVERSATION_MESSAGE_DELTA &&
          event.event_type === ChatEventType.CONVERSATION_MESSAGE_DELTA &&
          prev[prev.length - 1].event.data.answer_id ===
            event.data.answer_id
        ) {
          return [
            ...prev.slice(0, -1),
            {
              content: prev[prev.length - 1].content + content,
              role: prev[prev.length - 1].role,
              event,
            },
          ];
        }
        // Otherwise append as a brand-new message.
        return [
          ...prev,
          { content, role: event.data.role, event },
        ];
      });
    });
  };
/** 基本连接状态 & 语音事件监听 */
const setupEventListeners = useCallback(
(client: RealtimeClient) => {
client.on(EventNames.AUDIO_AGENT_SPEECH_STARTED, async () => {
setIsAiTalking(true);
await clientRef.current?.setAudioEnable(false);
setAudioEnabled(false);
});
client.on(EventNames.AUDIO_AGENT_SPEECH_STOPPED, async () => {
setIsAiTalking(false);
await clientRef.current?.setAudioEnable(true);
setAudioEnabled(true);
});
client.on(EventNames.CONNECTING, () => {
setIsConnecting(true);
setIsConnected(false);
});
client.on(EventNames.CONNECTED, (_name, evt) => {
setRoomInfo(evt as RoomInfo);
setIsConnecting(false);
setIsConnected(true);
});
client.on(EventNames.ALL_SERVER, (name, evt) => {
// 其它全局服务端事件可在此处理
});
},
[]
);
  return (
    // NOTE(review): the context value object is rebuilt on every render, so
    // all consumers re-render whenever this provider does — consider useMemo
    // if that becomes a performance problem.
    <RealtimeClientContext.Provider
      value={{
        client: clientRef.current,
        isConnecting,
        isConnected,
        audioEnabled,
        isSupportVideo,
        messageList,
        isAiTalking,
        roomInfo,
        initClient,
        handleConnect,
        handleInterrupt,
        handleDisconnect,
        toggleMicrophone,
      }}
    >
      {children}
    </RealtimeClientContext.Provider>
  );
};