// coze-realtime/src/components/Provider/RealtimeClientProvider.tsx
import { ChatEventType, RoleType } from "@coze/api";
import {
EventNames,
RealtimeAPIError,
RealtimeClient,
RealtimeUtils,
} from "@coze/realtime-api";
import {
createContext,
ReactNode,
useCallback,
useContext,
useRef,
useState,
} from "react";
import { useToast } from "@/hooks/use-toast";
import { MessageHandlerStrategy } from "@/hooks/useRealtimeClient";
// Connection details for the realtime room; populated from the payload of the
// CONNECTED event (see setupEventListeners) and exposed read-only via context.
type RoomInfo = {
  appId: string;
  roomId: string;
  token: string;
  uid: string;
};
// Metadata for a user-provided file that is forwarded to the bot when it joins
// (see setupInitMessageEventListener, which sends `url` as an image attachment).
// NOTE(review): tableName/provinceName/subjectClaim appear to be domain fields
// carried along for message rendering — confirm against callers.
export type FileInfo = {
  type: string;
  url: string;
  tableName: string;
  provinceName: string;
  subjectClaim: string;
};
/** A single transcript entry rendered in the chat UI. */
export type Message = {
  content: string;
  role: RoleType;
  // Raw server event that produced this entry, when available.
  event?: any;
  fileInfo?: FileInfo;
  fileParseStatus?: number;
};

/** Value supplied by RealtimeClientProvider to descendants. */
export type RealtimeClientContextValue = {
  client: RealtimeClient | null;
  isConnecting: boolean;
  isConnected: boolean;
  audioEnabled: boolean;
  isSupportVideo: boolean;
  messageList: Message[];
  isAiTalking: boolean;
  roomInfo: RoomInfo | null;
  /** Creates the RealtimeClient and wires up all event listeners. */
  initClient: (opts: { initMessage?: string; fileInfo?: FileInfo }) => void;
  /** Checks mic permission/devices, then connects (idempotent while busy). */
  handleConnect: (opts: {
    initMessage?: string;
    fileInfo?: FileInfo;
  }) => Promise<void>;
  handleInterrupt: () => void;
  handleDisconnect: () => void;
  toggleMicrophone: () => void;
};

export const RealtimeClientContext = createContext<RealtimeClientContextValue>(
  // Deliberately no usable default: the runtime value is null, and
  // useRealtimeClient throws if a consumer is not wrapped in the provider.
  null!
);
/**
 * Hook for accessing the realtime client context.
 * @throws Error when called outside of a RealtimeClientProvider.
 */
export const useRealtimeClient = () => {
  const value = useContext(RealtimeClientContext);
  if (value) {
    return value;
  }
  throw new Error("useRealtimeClient 必须在 RealtimeClientProvider 内部使用");
};
/**
 * Provides a Coze RealtimeClient plus connection/transcript state to the tree.
 *
 * Fixes over the previous revision:
 * - handleConnect previously returned early on the permission/device checks
 *   AFTER taking connectingLockRef, leaving the lock held forever and making
 *   any further connect attempt a silent no-op. The whole sequence now runs
 *   inside try/finally so the lock is always released.
 * - isConnecting is reset when a connect attempt fails (the CONNECTING event
 *   may already have set it) and on disconnect.
 */
export const RealtimeClientProvider = ({
  children,
}: {
  children: ReactNode;
}) => {
  const token = import.meta.env.VITE_COZE_TOKEN;
  const botId = import.meta.env.VITE_COZE_BOT_ID;
  const voiceId = import.meta.env.VITE_COZE_VOICE_ID;
  const connectorId = "1024";
  const clientRef = useRef<RealtimeClient | null>(null);
  // Re-entrancy guard for handleConnect; a ref (not state) so the check is
  // synchronous and immune to batched state updates.
  const connectingLockRef = useRef(false);
  const [messageList, setMessageList] = useState<
    {
      content: string;
      role: RoleType;
      event?: any;
      fileInfo?: FileInfo;
      fileParseStatus?: number;
    }[]
  >([]);
  const [isConnecting, setIsConnecting] = useState(false);
  const [isConnected, setIsConnected] = useState(false);
  const [audioEnabled, setAudioEnabled] = useState(true);
  const [isSupportVideo] = useState(false);
  const [isAiTalking, setIsAiTalking] = useState(false);
  const [roomInfo, setRoomInfo] = useState<RoomInfo | null>(null);
  const { toast } = useToast();
  // Stateful strategy object that folds incoming chat events into messageList.
  const messageHandlerStrategy = useRef(new MessageHandlerStrategy());

  /** Create the client and attach all event listeners. */
  const initClient = async ({
    initMessage,
    fileInfo,
  }: {
    initMessage?: string;
    fileInfo?: FileInfo;
  }) => {
    const client = new RealtimeClient({
      accessToken: token,
      botId,
      voiceId,
      connectorId,
      allowPersonalAccessTokenInBrowser: true,
      suppressStationaryNoise: true,
      suppressNonStationaryNoise: true,
      debug: false,
    });
    clientRef.current = client;
    setupEventListeners(client);
    setupMessageEventListeners(client, { initMessage, fileInfo });
    setupInitMessageEventListener(client, { initMessage, fileInfo });
  };

  /** Connect to the room. No-op while a connect is in flight or established. */
  const handleConnect = async ({
    initMessage,
    fileInfo,
  }: {
    initMessage?: string;
    fileInfo?: FileInfo;
  }) => {
    if (connectingLockRef.current || isConnected || isConnecting) return;
    connectingLockRef.current = true;
    try {
      const perm = await RealtimeUtils.checkDevicePermission(false);
      const device = await RealtimeUtils.getAudioDevices();
      if (!perm.audio) {
        toast({ title: "连接错误", description: "需要麦克风访问权限" });
        return;
      }
      if (device.audioInputs.length === 0) {
        toast({ title: "连接错误", description: "没有麦克风设备" });
        return;
      }
      if (!clientRef.current) {
        await initClient({ initMessage, fileInfo });
      }
      await clientRef.current!.connect();
      // Start muted; the mic is re-enabled once the agent stops speaking.
      await clientRef.current!.setAudioEnable(false);
      setAudioEnabled(false);
    } catch (error: any) {
      // The CONNECTING event may have fired before the failure; clear the flag
      // so the UI does not stay stuck in a "connecting" state.
      setIsConnecting(false);
      if (error instanceof RealtimeAPIError) {
        console.error(`连接错误 (${error.code}): ${error.message}`);
      } else {
        console.error("连接错误:" + error);
      }
    } finally {
      connectingLockRef.current = false;
    }
  };

  /** Interrupt the agent's current speech, if any. */
  const handleInterrupt = () => {
    clientRef.current?.interrupt();
  };

  /** Tear down the connection and reset all transient state. */
  const handleDisconnect = async () => {
    setIsAiTalking(false);
    setMessageList([]);
    await clientRef.current?.setAudioEnable(false);
    setAudioEnabled(false);
    await clientRef.current?.disconnect();
    clientRef.current?.clearEventHandlers();
    clientRef.current = null;
    setIsConnecting(false);
    setIsConnected(false);
  };

  /** Toggle the local microphone on/off. */
  const toggleMicrophone = async () => {
    const next = !audioEnabled;
    await clientRef.current?.setAudioEnable(next);
    setAudioEnabled(next);
  };

  /** On session creation / bot join, send session config and the first message. */
  const setupInitMessageEventListener = useCallback(
    (
      client: RealtimeClient,
      { initMessage, fileInfo }: { initMessage?: string; fileInfo?: FileInfo }
    ) => {
      client.on(EventNames.ALL_SERVER, async (eventName, _event: any) => {
        if (eventName === "server.session.created") {
          // Disable voice interruption and require 2s of silence to end a turn.
          await client.sendMessage({
            id: "",
            event_type: "session.update",
            data: {
              chat_config: { allow_voice_interrupt: false },
              turn_detection: { silence_duration_ms: 2000 },
            },
          });
        }
        if (eventName === "server.bot.join") {
          if (initMessage) {
            // Plain-text opener.
            await clientRef.current!.sendMessage({
              id: "",
              event_type: "conversation.message.create",
              data: {
                role: "user",
                content_type: "text",
                content: initMessage,
              },
            });
          } else if (fileInfo) {
            // File opener: ask the bot to interpret the attached image/file.
            await clientRef.current!.sendMessage({
              id: "",
              event_type: "conversation.message.create",
              data: {
                role: "user",
                content_type: "object_string",
                content: JSON.stringify([
                  {
                    type: "text",
                    text: "帮我解读这个文件,结合当下的专业行情以及对该专业未来的发展趋势,简介的给出志愿建议",
                  },
                  { type: "image", file_url: fileInfo.url },
                ]),
              },
            });
          }
        }
      });
    },
    []
  );

  /** Route chat events into the message-list state machine. */
  const setupMessageEventListeners = (
    client: RealtimeClient,
    opts: { initMessage?: string; fileInfo?: FileInfo }
  ) => {
    client.on(EventNames.ALL, (_eventName, event: any) => {
      // Only deltas, completions and conversation creation are relevant;
      // everything else is ignored here.
      if (
        event.event_type !== ChatEventType.CONVERSATION_MESSAGE_DELTA &&
        event.event_type !== ChatEventType.CONVERSATION_MESSAGE_COMPLETED &&
        event.event_type !== "conversation.created"
      ) {
        return;
      }
      setMessageList(prev =>
        messageHandlerStrategy.current.process(event, prev, opts)
      );
    });
  };

  /** Connection-state and speech events. Mutes the mic while the agent talks. */
  const setupEventListeners = useCallback((client: RealtimeClient) => {
    client.on(EventNames.AUDIO_AGENT_SPEECH_STARTED, async () => {
      setIsAiTalking(true);
      await clientRef.current?.setAudioEnable(false);
      setAudioEnabled(false);
    });
    client.on(EventNames.AUDIO_AGENT_SPEECH_STOPPED, async () => {
      setIsAiTalking(false);
      await clientRef.current?.setAudioEnable(true);
      setAudioEnabled(true);
    });
    client.on(EventNames.CONNECTING, () => {
      setIsConnecting(true);
      setIsConnected(false);
    });
    client.on(EventNames.CONNECTED, (_name, evt) => {
      setRoomInfo(evt as RoomInfo);
      setIsConnecting(false);
      setIsConnected(true);
    });
    client.on(EventNames.ALL_SERVER, (_name, _evt) => {
      // Hook point for other global server events.
    });
  }, []);

  return (
    <RealtimeClientContext.Provider
      value={{
        client: clientRef.current,
        isConnecting,
        isConnected,
        audioEnabled,
        isSupportVideo,
        messageList,
        isAiTalking,
        roomInfo,
        initClient,
        handleConnect,
        handleInterrupt,
        handleDisconnect,
        toggleMicrophone,
      }}
    >
      {children}
    </RealtimeClientContext.Provider>
  );
};