feat: 增加文件解读

master
xjs 2025-04-28 16:11:36 +08:00
parent 1ebb21d23c
commit 7409229dd5
9 changed files with 346 additions and 93 deletions

View File

@ -3,7 +3,7 @@
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>六维小助手</title>
<title>六维填报师</title>
<meta name="description" content="AIGC对话" />
<meta name="generator" content="React" />
<meta name="keywords" content="music, music-site" />

View File

@ -37,3 +37,24 @@ export const fetchReport = async ({
return { result: [], message: response.message };
}
};
/**
 * Request the download URL of the generated PDF for a given record.
 *
 * @param params.id       record identifier
 * @param params.location location code of the record
 * @param options         optional abort signal and extra request headers
 * @returns `{ result }` holding the URL when the API answers code 200,
 *          otherwise `{ result: "", message }` describing the failure.
 */
export const fetchFile = async ({
  params,
  options,
}: {
  params: { id: string; location: string };
  options?: { signal?: AbortSignal; headers?: Record<string, string> };
}) => {
  const endpoint = "https://api.v3.ycymedu.com/api/volunTb/downloadpdfUrl";
  const response = await getRequest(endpoint, params, options);
  // Anything but 200 is reported back to the caller via `message`.
  if (response.code !== 200) {
    return { result: "", message: response.message };
  }
  return { result: response.result };
};

View File

@ -5,22 +5,24 @@ import AntechamberScore from "@/components/AntechamberScore";
import { useContext, useEffect, useState } from "react";
import { RealtimeClientContext } from "@/components/Provider/RealtimeClientProvider";
import { useSearchParams } from "react-router-dom";
import { fetchReport, fetchUserToken } from "@/apis/user";
import { fetchUserToken } from "@/apis/user";
import { useToast } from "@/hooks/use-toast";
import { useAbortController } from "@/hooks/useAbortController";
import { ReportContext } from "@/components/Provider/ReportResolveProvider";
// import { ReportContext } from "@/components/Provider/ReportResolveProvider";
import AntechamberFile from "@/components/AntechamberFile";
import AntechamberReport from "@/components/AntechamberReport";
export default function Antechamber() {
const { handleConnect } = useContext(RealtimeClientContext);
const { setHasHandledReport,hasHandledReport } = useContext(ReportContext);
const [searchParams] = useSearchParams();
const [disable,setDisable] = useState(true);
const token = searchParams.get("token") || '';
const reportId = searchParams.get("reportId") || '';
const reportType = searchParams.get("reportType") || '';
// const reportId = searchParams.get("reportId") || '';
// const reportType = searchParams.get("reportType") || '';
const { toast } = useToast();
const { getSignal } = useAbortController();
@ -30,7 +32,9 @@ export default function Antechamber() {
const { result, message } = await fetchUserToken({
options: {
signal: getSignal(),
headers: {"Authorization":`Bearer ${token}`}
headers: {
"Authorization": `Bearer ${encodeURIComponent(token)}`
}
}
});
if (message) {
@ -52,49 +56,53 @@ export default function Antechamber() {
}
};
const getReport = async () => {
try {
const { result, message } = await fetchReport({
params:{Type:reportType,Id:reportId},
options: {
signal: getSignal(),
headers: {"Authorization":`Bearer ${token}`}
}
});
if (message) {
console.log(message);
} else {
handleConnect(result as string);
setHasHandledReport(true)
}
} catch (error: any) {
if (error.name !== 'AbortError') {
console.error('获取报告失败:', error);
}
}
}
// const getReport = async () => {
// try {
// const { result, message } = await fetchReport({
// params:{Type:reportType,Id:reportId},
// options: {
// signal: getSignal(),
// headers: {
// "Authorization": `Bearer ${encodeURIComponent(token)}`
// }
// }
// });
// if (message) {
// console.log(message);
// } else {
// handleConnect({initMessage:result as string});
// setHasHandledReport(true)
// }
// } catch (error: any) {
// if (error.name !== 'AbortError') {
// console.error('获取报告失败:', error);
// }
// }
// }
useEffect(() => {
getUserToken();
}, [token]);
useEffect(() => {
if(reportId && reportType && !hasHandledReport){
getReport();
}
}, [reportId, reportType,hasHandledReport]);
// useEffect(() => {
// if(reportId && reportType && !hasHandledReport){
// getReport();
// }
// }, [reportId, reportType,hasHandledReport]);
const toRoom = (initMessage?:string) => {
const toRoom = (params:{initMessage?:string,fileUrl?:string}) => {
if(disable){
return;
}
handleConnect(initMessage);
handleConnect(params);
};
return (
<div className="flex flex-col items-center h-full">
<AntechamberHeader toRoom={toRoom} />
<AntechamberScore toRoom={toRoom} />
<InvokeButton disable={disable} onClick={() => toRoom()} />
<AntechamberFile toRoom={toRoom} />
<AntechamberReport />
<InvokeButton disable={disable} onClick={() => toRoom({})} />
</div>
);
}

View File

@ -0,0 +1,51 @@
import { fetchFile } from "@/apis/user";
import { useToast } from "@/hooks/use-toast";
import { useAbortController } from "@/hooks/useAbortController";
import { useContext, useEffect } from "react";
import { useSearchParams } from "react-router-dom";
import { ReportContext } from "../Provider/ReportResolveProvider";
import { RealtimeClientContext } from "../Provider/RealtimeClientProvider";
/**
 * Headless helper component: when the page is opened with `fileId` and
 * `locationCode` query params, resolves the file's download URL and starts
 * a realtime session seeded with that file. Renders nothing — it exists
 * only for this one-shot side effect.
 */
export default function AntechamberFile() {
  const [searchParams] = useSearchParams();
  const fileId = searchParams.get("fileId") || "";
  const locationCode = searchParams.get("locationCode") || "";
  const token = searchParams.get("token") || "";
  const { toast } = useToast();
  const { getSignal } = useAbortController();
  const { setHasHandledReport, hasHandledReport } = useContext(ReportContext);
  const { handleConnect } = useContext(RealtimeClientContext);

  // Resolve the file URL, then hand it to the realtime client.
  // NOTE: plain async helper invoked from the effect below — deliberately
  // not named `use*`, since it is not a React hook.
  const loadFileAndConnect = async () => {
    try {
      const { result, message } = await fetchFile({
        params: { id: fileId, location: locationCode },
        options: {
          signal: getSignal(),
          headers: { Authorization: `Bearer ${token}` },
        },
      });
      if (message) {
        // Fetch failed: surface the error and bail out instead of
        // connecting with an empty fileUrl and marking the report handled.
        toast({ title: message });
        return;
      }
      handleConnect({ fileUrl: result as string });
      setHasHandledReport(true);
    } catch (error: any) {
      // Aborted requests are expected on unmount; only log real failures.
      if (error.name !== "AbortError") {
        console.error("获取文件失败:", error);
      }
    }
  };

  useEffect(() => {
    // Trigger at most once per report; hasHandledReport guards re-entry.
    if (fileId && locationCode && !hasHandledReport) {
      loadFileAndConnect();
    }
  }, [fileId, locationCode, hasHandledReport]);

  return <></>;
}

View File

@ -9,7 +9,7 @@ import { fetchQuestions } from "@/apis/questions";
import { useAbortController } from "@/hooks/useAbortController";
type Props = {
toRoom: (initMessage?:string) =>void;
toRoom: ({initMessage,fileUrl}:{initMessage?:string,fileUrl?:string}) =>void;
};
export default function HeaderGroup({ toRoom }: Props) {
@ -64,7 +64,7 @@ export default function HeaderGroup({ toRoom }: Props) {
};
const handleQuestion = async (question: string) => {
toRoom(question);
toRoom({initMessage:question});
};
return (

View File

@ -0,0 +1,50 @@
import { useContext, useEffect } from "react";
import { ReportContext } from "../Provider/ReportResolveProvider";
import { fetchReport } from "@/apis/user";
import { useSearchParams } from "react-router-dom";
import { useAbortController } from "@/hooks/useAbortController";
import { RealtimeClientContext } from "../Provider/RealtimeClientProvider";
/**
 * Headless helper component: when `reportId` and `reportType` query params
 * are present, fetches the report text and opens a realtime session whose
 * first message is that report. Renders nothing.
 */
export default function AntechamberReport() {
  const [searchParams] = useSearchParams();
  const token = searchParams.get("token") || "";
  const reportId = searchParams.get("reportId") || "";
  const reportType = searchParams.get("reportType") || "";
  const { setHasHandledReport, hasHandledReport } = useContext(ReportContext);
  const { getSignal } = useAbortController();
  const { handleConnect } = useContext(RealtimeClientContext);

  // Fetch the report, then seed the realtime session with its content.
  const requestReport = async () => {
    try {
      const response = await fetchReport({
        params: { Type: reportType, Id: reportId },
        options: {
          signal: getSignal(),
          headers: {
            Authorization: `Bearer ${encodeURIComponent(token)}`,
          },
        },
      });
      if (response.message) {
        console.log(response.message);
      } else {
        handleConnect({ initMessage: response.result as string });
        setHasHandledReport(true);
      }
    } catch (error: any) {
      // Requests aborted on unmount are expected; log everything else.
      if (error.name !== "AbortError") {
        console.error("获取报告失败:", error);
      }
    }
  };

  useEffect(() => {
    if (reportId && reportType && !hasHandledReport) {
      requestReport();
    }
  }, [reportId, reportType, hasHandledReport]);

  return <></>;
}

View File

@ -6,7 +6,7 @@ import RightBlueIcon from '/icons/rightBlue.png';
import style from './index.module.css';
type Props = {
toRoom: (initMessage?:string) => void;
toRoom: ({initMessage,fileUrl}:{initMessage?:string,fileUrl?:string}) => void;
};
export default function MyInput({ toRoom }: Props) {
@ -19,7 +19,7 @@ export default function MyInput({ toRoom }: Props) {
const handleQuestion = async () => {
toRoom(`我的高考地点在${provinceName},我选择的科目是${subjectGroup},我的高考分数为${expectedScore}分。我适合哪些学校和专业`);
toRoom({initMessage:`我的高考地点在${provinceName},我选择的科目是${subjectGroup},我的高考分数为${expectedScore}分。帮我出一个科学的参考志愿表`});
};
return (

View File

@ -32,8 +32,21 @@ export const RealtimeClientContext = createContext<{
messageList: { content: string; role: RoleType }[];
isAiTalking: boolean;
roomInfo: RoomInfo | null;
initClient: (initMessage?: string) => void;
handleConnect: (initMessage?: string) => Promise<void>;
fileParseStatus: number;
initClient: ({
initMessage,
fileUrl,
}: {
initMessage?: string;
fileUrl?: string;
}) => void;
handleConnect: ({
initMessage,
fileUrl,
}: {
initMessage?: string;
fileUrl?: string;
}) => Promise<void>;
handleInterrupt: () => void;
handleDisconnect: () => void;
toggleMicrophone: () => void;
@ -46,6 +59,7 @@ export const RealtimeClientContext = createContext<{
messageList: [],
isAiTalking: false,
roomInfo: null,
fileParseStatus: -1,
initClient: () => {},
handleConnect: () => Promise.resolve(),
handleInterrupt: () => {},
@ -77,7 +91,7 @@ export const RealtimeClientProvider = ({
const clientRef = useRef<RealtimeClient | null>(null);
// 实时语音回复消息列表
const [messageList, setMessageList] = useState<
{ content: string; role: RoleType }[]
{ content: string; role: RoleType; event?: any }[]
>([]);
// 是否正在连接
const [isConnecting, setIsConnecting] = useState(false);
@ -92,9 +106,19 @@ export const RealtimeClientProvider = ({
const [roomInfo, setRoomInfo] = useState<RoomInfo | null>(null);
// 文件解析状态:-1 未解析,0 解析中(文件消息已发送),1 插件调用中,2 解析完成
const fileParseStatusRef = useRef(-1);
const { toast } = useToast();
const initClient = async (_initMessage?: string) => {
const initClient = async ({
initMessage,
fileUrl,
}: {
initMessage?: string;
fileUrl?: string;
}) => {
const permission = await RealtimeUtils.checkDevicePermission(false);
const device = await RealtimeUtils.getAudioDevices();
@ -120,23 +144,34 @@ export const RealtimeClientProvider = ({
voiceId: voiceId,
connectorId: connectorId,
allowPersonalAccessTokenInBrowser: true, // 可选:允许在浏览器中使用个人访问令牌
suppressStationaryNoise: true,
suppressNonStationaryNoise: true,
debug: false,
});
clientRef.current = client;
setupEventListeners(client);
setupMessageEventListeners(client, _initMessage ?? "");
setupInitMessageEventListener(client, _initMessage);
setupMessageEventListeners(client, { initMessage, fileUrl });
setupInitMessageEventListener(client, { initMessage, fileUrl });
};
const handleConnect = async (initMessage?: string) => {
const handleConnect = async ({
initMessage,
fileUrl,
}: {
initMessage?: string;
fileUrl?: string;
}) => {
try {
if (!clientRef.current) {
await initClient(initMessage);
await initClient({ initMessage, fileUrl });
} else {
await handleDisconnect();
await initClient({ initMessage, fileUrl });
}
await clientRef.current?.connect();
await toggleMicrophone();
// await toggleMicrophone();
} catch (error) {
console.error(error);
if (error instanceof RealtimeAPIError) {
@ -172,10 +207,10 @@ export const RealtimeClientProvider = ({
// 关闭客户的时候清除一些信息
setIsAiTalking(false);
setMessageList([]);
await clientRef.current?.setAudioEnable(false);
setAudioEnabled(false);
// await clientRef.current?.setAudioEnable(false);
// setAudioEnabled(false);
clientRef.current?.disconnect();
await clientRef.current?.disconnect();
clientRef.current?.clearEventHandlers();
clientRef.current = null;
setIsConnected(false);
@ -194,7 +229,10 @@ export const RealtimeClientProvider = ({
};
const setupInitMessageEventListener = useCallback(
(client: RealtimeClient, _initMessage?: string) => {
(
client: RealtimeClient,
{ initMessage, fileUrl }: { initMessage?: string; fileUrl?: string }
) => {
client.on(EventNames.ALL_SERVER, async (eventName, _event: any) => {
if (eventName === "server.session.created") {
await client.sendMessage({
@ -204,10 +242,13 @@ export const RealtimeClientProvider = ({
chat_config: {
allow_voice_interrupt: false,
},
turn_detection: {
silence_duration_ms: 2000,
},
},
});
}
if (eventName === "server.bot.join" && _initMessage) {
if (eventName === "server.bot.join" && initMessage) {
// 这里需要加个 server. 前缀
await clientRef.current?.sendMessage({
id: "",
@ -215,7 +256,25 @@ export const RealtimeClientProvider = ({
data: {
role: "user",
content_type: "text",
content: _initMessage,
content: initMessage,
},
});
} else if (eventName === "server.bot.join" && fileUrl) {
fileParseStatusRef.current = 0;
await clientRef.current?.sendMessage({
id: "",
event_type: "conversation.message.create",
data: {
role: "user",
content_type: "object_string",
content: JSON.stringify([
{
type: "text",
text: "帮我解读这个文件,结合当下的专业行情以及对该专业未来的发展趋势,简介的给出大学建议",
},
{ type: "image", file_url: fileUrl },
]),
},
});
}
@ -226,10 +285,8 @@ export const RealtimeClientProvider = ({
const setupMessageEventListeners = (
client: RealtimeClient,
_initMessage: string
{ initMessage, fileUrl }: { initMessage?: string; fileUrl?: string }
) => {
let lastEvent: any;
client.on(EventNames.ALL, (_eventName, event: any) => {
// AI智能体设置
@ -241,32 +298,54 @@ export const RealtimeClientProvider = ({
) {
return;
}
const content = event.data.content;
if (
event.data.type === "function_call" &&
JSON.parse(content).name === "doc_reader-PDF_reader"
) {
fileParseStatusRef.current = 1;
} else if (
event.data.type === "tool_response" &&
fileParseStatusRef.current === 1
) {
fileParseStatusRef.current = 2;
}
setMessageList((prev) => {
// 如果上一个事件是增量更新,则附加到最后一条消息
if (
lastEvent?.event_type === ChatEventType.CONVERSATION_MESSAGE_DELTA &&
prev.length > 0 &&
prev[prev.length - 1].event?.event_type ===
ChatEventType.CONVERSATION_MESSAGE_DELTA &&
event.event_type === ChatEventType.CONVERSATION_MESSAGE_DELTA &&
lastEvent.data.type === event.data.type &&
lastEvent.data.answer_id === event.data.answer_id
prev[prev.length - 1].event.data.type === event.data.type &&
prev[prev.length - 1].event.data.answer_id === event.data.answer_id
) {
return [
...prev.slice(0, -1),
{
content: prev[prev.length - 1].content + content,
role: prev[prev.length - 1].role,
event: event,
},
];
}
// 添加AI的欢迎语
if (
_initMessage === "" &&
typeof initMessage === "undefined" &&
typeof fileUrl === "undefined" &&
event.event_type === "conversation.created"
) {
return [
...prev,
{ content: event.data.prologue, role: RoleType.Assistant },
{
content: event.data.prologue,
role: RoleType.Assistant,
event: event,
},
];
}
@ -278,11 +357,59 @@ export const RealtimeClientProvider = ({
(event.data.type === "answer" || event.data.type === "question") &&
event.data.role !== RoleType.Assistant)
) {
return [...prev, { content: content, role: event.data.role }];
// lastEvent = event;
if(event.event_type === ChatEventType.CONVERSATION_MESSAGE_DELTA && fileParseStatusRef.current === 2){
fileParseStatusRef.current = -1;
}
return [
...prev,
{ content: content, role: event.data.role, event: event },
];
}
// 添加一个文件解析的信息
if (
fileParseStatusRef.current === 0 &&
event.event_type === "conversation.message.completed"
) {
return [
...prev,
{
content: "AI正在解析您的文档...",
role: RoleType.Assistant,
event: event,
fileParseStatus: fileParseStatusRef.current,
},
];
} else if (
fileParseStatusRef.current === 1 &&
event.event_type === "conversation.message.completed"
) {
return [
...prev.slice(0, -1),
{
content: "AI正在调用插件",
role: prev[prev.length - 1].role,
event: event,
fileParseStatus: fileParseStatusRef.current,
},
];
} else if (
fileParseStatusRef.current === 2 &&
event.event_type === "conversation.message.completed"
) {
return [
...prev.slice(0, -1),
{
content: "文档解析完成",
role: RoleType.Assistant,
event: event,
fileParseStatus: fileParseStatusRef.current,
},
];
}
return prev;
});
lastEvent = event;
});
};
@ -293,16 +420,16 @@ export const RealtimeClientProvider = ({
client.on(EventNames.AUDIO_AGENT_SPEECH_STARTED, async () => {
// console.log("AI开始说话");
setIsAiTalking(true);
await clientRef.current?.setAudioEnable(false);
setAudioEnabled(false);
// await clientRef.current?.setAudioEnable(false);
// setAudioEnabled(false);
});
// 监听 AI 结束说话事件
client.on(EventNames.AUDIO_AGENT_SPEECH_STOPPED, async () => {
// console.log("AI结束说话");
setIsAiTalking(false);
await clientRef.current?.setAudioEnable(true);
setAudioEnabled(true);
// await clientRef.current?.setAudioEnable(true);
// setAudioEnabled(true);
});
// 监听连接客户端
@ -321,23 +448,6 @@ export const RealtimeClientProvider = ({
[clientRef.current]
);
// 发送信息
// const sendUserMessageWithText = async (message: string) => {
// try {
// await clientRef.current?.sendMessage({
// id: "",
// event_type: "conversation.message.create",
// data: {
// role: "user",
// content_type: "text",
// content: message,
// },
// });
// } catch (error) {
// console.error("发送消息失败:" + error);
// }
// };
return (
<RealtimeClientContext.Provider
value={{
@ -349,6 +459,7 @@ export const RealtimeClientProvider = ({
messageList,
isAiTalking,
roomInfo,
fileParseStatus: fileParseStatusRef.current,
initClient,
handleConnect,
handleInterrupt,

View File

@ -1,7 +1,7 @@
import { useRef, useEffect, useContext } from "react";
import { RealtimeClientContext } from "../Provider/RealtimeClientProvider";
import ReactMarkdown from 'react-markdown'
import gfm from 'remark-gfm'
import ReactMarkdown from "react-markdown";
import gfm from "remark-gfm";
import { RoleType } from "@coze/api";
export default function RoomConversation() {
@ -21,9 +21,19 @@ export default function RoomConversation() {
<div className="flex-1 flex flex-col overflow-y-auto">
<div className="w-full min-h-[120px] h-[120px]">
<div className="relative h-full">
<img src="/icons/hello.gif" alt="" className="absolute top-0 h-[97px] left-[50%] translate-x-[-50%]"/>
<img src="/icons/conversation-bg.png" alt="background" className='w-[222px] h-[49px] absolute bottom-0 left-[50%] translate-x-[-50%]'/>
<div className="text-black text-[14px] absolute bottom-[22px] left-[50%] translate-x-[-50%] z-[10]">HeyAI</div>
<img
src="/icons/hello.gif"
alt=""
className="absolute top-0 h-[97px] left-[50%] translate-x-[-50%]"
/>
<img
src="/icons/conversation-bg.png"
alt="background"
className="w-[222px] h-[49px] absolute bottom-0 left-[50%] translate-x-[-50%]"
/>
<div className="text-black text-[14px] absolute bottom-[22px] left-[50%] translate-x-[-50%] z-[10]">
HeyAI
</div>
</div>
</div>
<div className="flex-1 overflow-y-auto p-4 space-y-4">
@ -43,7 +53,9 @@ export default function RoomConversation() {
: "bg-blue-500 text-white rounded-tr-none"
}`}
>
<ReactMarkdown remarkPlugins={[gfm]}>{message.content}</ReactMarkdown>
<ReactMarkdown remarkPlugins={[gfm]}>
{message.content}
</ReactMarkdown>
</div>
</div>
))}