feat: 更新版本号 (bump version number)

commit 033127de94 (parent e9fb86c23d)
@@ -6,7 +6,7 @@
   "dependencies": {
     "@arco-design/web-react": "^2.65.0",
     "@reduxjs/toolkit": "^1.8.3",
-    "@volcengine/rtc": "4.58.9",
+    "@volcengine/rtc": "4.66.1",
     "autolinker": "^4.0.0",
     "i18next": "^21.8.16",
     "react": "^18.2.0",
Binary file not shown (image asset changed; before: 3.2 KiB, after: 823 B).
@@ -7,8 +7,8 @@ import { ConfigFactory } from './config';
 export * from './common';

-export const AIGC_PROXY_HOST = 'http://192.168.31.106:3001';
-export const DEMO_VERSION = '1.4.0';
+export const AIGC_PROXY_HOST = 'https://aigc.ycymedu.com';
+export const DEMO_VERSION = '1.5.0';

 export const Config = ConfigFactory;
 export default new ConfigFactory();
@@ -3,7 +3,6 @@
  * SPDX-license-identifier: BSD-3-Clause
  */

 import VERTC, {
   MirrorType,
   StreamIndex,
@@ -23,8 +22,10 @@ import VERTC, {
   PlayerEvent,
   NetworkQuality,
   VideoRenderMode,
+  ScreenEncoderConfig,
 } from '@volcengine/rtc';
 import RTCAIAnsExtension from '@volcengine/rtc/extension-ainr';
 import { Message } from '@arco-design/web-react';
 import openAPIs from '@/app/api';
 import aigcConfig from '@/config';
 import Utils from '@/utils/utils';
@@ -34,6 +35,7 @@ export interface IEventListener {
   handleError: (e: { errorCode: any }) => void;
   handleUserJoin: (e: onUserJoinedEvent) => void;
   handleUserLeave: (e: onUserLeaveEvent) => void;
+  handleTrackEnded: (e: { kind: string; isScreen: boolean }) => void;
   handleUserPublishStream: (e: { userId: string; mediaType: MediaType }) => void;
   handleUserUnpublishStream: (e: {
     userId: string;
@@ -45,7 +47,6 @@ export interface IEventListener {
   handleLocalAudioPropertiesReport: (e: LocalAudioPropertiesInfo[]) => void;
   handleRemoteAudioPropertiesReport: (e: RemoteAudioPropertiesInfo[]) => void;
   handleAudioDeviceStateChanged: (e: DeviceInfo) => void;
   handleUserMessageReceived: (e: { userId: string; message: any }) => void;
   handleAutoPlayFail: (e: AutoPlayFailedEvent) => void;
   handlePlayerEvent: (e: PlayerEvent) => void;
   handleUserStartAudioCapture: (e: { userId: string }) => void;
@@ -69,7 +70,7 @@ export interface BasicBody {
   room_id: string;
   user_id: string;
   login_token: string | null;
-  init_msg:string|null;
+  init_msg:string|null
 }

 export const AIAnsExtension = new RTCAIAnsExtension();
@@ -107,7 +108,9 @@ export class RTCClient {
       await this.engine.registerExtension(AIAnsExtension);
       AIAnsExtension.enable();
     } catch (error) {
-      console.error((error as any).message);
+      console.warn(
+        `当前环境不支持 AI 降噪, 此错误可忽略, 不影响实际使用, e: ${(error as any).message}`
+      );
     }
   };

@@ -115,6 +118,7 @@ export class RTCClient {
     handleError,
     handleUserJoin,
     handleUserLeave,
+    handleTrackEnded,
     handleUserPublishStream,
     handleUserUnpublishStream,
     handleRemoteStreamStats,
@@ -122,7 +126,6 @@ export class RTCClient {
     handleLocalAudioPropertiesReport,
     handleRemoteAudioPropertiesReport,
     handleAudioDeviceStateChanged,
     handleUserMessageReceived,
     handleAutoPlayFail,
     handlePlayerEvent,
     handleUserStartAudioCapture,
@@ -133,6 +136,7 @@ export class RTCClient {
     this.engine.on(VERTC.events.onError, handleError);
     this.engine.on(VERTC.events.onUserJoined, handleUserJoin);
     this.engine.on(VERTC.events.onUserLeave, handleUserLeave);
+    this.engine.on(VERTC.events.onTrackEnded, handleTrackEnded);
     this.engine.on(VERTC.events.onUserPublishStream, handleUserPublishStream);
     this.engine.on(VERTC.events.onUserUnpublishStream, handleUserUnpublishStream);
     this.engine.on(VERTC.events.onRemoteStreamStats, handleRemoteStreamStats);
@@ -140,7 +144,6 @@ export class RTCClient {
     this.engine.on(VERTC.events.onAudioDeviceStateChanged, handleAudioDeviceStateChanged);
     this.engine.on(VERTC.events.onLocalAudioPropertiesReport, handleLocalAudioPropertiesReport);
     this.engine.on(VERTC.events.onRemoteAudioPropertiesReport, handleRemoteAudioPropertiesReport);
     this.engine.on(VERTC.events.onUserMessageReceived, handleUserMessageReceived);
     this.engine.on(VERTC.events.onAutoplayFailed, handleAutoPlayFail);
     this.engine.on(VERTC.events.onPlayerEvent, handlePlayerEvent);
     this.engine.on(VERTC.events.onUserStartAudioCapture, handleUserStartAudioCapture);
@@ -197,21 +200,42 @@ export class RTCClient {
     audioOutputs: MediaDeviceInfo[];
     videoInputs: MediaDeviceInfo[];
   }> {
-    const { video, audio = true } = props || {};
+    const { video = false, audio = true } = props || {};
     let audioInputs: MediaDeviceInfo[] = [];
     let audioOutputs: MediaDeviceInfo[] = [];
     let videoInputs: MediaDeviceInfo[] = [];
     const { video: hasVideoPermission, audio: hasAudioPermission } = await VERTC.enableDevices({
       video,
       audio,
     });
     if (audio) {
       const inputs = await VERTC.enumerateAudioCaptureDevices();
       const outputs = await VERTC.enumerateAudioPlaybackDevices();
       audioInputs = inputs.filter((i) => i.deviceId && i.kind === 'audioinput');
       audioOutputs = outputs.filter((i) => i.deviceId && i.kind === 'audiooutput');
       this._audioCaptureDevice = audioInputs.filter((i) => i.deviceId)?.[0]?.deviceId;
       if (hasAudioPermission) {
         if (!audioInputs?.length) {
           Message.error('无麦克风设备, 请先确认设备情况。');
         }
         if (!audioOutputs?.length) {
           // Message.error('无扬声器设备, 请先确认设备情况。');
         }
       } else {
         Message.error('暂无麦克风设备权限, 请先确认设备权限授予情况。');
       }
     }
     if (video) {
       videoInputs = await VERTC.enumerateVideoCaptureDevices();
       videoInputs = videoInputs.filter((i) => i.deviceId && i.kind === 'videoinput');
       this._videoCaptureDevice = videoInputs?.[0]?.deviceId;
       if (hasVideoPermission) {
         if (!videoInputs?.length) {
           Message.error('无摄像头设备, 请先确认设备情况。');
         }
       } else {
         Message.error('暂无摄像头设备权限, 请先确认设备权限授予情况。');
       }
     }

     return {
@@ -230,7 +254,17 @@ export class RTCClient {
     await this.engine.stopVideoCapture();
   };

+  startScreenCapture = async (enableAudio = false) => {
+    await this.engine.startScreenCapture({
+      enableAudio,
+    });
+  };
+
+  stopScreenCapture = async () => {
+    await this.engine.stopScreenCapture();
+  };
+
   startAudioCapture = async (mic?: string) => {
     await this.engine.startAudioCapture(mic || this._audioCaptureDevice);
   };
@@ -246,6 +280,18 @@ export class RTCClient {
     this.engine.unpublishStream(mediaType);
   };

+  publishScreenStream = async (mediaType: MediaType) => {
+    await this.engine.publishScreen(mediaType);
+  };
+
+  unpublishScreenStream = async (mediaType: MediaType) => {
+    await this.engine.unpublishScreen(mediaType);
+  };
+
+  setScreenEncoderConfig = async (description: ScreenEncoderConfig) => {
+    await this.engine.setScreenEncoderConfig(description);
+  };
+
   /**
    * @brief 设置业务标识参数
    * @param businessId
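The screen-sharing wrappers added in this hunk and the previous one are not exercised anywhere in the diff's context lines. A minimal sketch of how they might be combined, assuming the RtcClient default export used elsewhere in the diff ('@/lib/RtcClient'); the toggleScreenShare helper itself is illustrative, not part of the commit:

import { MediaType } from '@volcengine/rtc';
import RtcClient from '@/lib/RtcClient';

// Hypothetical helper: start or stop sharing the local screen.
async function toggleScreenShare(sharing: boolean) {
  if (sharing) {
    await RtcClient.startScreenCapture(false); // capture the screen, without system audio
    await RtcClient.publishScreenStream(MediaType.VIDEO); // publish the screen track to the room
  } else {
    await RtcClient.stopScreenCapture();
    await RtcClient.unpublishScreenStream(MediaType.VIDEO);
  }
}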
@@ -290,12 +336,19 @@ export class RTCClient {
     return this.engine.setLocalVideoMirrorType(type);
   };

-  setLocalVideoPlayer = (userId: string, renderDom?: string | HTMLElement) => {
-    return this.engine.setLocalVideoPlayer(StreamIndex.STREAM_INDEX_MAIN, {
-      renderDom,
-      userId,
-      renderMode: VideoRenderMode.RENDER_MODE_HIDDEN,
-    });
+  setLocalVideoPlayer = (
+    userId: string,
+    renderDom?: string | HTMLElement,
+    isScreenShare = false
+  ) => {
+    return this.engine.setLocalVideoPlayer(
+      isScreenShare ? StreamIndex.STREAM_INDEX_SCREEN : StreamIndex.STREAM_INDEX_MAIN,
+      {
+        renderDom,
+        userId,
+        renderMode: VideoRenderMode.RENDER_MODE_FILL,
+      }
+    );
   };

   /**
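The new isScreenShare flag lets the same helper render either the camera stream (STREAM_INDEX_MAIN, as before) or the screen stream (STREAM_INDEX_SCREEN); the render mode also changes from RENDER_MODE_HIDDEN to RENDER_MODE_FILL. A hypothetical call site, with placeholder DOM element ids:

// Camera preview: unchanged call shape.
RtcClient.setLocalVideoPlayer(userId, 'local-camera-player');
// Screen-share preview: select the screen stream via the new third argument.
RtcClient.setLocalVideoPlayer(userId, 'local-screen-player', true);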
@@ -311,7 +364,6 @@ export class RTCClient {
     if(this.basicInfo.init_msg){
       agentConfig.WelcomeMessage = ""
     }

     const options = {
       AppId: aigcConfig.BaseConfig.AppId,
       BusinessId: aigcConfig.BaseConfig.BusinessId,
@@ -351,11 +403,7 @@ export class RTCClient {
   /**
    * @brief 命令 AIGC
    */
-  commandAudioBot = (
-    command: COMMAND,
-    interruptMode = INTERRUPT_PRIORITY.NONE,
-    message = ''
-  ) => {
+  commandAudioBot = (command: COMMAND, interruptMode = INTERRUPT_PRIORITY.NONE, message = '') => {
     if (this.audioBotEnabled) {
       this.engine.sendUserBinaryMessage(
         aigcConfig.BotName,
@@ -30,14 +30,11 @@ import {
   addAutoPlayFail,
   removeAutoPlayFail,
   updateAITalkState,
   setHistoryMsg,
   setCurrentMsg,
   updateNetworkQuality,
 } from '@/store/slices/room';
 import RtcClient, { IEventListener } from './RtcClient';

 import { setMicrophoneList, updateSelectedDevice } from '@/store/slices/device';
 import Utils from '@/utils/utils';
 import { useMessageHandler } from '@/utils/handler';

 const useRtcListeners = (): IEventListener => {
@@ -45,12 +42,19 @@ const useRtcListeners = (): IEventListener => {
   const { parser } = useMessageHandler();
   const playStatus = useRef<{ [key: string]: { audio: boolean; video: boolean } }>({});

-  const debounceSetHistoryMsg = Utils.debounce((text: string, user: string) => {
-    const isAudioEnable = RtcClient.getAudioBotEnabled();
-    if (isAudioEnable) {
-      dispatch(setHistoryMsg({ text, user }));
-    }
-  }, 600);
+  const handleTrackEnded = async (event: { kind: string; isScreen: boolean }) => {
+    const { kind, isScreen } = event;
+    /** 浏览器自带的屏幕共享关闭触发方式,通过 onTrackEnd 事件去关闭 */
+    if (isScreen && kind === 'video') {
+      await RtcClient.stopScreenCapture();
+      await RtcClient.unpublishScreenStream(MediaType.VIDEO);
+      dispatch(
+        updateLocalUser({
+          publishScreen: false,
+        })
+      );
+    }
+  };

   const handleUserJoin = (e: onUserJoinedEvent) => {
     const extraInfo = JSON.parse(e.userInfo.extraInfo || '{}');
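handleTrackEnded takes the place of the debounced setHistoryMsg helper and covers the case where the user stops sharing from the browser's own "Stop sharing" control; the inline comment translates roughly to "screen sharing closed via the browser's built-in UI is shut down through the onTrackEnd event". At the Web API level, that control ends the underlying capture track, which is what the SDK surfaces here as a track-ended event with isScreen set. For reference only, a plain-browser sketch of that behaviour, independent of this codebase:

// Illustrative only: shows the native 'ended' event the RTC layer reacts to.
async function watchNativeStopSharing() {
  const displayStream = await navigator.mediaDevices.getDisplayMedia({ video: true });
  displayStream.getVideoTracks()[0].addEventListener('ended', () => {
    // Roughly the point at which the SDK reports onTrackEnded with isScreen === true.
    console.log('screen capture was stopped from the browser UI');
  });
}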
@@ -167,22 +171,6 @@ const useRtcListeners = (): IEventListener => {
     }
   };

-  const handleUserMessageReceived = (e: { userId: string; message: any }) => {
-    /** debounce 记录用户输入文字 */
-    if (e.message) {
-      const msgObj = JSON.parse(e.message || '{}');
-      if (msgObj.text) {
-        const { text: msg, definite, user_id: user } = msgObj;
-        if ((window as any)._debug_mode) {
-          dispatch(setHistoryMsg({ msg, user }));
-        } else {
-          debounceSetHistoryMsg(msg, user);
-        }
-        dispatch(setCurrentMsg({ msg, definite, user }));
-      }
-    }
-  };
-
   const handleAutoPlayFail = (event: AutoPlayFailedEvent) => {
     const { userId, kind } = event;
     let playUser = playStatus.current?.[userId] || {};
@@ -264,6 +252,7 @@ const useRtcListeners = (): IEventListener => {
     handleError,
     handleUserJoin,
     handleUserLeave,
+    handleTrackEnded,
     handleUserPublishStream,
     handleUserUnpublishStream,
     handleRemoteStreamStats,
@@ -271,7 +260,6 @@ const useRtcListeners = (): IEventListener => {
     handleLocalAudioPropertiesReport,
     handleRemoteAudioPropertiesReport,
     handleAudioDeviceStateChanged,
     handleUserMessageReceived,
     handleAutoPlayFail,
     handlePlayerEvent,
     handleUserStartAudioCapture,
@@ -14,6 +14,12 @@ import { COMMAND, INTERRUPT_PRIORITY } from '@/utils/handler';
 import RtcClient from '@/lib/RtcClient';
 import { setCurrentMsg, setHistoryMsg } from '@/store/slices/room';

+interface Message {
+  value: string;
+  user: string;
+  isInterrupted?: boolean;
+}
+
 const lines: (string | React.ReactNode)[] = [];

 function Conversation(props: React.HTMLAttributes<HTMLDivElement>) {
@@ -83,7 +89,7 @@ function Conversation(props: React.HTMLAttributes<HTMLDivElement>) {
       ) : (
         ''
       )}
-      {msgHistory?.map(({ value, user, isInterrupted }, index) => {
+      {msgHistory?.map(({ value, user, isInterrupted }: Message, index: number) => {
         const isUserMsg = user === userId;
         const isRobotMsg = user === Config.BotName;
         if (!isUserMsg && !isRobotMsg) {
@@ -140,7 +140,7 @@
     justify-content: center;
     gap: 16px;
     background-color: #fff;
-    padding: 8px 35px;
+    padding: 8px 15px;

     .talkWrapper {
       --h: 16px;
@@ -2210,10 +2210,10 @@
     "@typescript-eslint/types" "5.31.0"
     eslint-visitor-keys "^3.3.0"

-"@volcengine/rtc@4.58.9":
-  version "4.58.9"
-  resolved "https://registry.yarnpkg.com/@volcengine/rtc/-/rtc-4.58.9.tgz#841ebaddd5d4963c71abd33037bd76d1d490d928"
-  integrity sha512-nnXnNW9pVo8ynBSxVe0ikNIdxWfoSx5oOnwK7EoMCXdc2bJgHATpz/B+Kv2F1k4GjYAbo7ZcOm/g3cchvHgH5Q==
+"@volcengine/rtc@4.66.1":
+  version "4.66.1"
+  resolved "https://registry.yarnpkg.com/@volcengine/rtc/-/rtc-4.66.1.tgz#1934c269b31216f43718ae46b169c59ac5e474f2"
+  integrity sha512-APznH6eosmKJC1HYJJ8s6G3Mq3OSgw6ivv6uCiayM5QNMBj+GW6zxf+MVsk5rm6r4R92TLwQErWonJ8yzGO4xA==
   dependencies:
     eventemitter3 "^4.0.7"
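This lockfile entry mirrors the @volcengine/rtc bump in the manifest at the top of the diff; both edits are what a single dependency upgrade would normally regenerate, for example with something like `yarn add --exact @volcengine/rtc@4.66.1` (the exact flag matching the caret-less version pin used here).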