"use client" import * as React from "react" import { cn } from "@/lib/utils" import { ICameraVideoTrack, ILocalVideoTrack, IMicrophoneAudioTrack } from "agora-rtc-sdk-ng" import { useAppSelector, useAppDispatch } from "@/common/hooks" import { isVoiceGenderSupported, VideoSourceType } from "@/common/constant" import { ITextItem, EMessageType, IChatItem } from "@/types" import { rtcManager, IUserTracks, IRtcUser } from "@/manager" import { setRoomConnected, addChatItem, setVoiceType, setOptions, } from "@/store/reducers/global" import AgentVoicePresetSelect from "@/components/Agent/VoicePresetSelect" import AgentView from "@/components/Agent/View" import MicrophoneBlock from "@/components/Agent/Microphone" import CameraBlock from "@/components/Agent/Camera" import VideoBlock from "@/components/Agent/Camera" let hasInit: boolean = false export default function RTCCard(props: { className?: string }) { const { className } = props const dispatch = useAppDispatch() const options = useAppSelector((state) => state.global.options) const voiceType = useAppSelector((state) => state.global.voiceType) const selectedGraphId = useAppSelector((state) => state.global.graphName) const { userId, channel } = options const [videoTrack, setVideoTrack] = React.useState() const [audioTrack, setAudioTrack] = React.useState() const [screenTrack, setScreenTrack] = React.useState() const [remoteuser, setRemoteUser] = React.useState() const [videoSourceType, setVideoSourceType] = React.useState(VideoSourceType.CAMERA) React.useEffect(() => { if (!options.channel) { return } if (hasInit) { return } init() return () => { if (hasInit) { destory() } } }, [options.channel]) const init = async () => { console.log("[rtc] init") rtcManager.on("localTracksChanged", onLocalTracksChanged) rtcManager.on("textChanged", onTextChanged) rtcManager.on("remoteUserChanged", onRemoteUserChanged) await rtcManager.createCameraTracks() await rtcManager.createMicrophoneTracks() await rtcManager.join({ channel, userId, }) dispatch( setOptions({ ...options, appId: rtcManager.appId ?? "", token: rtcManager.token ?? "", }), ) await rtcManager.publish() dispatch(setRoomConnected(true)) hasInit = true } const destory = async () => { console.log("[rtc] destory") rtcManager.off("textChanged", onTextChanged) rtcManager.off("localTracksChanged", onLocalTracksChanged) rtcManager.off("remoteUserChanged", onRemoteUserChanged) await rtcManager.destroy() dispatch(setRoomConnected(false)) hasInit = false } const onRemoteUserChanged = (user: IRtcUser) => { console.log("[rtc] onRemoteUserChanged", user) setRemoteUser(user) } const onLocalTracksChanged = (tracks: IUserTracks) => { console.log("[rtc] onLocalTracksChanged", tracks) const { videoTrack, audioTrack, screenTrack } = tracks setVideoTrack(videoTrack) setScreenTrack(screenTrack) if (audioTrack) { setAudioTrack(audioTrack) } } const onTextChanged = (text: IChatItem) => { console.log("[rtc] onTextChanged", text) dispatch( addChatItem(text), ) } const onVoiceChange = (value: any) => { dispatch(setVoiceType(value)) } const onVideoSourceTypeChange = async (value: VideoSourceType) => { await rtcManager.switchVideoSource(value) setVideoSourceType(value) } return ( <>
    <>
      <div className={cn("flex h-full flex-col gap-2", className)}>
        {/* -- Agent */}
        <div className="flex w-full items-center justify-between p-2">
          <h2 className="text-xl font-semibold">Audio & Video</h2>
          {isVoiceGenderSupported(selectedGraphId) ? (
            <AgentVoicePresetSelect />
          ) : null}
        </div>
        <AgentView audioTrack={remoteuser?.audioTrack} />

        {/* -- You */}
        <MicrophoneBlock audioTrack={audioTrack} />
        <VideoBlock
          cameraTrack={videoTrack}
          screenTrack={screenTrack}
          videoSourceType={videoSourceType}
          onVideoSourceChange={onVideoSourceTypeChange}
        />
      </div>
    </>
  )
}