Create a new component, StreamHandler, under the views/Channel/components folder. It will grow into the component that handles the game room; for now we write the initial Agora integration logic.
import { memo, useEffect, useState } from "react";
import { connect } from "react-redux";
import { Spin } from "antd";
import AgoraRTC from "agora-rtc-sdk-ng";

const options = {
  appId: process.env.REACT_APP_AGORA_APPID || "default id",
  channel: process.env.REACT_APP_AGORA_CHANNEL || "test",
  token: process.env.REACT_APP_AGORA_TOKEN || "default token",
  uid: process.env.REACT_APP_AGORA_UID || "default uid",
};
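With create-react-app, these REACT_APP_-prefixed variables can be supplied through a .env file in the project root. A sketch with placeholder values (substitute your own App ID and a token generated in the Agora console):

REACT_APP_AGORA_APPID=your-agora-app-id
REACT_APP_AGORA_CHANNEL=test
REACT_APP_AGORA_TOKEN=your-temp-token
REACT_APP_AGORA_UID=123456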
const StreamHandler = (props) => {
  const { userInfo, messageInfo, channelId, enableLocalVoice = false } = props;
  const [rtcClient, setRtcClient] = useState(null);
  const [connectStatus, setConnectStatus] = useState(false);

  useEffect(() => {
    AgoraRTC.setLogLevel(3);
    const client = AgoraRTC.createClient({ mode: "rtc", codec: "vp8" });
    // we use the IM username as the RTC uid so the two systems line up
    client
      .join(options.appId, options.channel, options.token, userInfo?.username)
      .then(() => {
        setConnectStatus(true);
        console.log("[Stream] join channel success");
      })
      .catch((e) => {
        console.log(e);
      });
    setRtcClient(client);
    return () => {
      client.leave();
      setRtcClient(null);
    };
  }, []);

  return <>{!connectStatus && <Spin tip="Loading" size="large" />}</>;
};

const mapStateToProps = ({ app }) => {
  return {
    userInfo: app.userInfo,
  };
};

export default memo(connect(mapStateToProps)(StreamHandler));
Then, back in Channel, add the StreamHandler component to the renderStreamChannel function from earlier:
const [enableVoice, setEnableVoice] = useState(false);
const toggleVoice = () => {
  setEnableVoice((enable) => !enable);
};
const renderStreamChannel = () => {
  return (
    <>
      <div className={s.messageRowWrap}>
        <StreamHandler
          messageInfo={messageInfo}
          channelId={channelId}
          enableLocalVoice={enableVoice}
        />
      </div>
      <div className={s.iptWrap}>
        <Input
          chatType={CHAT_TYPE.groupChat}
          fromId={channelId}
          extraMenuItems={renderStreamMenu()}
        />
      </div>
    </>
  );
};
const renderStreamMenu = () => {
  return [
    {
      key: "voice",
      label: (
        <div className="circleDropItem" onClick={toggleVoice}>
          <Icon
            name="person_wave_slash"
            size="24px"
            iconClass="circleDropMenuIcon"
          />
          <span className="circleDropMenuOp">
            {enableVoice ? "Disable voice" : "Enable voice"}
          </span>
        </div>
      ),
    },
  ];
};
Now, if we create a game channel whose name starts with video-, we should see the RTC connection success message in the console: [Stream] join channel success
Next we'll build the actual RTC publishing logic and the entry points for users to start and stop streaming. Before that, let's quickly go over a few concepts in Agora RTC.
In the integration above we have already completed the first two steps: creating the client object and joining the channel.
RTC can carry both audio and video signals. Since a single RTC client transmits several kinds of data, each individual audio or video source is split into its own track (and because they are produced continuously in real time, we also call them streams). The publish method then hands our local sources to the RTC client for transmission.
When another user publishes a source, the user-published event fires; in its callback we first subscribe to that user to receive the data, then handle each kind of stream accordingly.
When leaving, we must close our current local sources and have the client leave the RTC channel.
Finally, the user-unpublished event tells us when another user stops publishing, so we can remove their corresponding streams.
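To make the publish half concrete, here is a minimal sketch of that lifecycle, assuming a client that has already joined. It also shows one way the so-far-unused enableLocalVoice prop could drive voice publishing; treat it as an illustration, not final component code:

useEffect(() => {
  if (!rtcClient || !enableLocalVoice) return;
  let audioTrack;
  const publishVoice = async () => {
    // capture the microphone as a local audio track...
    audioTrack = await AgoraRTC.createMicrophoneAudioTrack();
    // ...and hand it to the RTC client for transmission
    await rtcClient.publish(audioTrack);
  };
  publishVoice();
  return () => {
    // on the way out, unpublish and release the microphone
    if (audioTrack) {
      rtcClient.unpublish(audioTrack);
      audioTrack.close();
    }
  };
}, [rtcClient, enableLocalVoice]);

Back in the component, the subscribe side is handled through the event callbacks: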
const StreamHandler = (props) => {
  ...
  const localVideoEle = useRef(null);
  const canvasEle = useRef(null);
  const [rtcClient, setRtcClient] = useState(null);
  const [connectStatus, setConnectStatus] = useState(false);
  const [remoteUser, setRemoteUser] = useState(null);
  const [remoteVoices, setRemoteVoices] = useState([]);
  const [remoteVideo, setRemoteVideo] = useState(null);

  useEffect(() => {
    ...
    client.on("user-published", async (user, mediaType) => {
      await client.subscribe(user, mediaType);
      console.log("[Stream] subscribe success on user ", user);
      if (mediaType === "video") {
        // note: this handler is registered once on mount, so remoteUser here is
        // the value captured on the first render; a production version would
        // keep this bookkeeping in a ref
        if (remoteUser && remoteUser.uid !== user.uid) {
          console.error(
            "already in a call, can not subscribe another user ",
            user
          );
          return;
        }
        const remoteVideoTrack = user.videoTrack;
        remoteVideoTrack.play(localVideoEle.current);
        setRemoteVideo(remoteVideoTrack);
        setRemoteUser(user);
      }
      if (mediaType === "audio") {
        const remoteAudioTrack = user.audioTrack;
        remoteAudioTrack.play();
        // a functional update avoids reading a stale remoteVoices array here
        setRemoteVoices((voices) =>
          voices.some((item) => item.uid === user.uid)
            ? voices
            : [...voices, { audio: remoteAudioTrack, uid: user.uid }]
        );
      }
    });
    client.on("user-unpublished", (user) => {
      console.log("[Stream] user-unpublished", user);
      removeUserStream(user);
    });
    setRtcClient(client);
    return () => {
      client.leave();
      setRtcClient(null);
    };
  }, []);

  const removeUserStream = (user) => {
    if (remoteUser && remoteUser.uid === user.uid) {
      setRemoteUser(null);
      setRemoteVideo(null);
    }
    setRemoteVoices((voices) => voices.filter((voice) => voice.uid !== user.uid));
  };
};
Next, we use the custom messages described earlier to determine the current streaming state, treating the latest custom message as authoritative.
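Judging from the fields read below and the sendStreamMessage calls later in this section, the ext payload of such a message has roughly this shape (an assumed example for orientation):

// assumed shape of a stream-control custom message's ext field
const exampleStreamExt = {
  type: "stream",           // marks this custom message as stream-related
  status: CMD_START_STREAM, // or CMD_END_STREAM when the stream ends
  user: "alice",            // username of the member who is streaming
};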
const StreamHandler = (props) => {
  const { userInfo, messageInfo, channelId, enableLocalVoice = false } = props;
  const firstStreamMessage = useMemo(() => {
    return messageInfo?.list?.find(
      (item) => item.type === "custom" && item?.ext?.type === "stream"
    );
  }, [messageInfo]);
  const hasRemoteStream =
    firstStreamMessage?.ext?.status === CMD_START_STREAM &&
    firstStreamMessage?.ext?.user !== userInfo?.username;
  const [localStreaming, setLocalStreaming] = useState(
    firstStreamMessage?.ext?.status === CMD_START_STREAM &&
      firstStreamMessage?.ext?.user === userInfo?.username
  );
  const toggleLocalGameStream = () => {
    if (hasRemoteStream) {
      return;
    }
    setLocalStreaming(!localStreaming);
  };
  return (
    <>
      {!connectStatus && <Spin tip="Loading" size="large" />}
      {hasRemoteStream ? (
        <RemoteStreamHandler
          remoteUser={firstStreamMessage?.ext?.user}
          localVideoRef={localVideoEle}
          // toggleRemoteVideo is defined later in this section
          toggleRemoteVideo={toggleRemoteVideo}
          channelId={channelId}
          userInfo={userInfo}
          rtcClient={rtcClient}
        />
      ) : (
        <LocalStreamHandler
          localStreaming={localStreaming}
          canvasRef={canvasEle}
          toggleLocalGameStream={toggleLocalGameStream}
          rtcClient={rtcClient}
          userInfo={userInfo}
          channelId={channelId}
        />
      )}
    </>
  );
};
Based on hasRemoteStream we branch into two components, RemoteStreamHandler and LocalStreamHandler (you can stub both out with a div and some text at first; a sketch follows below).
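A minimal placeholder version of the two components could look like this, just so the branch above renders before the real logic exists:

// temporary stand-ins, replaced by the real implementations below
const LocalStreamHandler = () => <div>local stream placeholder</div>;
const RemoteStreamHandler = () => <div>remote stream placeholder</div>;

With the placeholders in place, let's flesh out the local game logic first.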
const LocalStreamHandler = (props) => {
  const {
    toggleLocalGameStream,
    canvasRef,
    localStreaming,
    rtcClient,
    userInfo,
    channelId,
  } = props;
  const [localVideoStream, setLocalVideoStream] = useState(null);
  const localPlayerContainerRef = useRef(null);

  useEffect(() => {
    if (!localPlayerContainerRef.current) return;
    let track;
    const f = async () => {
      // use the camera as a stand-in video source for now
      track = await AgoraRTC.createCameraVideoTrack();
      track.play(localPlayerContainerRef.current);
      setLocalVideoStream(track);
    };
    f();
    return () => {
      // release the camera when the component unmounts
      track?.close();
    };
  }, [localPlayerContainerRef]);

  const renderLocalStream = () => {
    return <div style={{ height: "100%" }} ref={localPlayerContainerRef}></div>;
  };

  const renderFloatButtons = () => {
    return (
      <FloatButton.Group
        icon={<DesktopOutlined />}
        trigger="click"
        style={{ left: "380px" }}
      >
        <FloatButton
          onClick={toggleLocalGameStream}
          icon={
            localStreaming ? <VideoCameraFilled /> : <VideoCameraOutlined />
          }
          tooltip={
            <div>{localStreaming ? "Stop streaming" : "Start streaming"}</div>
          }
        />
      </FloatButton.Group>
    );
  };

  return (
    <div style={{ height: "100%" }}>
      {renderFloatButtons()}
      {renderLocalStream()}
    </div>
  );
};
Now, when we enter the streaming room we can already see the local camera feed, but we haven't pushed the video stream into RTC yet, nor handled the go-live logic.
useEffect(() => {
  if (!localStreaming || !rtcClient || !localVideoStream) {
    return;
  }
  console.log("publishing local stream", localVideoStream);
  rtcClient.publish(localVideoStream).then(() => {
    // announce the stream with a custom message so other members can react
    sendStreamMessage(
      {
        user: userInfo?.username,
        status: CMD_START_STREAM,
      },
      channelId
    ).then(() => {
      message.success({
        content: "start streaming",
      });
    });
  });
  return () => {
    if (localVideoStream) {
      rtcClient.unpublish(localVideoStream);
      localVideoStream.stop();
      sendStreamMessage(
        {
          user: userInfo?.username,
          status: CMD_END_STREAM,
        },
        channelId
      );
      message.info({
        content: "stop streaming",
      });
    }
  };
}, [rtcClient, localStreaming, canvasRef, userInfo, channelId, localVideoStream]);
To test streaming, log in with a second account (use an incognito window or a different browser, so cookies are not shared and both accounts can stay logged in), enter the same channel, and start streaming. The first account should then pick up the new state automatically (if not, switch channels once by hand) and land in RemoteStreamHandler, which shows that our streaming logic works.
Next comes the rendering logic for the remote stream. It is comparatively simple: the viewer can choose to start or stop watching the live stream.
const RemoteStreamHandler = (props) => {
  const {
    remoteUser,
    localVideoRef,
    toggleRemoteVideo,
    channelId,
    userInfo,
    rtcClient,
  } = props;
  // lets a viewer force-end the stream state, e.g. if the streamer
  // disconnected without sending CMD_END_STREAM
  const enableForceStop = true;
  const forceStopStream = () => {
    sendStreamMessage(
      {
        user: userInfo?.username,
        status: CMD_END_STREAM,
      },
      channelId
    );
  };

  const renderRemoteStream = () => {
    return (
      <div style={{ height: "100%" }}>
        <div
          id="remote-player"
          style={{
            width: "100%",
            height: "90%",
            border: "1px solid #fff",
          }}
          ref={localVideoRef}
        />
        <div
          style={{
            display: "flex",
            justifyContent: "center",
            marginTop: "10px",
          }}
        >
          <span style={{ color: "#0ECD0A" }}>{remoteUser}</span>
          is playing{" "}
        </div>
      </div>
    );
  };

  const renderFloatButtons = () => {
    return (
      <FloatButton.Group
        icon={<DesktopOutlined />}
        trigger="click"
        style={{ left: "380px" }}
      >
        <FloatButton
          onClick={toggleRemoteVideo}
          icon={<VideoCameraAddOutlined />}
          tooltip={<div>Watch / stop watching the stream</div>}
        />
        {enableForceStop && (
          <FloatButton
            onClick={forceStopStream}
            icon={<VideoCameraAddOutlined />}
            tooltip={<div>Force stop the stream</div>}
          />
        )}
      </FloatButton.Group>
    );
  };

  return (
    <div style={{ height: "100%" }}>
      {renderFloatButtons()}
      {renderRemoteStream()}
    </div>
  );
};
The code that toggles the remote stream lives in StreamHandler and is passed down to RemoteStreamHandler as a prop:
// whether the viewer is currently watching; playback starts automatically in
// the user-published handler, so we assume it begins as true
const [enableRemoteVideo, setEnableRemoteVideo] = useState(true);

const toggleRemoteVideo = () => {
  if (!hasRemoteStream) {
    return;
  }
  console.log("[Stream] set remote video to ", !enableRemoteVideo);
  if (enableRemoteVideo) {
    remoteVideo?.stop();
  } else {
    remoteVideo?.play(localVideoEle.current);
  }
  setEnableRemoteVideo(!enableRemoteVideo);
};
OK, we have now implemented live video streaming inside the Easemob super community, built on Agora RTC.
RustNESEmulator is an NES emulator written in Rust; on the web we can use its precompiled wasm build (imported below as @/pkg). Put the mario.nes file into the src/assets directory; this is the ROM of the original Super Mario Bros. (you can also use any NES game you like, and if you run into problems, feel free to open an issue on RustNESEmulator).
First, we wrap the emulator in a small State class (in a state.js next to the component, imported as ./state below) that drives frames and queues audio:
import * as wasm from "@/pkg";

class State {
  constructor() {
    this.sample_rate = 44100;
    this.buffer_size = 1024;
    this.nes = null;
    this.animation_id = null;
    this.empty_buffers = [];
    this.audio_ctx = null;
    this.gain_node = null;
    this.next_start_time = 0;
    this.last_tick = 0;
    this.mute = false;
    this.setup_audio();
    console.log("[NES]: create state");
  }

  load_rom(rom) {
    // the emulator renders into the element with id "canvas"
    this.nes = wasm.WebNes.new(rom, "canvas", this.sample_rate);
    this.run();
  }

  toggleMute() {
    this.mute = !this.mute;
  }

  setup_audio() {
    const AudioContext = window.AudioContext || window.webkitAudioContext;
    if (!AudioContext) {
      console.error("Browser does not support audio");
      return;
    }
    this.audio_ctx = new AudioContext();
    this.gain_node = this.audio_ctx.createGain();
    this.gain_node.gain.setValueAtTime(1, 0);
  }

  run() {
    // drive the emulator from requestAnimationFrame, but cap it at roughly
    // 60 fps by only stepping a frame when more than 16 ms have passed
    const now = performance.now();
    this.animation_id = requestAnimationFrame(this.run.bind(this));
    if (now - this.last_tick > 16) {
      this.nes.do_frame();
      this.queue_audio();
      this.last_tick = now;
    }
  }

  get_audio_buffer() {
    if (!this.audio_ctx) {
      throw new Error("AudioContext not created");
    }
    // reuse buffers returned by finished sources instead of reallocating
    if (this.empty_buffers.length) {
      return this.empty_buffers.pop();
    } else {
      return this.audio_ctx.createBuffer(1, this.buffer_size, this.sample_rate);
    }
  }

  queue_audio() {
    if (!this.audio_ctx || !this.gain_node) {
      throw new Error("Audio not set up correctly");
    }
    this.gain_node.gain.setValueAtTime(1, this.audio_ctx.currentTime);
    const audioBuffer = this.get_audio_buffer();
    // let the emulator fill the buffer with the next chunk of samples
    this.nes.audio_callback(this.buffer_size, audioBuffer.getChannelData(0));
    if (this.mute) {
      return;
    }
    const source = this.audio_ctx.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(this.gain_node).connect(this.audio_ctx.destination);
    source.onended = () => {
      this.empty_buffers.push(audioBuffer);
    };
    // schedule each ~23 ms buffer (1024 / 44100 s) back to back, keeping a
    // small 32 ms latency cushion ahead of the context's current time
    const latency = 0.032;
    const audio_ctxTime = this.audio_ctx.currentTime + latency;
    const start = Math.max(this.next_start_time, audio_ctxTime);
    source.start(start);
    this.next_start_time = start + this.buffer_size / this.sample_rate;
  }
}

export default State;
Back in LocalStreamHandler, we boot the wasm module, fetch the ROM, and hand it to our State class:
import mario_url from "@/assets/mario.nes";
import * as wasm_emulator from "@/pkg";
import State from "./state";

const LocalStreamHandler = (props) => {
  // create State lazily: a plain useRef(new State()) would re-evaluate the
  // initializer (and create a new AudioContext) on every render
  const stateRef = useRef(null);
  if (!stateRef.current) {
    stateRef.current = new State();
  }

  useEffect(() => {
    if (!canvasRef?.current) {
      return;
    }
    wasm_emulator.wasm_main();
    fetch(mario_url, {
      headers: { "Content-Type": "application/octet-stream" },
    })
      .then((response) => response.arrayBuffer())
      .then((data) => {
        const mario = new Uint8Array(data);
        stateRef.current.load_rom(mario);
      });
  }, [canvasRef]);

  const renderLocalStream = () => {
    return (
      <div style={{ height: "100%" }}>
        <canvas
          id="canvas"
          style={{ width: 600, height: 500 }}
          width="600"
          height="500"
          ref={canvasRef}
        />
      </div>
    );
  };
};
Once this step is done, we can play Mario locally. The key bindings are:
A = J
B = K
Select = RShift
Start = Return
Up = W
Down = S
Left = A
Right = D
Finally, we update the publish effect in LocalStreamHandler: instead of the camera track, we capture the emulator's canvas at 30 fps and publish it as a custom video track.
useEffect(() => {
  if (!localStreaming || !rtcClient || !canvasRef?.current) {
    return;
  }
  // grab a 30 fps MediaStream from the canvas and wrap its video track
  // as a custom Agora video track
  const stream = canvasRef.current.captureStream(30);
  const localVideoStream = AgoraRTC.createCustomVideoTrack({
    mediaStreamTrack: stream.getVideoTracks()[0],
  });
  console.log("height", canvasRef.current.height);
  console.log("publishing local stream", localVideoStream);
  rtcClient.publish(localVideoStream).then(() => {
    sendStreamMessage(
      {
        user: userInfo?.username,
        status: CMD_START_STREAM,
      },
      channelId
    ).then(() => {
      message.success({
        content: "start streaming",
      });
    });
  });
  return () => {
    if (localVideoStream) {
      rtcClient.unpublish(localVideoStream);
      localVideoStream.stop();
      sendStreamMessage(
        {
          user: userInfo?.username,
          status: CMD_END_STREAM,
        },
        channelId
      );
      message.info({
        content: "stop streaming",
      });
    }
  };
}, [rtcClient, localStreaming, canvasRef, userInfo, channelId]);
Finally, let's summarize the room flow:
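- A user creates or enters a video- channel; StreamHandler joins the Agora RTC channel.
- While no remote stream exists, LocalStreamHandler runs the NES emulator locally; clicking "start streaming" captures the canvas, publishes it as a custom video track, and sends a CMD_START_STREAM custom message.
- Other members see that message, switch to RemoteStreamHandler, receive user-published, subscribe, and play the remote track (with a button to watch or stop watching).
- Stopping the stream unpublishes the track and sends CMD_END_STREAM; viewers receive user-unpublished and remove the corresponding streams.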
