As an exercise in learning WebRTC, I am trying to show my local webcam and, next to it, a delayed playback of the same stream. To achieve this, I pass the recorded blobs to a SourceBuffer and use the corresponding MediaSource as the source of the second video element.
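For context, here is a minimal sketch of how I assume the callback below is wired up (the element IDs #live and #delayed and the vp8/opus MIME type are illustrative placeholders, not taken from the original code):

// Sketch of the surrounding setup. recordedBlobs, updatingBuffer and
// sourceBuffer are the globals the callback below relies on.
const recordedBlobs = [];
let updatingBuffer = false;
let sourceBuffer;

const mediaSource = new MediaSource();
const delayedVideo = document.querySelector("#delayed");
delayedVideo.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener("sourceopen", () => {
  // The SourceBuffer type must match what the MediaRecorder produces.
  sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8,opus"');
});

navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then((stream) => {
  document.querySelector("#live").srcObject = stream; // live preview
  const recorder = new MediaRecorder(stream, { mimeType: 'video/webm; codecs="vp8,opus"' });
  recorder.ondataavailable = handleDataAvailable; // the callback below
  recorder.start(100); // emit a Blob roughly every 100 ms
});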
// the ondataavailable callback for the MediaRecorder
async function handleDataAvailable(event) {
  // console.log("handleDataAvailable", event);
  if (event.data && event.data.size > 0) {
    recordedBlobs.push(event.data);
  }
  if (recordedBlobs.length > 5) {
    // log once, the first time the threshold is crossed
    if (recordedBlobs.length === 6)
      console.log("buffered enough for delayed playback");
    if (!updatingBuffer) {
      updatingBuffer = true; // reset in the SourceBuffer's 'updateend' handler (not shown)
      const bufferedBlob = recordedBlobs.shift();
      const bufferedAsArrayBuffer = await bufferedBlob.arrayBuffer();
      if (!sourceBuffer.updating) {
        console.log("appending to buffer");
        sourceBuffer.appendBuffer(bufferedAsArrayBuffer);
      } else {
        // an append is still in flight; put the blob back and try again
        console.warn("Buffer still updating...");
        recordedBlobs.unshift(bufferedBlob);
      }
    }
  }
}
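The snippet above never resets updatingBuffer, so presumably that happens in an updateend listener on the SourceBuffer. A minimal sketch of such a handler, under that assumption:

// Hypothetical counterpart to the flag set in handleDataAvailable:
// appendBuffer() completes asynchronously, so the flag can only be
// released once the SourceBuffer signals that the append has finished.
sourceBuffer.addEventListener("updateend", () => {
  updatingBuffer = false;
});

Without it, the first append would leave updatingBuffer stuck at true and silently block every later append.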
I have this simple piece of code that takes chunks of a video stream and plays them back through a MediaSource. I can see the video, but sometimes it stops: it may run for seconds or for minutes, but eventually it halts at some point, and chrome://media-internals/ shows no errors. What is wrong here?
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia || navigator.msGetUserMedia;

var mediaSource = new MediaSource();

// Legacy Chrome constraint syntax (mandatory/optional); the standard form
// would be { video: { width: { min: 320, max: 320 }, ... } }.
var constraints = {
  "audio": true,
  "video": {
    "mandatory": {
      "minWidth": 320, "maxWidth": 320,
      "minHeight": 240, "maxHeight": 240
    },
    "optional": []
  }
};

window.mediaSource = mediaSource;
var sourceBuffer;
var video = document.querySelector('#video');
window.video = video;

// Attach the MediaSource to the video element; the SourceBuffer can only
// be created once 'sourceopen' fires.
video.src = window.URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', function (e) {
  console.log("sourceopen");
  sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
  window.sourceBuffer = sourceBuffer;
}, false);

mediaSource.addEventListener('error', function (e) {
  console.log("error", e);
}, false);

var stack …
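The snippet is cut off here, but for what it's worth: a common reason for this kind of silent stall is calling appendBuffer() while sourceBuffer.updating is still true (an InvalidStateError), or the buffer filling up (a QuotaExceededError). A sketch of a queue-driven append pattern that guards against both; queue, onChunk and the 10-second eviction window are illustrative names and numbers, not part of the code above:

// Hypothetical append queue: incoming chunks wait here, and appendBuffer()
// is only called while the SourceBuffer is idle.
var queue = [];

function appendNext() {
  if (!sourceBuffer || sourceBuffer.updating || queue.length === 0) return;
  try {
    sourceBuffer.appendBuffer(queue[0]);
    queue.shift(); // drop the chunk only after appendBuffer() accepted it
  } catch (e) {
    if (e.name === "QuotaExceededError" && video.currentTime > 10) {
      // Buffer is full: evict everything more than 10 s behind the playhead.
      // remove() is async too; the resulting 'updateend' retries the append.
      sourceBuffer.remove(0, video.currentTime - 10);
    } else {
      console.error("append failed", e);
    }
  }
}

// Register inside the 'sourceopen' handler, right after addSourceBuffer():
// sourceBuffer.addEventListener('updateend', appendNext);

// Feed each recorded chunk (as an ArrayBuffer) through the queue.
function onChunk(arrayBuffer) {
  queue.push(arrayBuffer);
  appendNext();
}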