Memory leak when drawing video to a canvas with requestAnimationFrame
I am trying to do a dual-monitor screen recording by combining screen1 and screen2 in a canvas, using Vue and Electron. I keep running into a memory leak. After troubleshooting and narrowing the problem down, I found that even this simple drawing code causes the leak, and so far I cannot figure out why drawing inside the canvas leaks memory. I also tried clearing the canvas before each draw, but the leak persists. My full code is here:
<!-- Electron version: pick the screen to share -->
<template>
<div v-show="false" class="modal-container">
<section class="modal">
<div class="modalTitleName">ScreenRecord Result</div>
<div class="modalTitleBarRightBtn">
<button class="closeButton" v-on:click="close">
<span class="icon"
><img src="images/icon_black_close.svg" style="width: 20px"
/></span>
</button>
</div>
<div class="screenshoot-result">
<canvas style="display: none" id="canvasRecord"></canvas>
<img id="my-preview" />
<video style="display: none" id="video1" autoplay="autoplay" />
<video style="display: none" id="video2" autoplay="autoplay" />
<!-- <video id="video3" autoplay="autoplay" /> -->
</div>
<div class="modalSaveButton">
<button class="saveButton">
<span class="icon">Save Screenshoot</span>
</button>
</div>
</section>
</div>
</template>
<script>
import { langKey } from "@/scripts/starise-lang.js";
import Logger from "@/scripts/starise-logger";
const logger = Logger("ScreenRecordModule");
export default {
name: "ScreenRecordModule",
components: {},
props: {},
data: () => {
return {
langKey: langKey,
show: "screen",
screenSources: [],
imageFormat: "image/jpeg",
img: null,
width: [],
height: [],
readyToShow: false,
imageSave: null,
videoStream: null,
mediaRecorder: null,
soundRecorder: null,
chuncks: [],
videoURL: null,
streamWrite: null,
recorderStream: null,
canvas: null,
video1: null,
video2: null,
ctx: null,
};
},
created() {
// init(window);
},
mounted() {
logger.debug("mounted");
if (window.electron) {
// logger.debug("PATH:%o", window.electron);
}
this.init();
},
methods: {
init() {
logger.debug("init");
let _inst = this;
this.screenSources = [];
if (window.electron) {
// Get the available screens
window.electron.desktopCapturer
.getSources({
types: ["screen"],
})
.then(async (sources) => {
for (const source of sources) {
if (source.id.includes("screen:")) {
const stream = await navigator.mediaDevices.getUserMedia({
audio:
process.platform === "win32"
? {
mandatory: {
chromeMediaSource: "desktop",
},
}
: false,
video: {
mandatory: {
chromeMediaSource: "desktop",
chromeMediaSourceId: source.id,
// maxWidth: 4000,
// maxHeight: 4000,
},
},
});
let stream_settings = stream.getVideoTracks()[0].getSettings();
logger.debug("Stream setting:%o", stream_settings);
// actual width & height of the camera video
let stream_width = stream_settings.width;
let stream_height = stream_settings.height;
logger.debug("Width: " + stream_width + "px");
logger.debug("Height: " + stream_height + "px");
_inst.screenSources.push(stream);
}
}
try {
this.handleStream(_inst.screenSources);
} catch (error) {
logger.debug("THIS IS SCREENSOURCES ERROR: %o", error);
}
});
}
},
async handleStream(screenSources) {
// Create hidden video tag
let video = [
document.getElementById("video1"),
document.getElementById("video2"),
];
for (let i = 0; i < screenSources.length; i++) {
video[i].srcObject = screenSources[i];
video[i].onloadedmetadata = function () {
video[i].play();
};
}
this.readyToShow = true;
logger.debug("Num of Screen: %o", this.screenSources.length);
this.video1 = document.getElementById("video1");
this.video2 = document.getElementById("video2");
this.canvas = document.getElementById("canvasRecord");
this.ctx = this.canvas.getContext("2d");
this.canvas.height = 1080;
this.canvas.width = 1920 * this.screenSources.length;
/* Add audio in and audio in desktop */
const speakerStream = await navigator.mediaDevices.getUserMedia({
audio: true,
video: false,
});
const audioDesktop = await navigator.mediaDevices.getUserMedia({
audio: {
mandatory: {
chromeMediaSource: "desktop",
},
},
video: {
mandatory: {
chromeMediaSource: "desktop",
},
},
});
//Mix the track
this.recorderStream = this.mixer(audioDesktop, speakerStream);
//Add audio track to canvas stream
const canvasStream = this.canvas.captureStream();
canvasStream.addTrack(this.recorderStream.getAudioTracks()[0]);
this.mediaRecorder = new MediaRecorder(canvasStream);
let chunks = [];
this.mediaRecorder.ondataavailable = function (e) {
if (e.data.size > 0) {
chunks.push(e.data);
}
};
this.mediaRecorder.onstop = function (e) {
let blob = new Blob(chunks, { type: "video/mp4" }); // other types are available such as 'video/webm' for instance, see the doc for more info
chunks = [];
this.videoURL = URL.createObjectURL(blob);
let a = document.createElement("a");
document.body.appendChild(a);
a.style = "display: none";
a.href = this.videoURL;
a.download = Date.now() + ".mp4";
a.click();
window.URL.revokeObjectURL(this.videoURL);
// video3.src = this.videoURL;
this.mediaRecorder = null;
};
this.mediaRecorder.start(3000);
// if (this.screenSources.length > 1) {
// window.requestAnimationFrame(this.drawFirstVideo);
// window.requestAnimationFrame(this.drawSecondVideo);
// } else {
// window.requestAnimationFrame(this.drawFirstVideo);
// }
this.testDraw();
},
testDraw() {
// this.ctx.clearRect(0,0,this.canvas.width, this.canvas.height)
this.ctx.fillStyle = "#FF0000";
this.ctx.fillRect(0, 0, 150, 75);
requestAnimationFrame(this.testDraw);
},
drawFirstVideo() {
this.ctx.drawImage(this.video1, 0, 0);
requestAnimationFrame(this.drawFirstVideo);
},
drawSecondVideo() {
this.ctx.drawImage(this.video2, 1920, 0);
window.requestAnimationFrame(this.drawSecondVideo);
},
//Mixing Desktop Audio
mixer(windowSource, speakerSource) {
const audioContext = new AudioContext();
const mediaStreamDestination =
audioContext.createMediaStreamDestination();
if (
windowSource &&
!!windowSource &&
windowSource.getAudioTracks().length > 0
) {
logger.debug("windowSource");
audioContext
.createMediaStreamSource(windowSource)
.connect(mediaStreamDestination);
}
if (
speakerSource &&
!!speakerSource &&
speakerSource.getAudioTracks().length > 0
) {
audioContext
.createMediaStreamSource(speakerSource)
.connect(mediaStreamDestination);
}
return new MediaStream(
mediaStreamDestination.stream
.getTracks()
.concat(windowSource.getVideoTracks())
);
},
showContext(type) {
this.show = type;
},
close() {
this.$store.commit("starv/show", { showScreenRecordModule: false });
},
picked(id, type) {
window.setDesktopShareSourceId(id, type);
this.$store.commit("starv/show", { showScreenshotModule: false });
},
},
watch: {
"$store.state.starv.show.startScreenRecord": function (isRecord) {
if (!isRecord) {
this.mediaRecorder.stop();
this.close();
}
},
},
};
</script>
<style scoped src="@/styles/ShareScreenPicker.css" />
After narrowing the problem down, I know that this part of the code causes the memory leak:
this.testDraw();
},
testDraw() {
// this.ctx.clearRect(0,0,this.canvas.width, this.canvas.height)
this.ctx.fillStyle = "#FF0000";
this.ctx.fillRect(0, 0, 150, 75);
requestAnimationFrame(this.testDraw);
},
Has anyone faced the same issue before? Thank you.
1 Answer
I found that the problem was in this.canvas.captureStream(). I wanted to tile two 1920x1080 screens side by side to build a dual-monitor screen recorder, so I needed a large canvas, 3840x1080. My conclusion is that JavaScript does not get enough time to garbage-collect at that size; when I record a single screen at 1920x1080, everything works fine. So if you want to call captureStream() on a big canvas, you have to sacrifice one of two things: either capture the canvas stream at a lower frame rate, for example 15 or 10 fps, by passing it as an argument, e.g. this.canvas.captureStream(10) for 10 fps; or keep the frame rate and rescale to a smaller canvas size instead.
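For reference, here is a minimal sketch of both workarounds in the same component style as the code above. The 10 fps value, the half-size scale, and the drawScaledVideos name are illustrative assumptions, not part of the original code:

// Option 1 – capture the big canvas at a lower frame rate.
// captureStream() accepts an optional frame rate; ~10 fps here is just an example.
const canvasStream = this.canvas.captureStream(10);

// Option 2 – keep the frame rate but render into a smaller canvas.
// In handleStream(), size the canvas at half resolution...
this.canvas.width = (1920 / 2) * this.screenSources.length; // 960 per screen
this.canvas.height = 1080 / 2; // 540
// ...and scale each video down when drawing, instead of drawing it 1:1:
drawScaledVideos() {
  this.ctx.drawImage(this.video1, 0, 0, 960, 540);
  this.ctx.drawImage(this.video2, 960, 0, 960, 540);
  requestAnimationFrame(this.drawScaledVideos);
},

Either way, the encoder behind captureStream() no longer has to keep up with full-resolution 3840x1080 frames on every animation frame, which is what appeared to exhaust memory here.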