可以用 captureStream 加 MediaRecorder 实现,例子:<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Document</title>
  </head>
  <body>
    <canvas id="canvas" width="400" height="400"></canvas>
    <a id="download" href="">下载</a>
    <script>
      const canvas = document.querySelector("#canvas");
      const ctx = canvas.getContext("2d");

      /**
       * Load an image and resolve with the HTMLImageElement.
       * FIX: the original never handled load failure, so a bad path
       * left the promise (and the whole demo) hanging forever.
       * @param {string} src - image URL
       * @returns {Promise<HTMLImageElement>}
       */
      function loadImage(src) {
        return new Promise((resolve, reject) => {
          const img = new Image();
          img.onload = () => resolve(img);
          img.onerror = () => reject(new Error(`failed to load image: ${src}`));
          img.src = src;
        });
      }

      /**
       * Record the canvas for `len` milliseconds and resolve with an
       * object URL pointing at the resulting WebM blob.
       * @param {HTMLCanvasElement} canvas - canvas to capture
       * @param {number} [len=4000] - recording duration in ms (used as the
       *   MediaRecorder timeslice: the first dataavailable fires after `len` ms)
       * @returns {Promise<string>} object URL of the recorded video
       */
      function record(canvas, len = 4000) {
        const recordedChunks = [];
        return new Promise((resolve, reject) => {
          const stream = canvas.captureStream(25 /* fps */);
          // FIX: `mediaRecorder` was an implicit global in the original.
          const mediaRecorder = new MediaRecorder(stream, {
            mimeType: "video/webm; codecs=vp9",
          });
          mediaRecorder.ondataavailable = (event) => {
            recordedChunks.push(event.data);
            // The timeslice makes the first chunk arrive after `len` ms;
            // stop as soon as we have it.
            if (mediaRecorder.state === "recording") {
              mediaRecorder.stop();
            }
          };
          mediaRecorder.onstop = () => {
            const blob = new Blob(recordedChunks, { type: "video/webm" });
            resolve(URL.createObjectURL(blob));
          };
          // FIX: the original declared `rej` but never used it — recorder
          // errors were silently swallowed.
          mediaRecorder.onerror = (event) => reject(event.error);
          // FIX: attach all handlers before starting so no event is missed.
          mediaRecorder.start(len);
        });
      }

      const start = async () => {
        // FIX: load both images in parallel instead of serially.
        const [img1, img2] = await Promise.all([
          loadImage("./1.jpg"),
          loadImage("./2.jpg"),
        ]);
        let flag = false;
        const timer = setInterval(() => {
          flag = !flag;
          ctx.drawImage(flag ? img1 : img2, 0, 0, 400, 400);
        }, 1000);
        try {
          const url = await record(canvas);
          alert("完成");
          document.querySelector("a").href = url;
        } finally {
          // FIX: the original interval kept redrawing forever after the
          // recording finished (and leaked on error).
          clearInterval(timer);
        }
      };
      start();
    </script>
  </body>
</html>
如果是 Node.js 后端的话可以调用 ffmpeg 的接口,剩下的就是看 ffmpeg API 了,跟 JS 已经无关了。如果是浏览器前端,想用纯 JS 做任意格式的视频编码确实不现实,WebAssembly(例如 ffmpeg.wasm)可以;不过浏览器原生的 captureStream 加 MediaRecorder 本身就是 JS API,可以直接录制生成 WebM 视频。
可以用 captureStream 加 MediaRecorder 实现,例子见上。