WebCodecs mix-blend-mode: screen blend mode
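This demo reproduces the CSS mix-blend-mode: screen effect on a &lt;canvas&gt;: the MP4 is demuxed with mp4box.js, decoded frame by frame with the WebCodecs VideoDecoder, and each decoded frame is composited over a background image using globalCompositeOperation = 'screen'.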
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Document</title>
<style>
.bg,
.video,
.canvas {
width: 200px;
height: auto;
object-fit: contain;
box-sizing: border-box;
}
</style>
</head>
<body>
<img src="./school_overcast-s.jpg" alt="" class="bg" />
<video src="./fire.mp4" controls autoplay loop muted class="video"></video>
<canvas class="canvas"></canvas>
<script src="./mp4box.min.js"></script>
<script>
const imgBg = document.querySelector('.bg');
/** @type {HTMLVideoElement} */
const video = document.querySelector('.video');
/** @type {HTMLCanvasElement} */
const canvas = document.querySelector('.canvas');
/** @type {CanvasRenderingContext2D} */
const ctx = canvas.getContext('2d');
const mp4box = MP4Box.createFile();
// console.log("mp4box => ", mp4box)
// the video track to be decoded
let videoTrack = null,
videoDecoder = null;
// the sequence of decoded video frames, stored as ImageBitmaps
const videoFrames = [];
let nbSampleTotal = 0,
countSample = 0;
mp4box.onError = (e) => {
console.error('Error:', e);
};
mp4box.onReady = (info) => {
console.log('Info:', info);
videoTrack = info.videoTracks[0];
if (!videoTrack) return;
// extract the samples of the given track id
mp4box.setExtractionOptions(videoTrack.id, 'video', {
// number of samples per onSamples callback
nbSamples: 100,
});
nbSampleTotal = videoTrack.nb_samples;
// set up the video decoder
videoDecoder = new VideoDecoder({
async output(videoFrame) {
// copy the frame into an ImageBitmap, then release the frame promptly
const img = await createImageBitmap(videoFrame);
videoFrames.push({
img,
duration: videoFrame.duration,
timestamp: videoFrame.timestamp,
});
videoFrame.close();
},
error(err) {
console.error('videoDecoder error => ', err);
},
});
videoDecoder.configure({
codec: videoTrack.codec,
codedWidth: videoTrack.track_width,
codedHeight: videoTrack.track_height,
description: getExtraData(),
});
// start extraction; batches of samples arrive in onSamples
mp4box.start();
};
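// Note: this demo buffers every decoded frame as an ImageBitmap before
// playback starts, which is fine for a short loop like fire.mp4 but would
// be memory-heavy for long videos; decoding on demand would scale better.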
mp4box.onSamples = (trackId, ref, samples) => {
// samples: the extracted sample data for this batch
if (videoTrack.id === trackId) {
mp4box.stop();
countSample += samples.length;
for (const { is_sync, duration, data, cts } of samples) {
// sync samples (key frames) become 'key' chunks, the rest 'delta'
const type = is_sync ? 'key' : 'delta';
const chunk = new EncodedVideoChunk({
type,
timestamp: cts,
duration,
data,
});
videoDecoder.decode(chunk);
}
// once every sample has been queued, flush the decoder
if (countSample === nbSampleTotal) {
videoDecoder.flush();
}
}
};
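// Note: a VideoDecoder must receive a key frame before any delta frames;
// mp4box.js marks sync samples (e.g. H.264 IDR frames) with `is_sync`.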
/**
* Build the `description` field for `VideoDecoder.configure()`:
* the codec-specific configuration record from the sample description box.
*/
function getExtraData() {
const entry = mp4box.moov.traks[0].mdia.minf.stbl.stsd.entries[0];
// avcC (H.264), hvcC (H.265) or vpcC (VP9), whichever the file contains
const box = entry.avcC ?? entry.hvcC ?? entry.vpcC;
if (box) {
const stream = new DataStream(undefined, 0, DataStream.BIG_ENDIAN);
box.write(stream);
// strip the 8-byte box header (size + type); the decoder only wants the payload
return new Uint8Array(stream.buffer.slice(8));
}
}
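// Note: providing the avcC record as `description` tells VideoDecoder that
// the chunks use length-prefixed MP4 ("avc") framing rather than Annex B.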
let index = 0;
/**
* Draw one decoded frame per display frame of the playing <video>.
* @param {DOMHighResTimeStamp} now
* @param {VideoFrameCallbackMetadata} metadata
*/
function drawFrame(now, metadata) {
// decoding is asynchronous; wait until frames are available
if (videoFrames.length === 0) {
video.requestVideoFrameCallback(drawFrame);
return;
}
const { img } = videoFrames[index];
ctx.clearRect(0, 0, canvas.width, canvas.height);
ctx.globalCompositeOperation = 'source-over';
// draw the background image
ctx.drawImage(imgBg, 0, 0, canvas.width, canvas.height);
// composite the decoded frame with the screen blend mode
ctx.globalCompositeOperation = 'screen';
ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
// loop back to the first frame at the end of the sequence
index = (index + 1) % videoFrames.length;
video.requestVideoFrameCallback(drawFrame);
}
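// requestVideoFrameCallback fires once per composited video frame, so the
// canvas animation stays in step with the <video> element's playback.

// Feed the raw MP4 to mp4box.js. Each appended ArrayBuffer must carry a
// `fileStart` byte offset; flush() signals that no more data will follow.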
fetch('./fire.mp4')
.then((res) => res.arrayBuffer())
.then((buffer) => {
buffer.fileStart = 0;
mp4box.appendBuffer(buffer);
mp4box.flush();
});
video.addEventListener('loadedmetadata', () => {
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
video.requestVideoFrameCallback(drawFrame);
});
</script>
</body>
</html>
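For comparison, when both layers stay plain DOM elements the same look needs no WebCodecs at all; a minimal CSS-only sketch, assuming the same assets:

<div style="position: relative;">
<img src="./school_overcast-s.jpg" alt="" class="bg" />
<!-- the compositor applies the screen blend mode directly -->
<video src="./fire.mp4" autoplay loop muted class="video"
style="position: absolute; left: 0; top: 0; mix-blend-mode: screen;"></video>
</div>

The canvas route is worth the extra code when you need the blended pixels themselves, e.g. to export or re-encode the result, which CSS blending cannot provide.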
From: https://www.cnblogs.com/chlai/p/18175782