编译ffmpeg
脚本 build.sh
# Location of the Emscripten-built FFmpeg tree (headers + static libs).
export FFMPEG_PATH=./ffmpeg-snapshot/decoder_wasm/ffmpeg
echo "Running Emscripten..."
# Compile the C decoder and link the FFmpeg static libraries into wasm.
# decode.js is prepended via --pre-js so it can install Module callbacks.
# Functions marked EMSCRIPTEN_KEEPALIVE in the C source are exported
# automatically; only _malloc/_free need listing here.
emcc -O1 ffmpeg_decode.c --pre-js decode.js \
-I ${FFMPEG_PATH}/include/ \
${FFMPEG_PATH}/lib/libavcodec.a \
${FFMPEG_PATH}/lib/libavutil.a \
${FFMPEG_PATH}/lib/libswscale.a \
-o ffmpeg_decode.js \
-s EXPORTED_FUNCTIONS=_malloc,_free \
-s ALLOW_MEMORY_GROWTH=1 \
-s ASSERTIONS=1 \
-lworkerfs.js
# Options kept for reference while experimenting with the build:
# -s EXPORTED_RUNTIME_METHODS=ccall,cwrap,allocate,UTF8ToString,intArrayFromString \
# -s ENVIRONMENT=web \
# -s MODULARIZE=1 \
# -s FORCE_FILESYSTEM=1 \
# -s RESERVED_FUNCTION_POINTERS \
# -s EXPORT_ES6=1 \
# -s USE_ES6_IMPORT_META=0
echo "Finished Build"
源码
#include <libavcodec/avcodec.h>
#include <libavutil/channel_layout.h>
#include <libavutil/common.h>
#include <libavutil/frame.h>
#include <libavutil/samplefmt.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>
#include <libavutil/parseutils.h>
#include <libavutil/mem.h>
#include <libswscale/swscale.h>
#include <libavformat/avformat.h>
/* EM_PORT_API(rettype): portability macro for exported entry points.
 * Under Emscripten it adds EMSCRIPTEN_KEEPALIVE so the function survives
 * dead-code elimination and is callable from JS (Module._name); under C++
 * it additionally forces C linkage.  Outside Emscripten it reduces to a
 * plain declaration so the file still compiles natively. */
#ifndef EM_PORT_API
#if defined(__EMSCRIPTEN__)
#include <emscripten.h>
#if defined(__cplusplus)
#define EM_PORT_API(rettype) extern "C" rettype EMSCRIPTEN_KEEPALIVE
#else
#define EM_PORT_API(rettype) rettype EMSCRIPTEN_KEEPALIVE
#endif
#else
#if defined(__cplusplus)
#define EM_PORT_API(rettype) extern "C" rettype
#else
#define EM_PORT_API(rettype) rettype
#endif
#endif
#endif
/* ---- Decoder state: one global pipeline per worker instance ---- */
const AVCodec *videoCodec = NULL;     /* decoder selected by init() */
AVCodecContext *videoCodecCtx = NULL; /* open codec context */
AVCodecParserContext *parser = NULL;  /* splits the raw byte stream into packets */
AVPacket *pkt = NULL;                 /* packet currently being decoded */
AVFrame *yuvFrame = NULL;             /* decoded frame in the codec's native pix_fmt */
AVFrame *rgbFrame = NULL;             /* frame converted to RGB32 for display */
struct SwsContext *img_ctx = NULL;    /* YUV -> RGB converter, created lazily in decode() */
unsigned char *out_buffer = NULL;     /* pixel buffer backing rgbFrame's data planes */
int frameWidth = 0;                   /* dimensions of the last decoded frame */
int frameHeight = 0;
uint8_t *frame = NULL;                /* points at rgbFrame->data[0]; read by JS via getFrame() */
/* Width in pixels of the most recently decoded frame (0 until one exists). */
EM_PORT_API(int)
getWidth() { return frameWidth; }
/* Height in pixels of the most recently decoded frame (0 until one exists). */
EM_PORT_API(int)
getHeight() { return frameHeight; }
/* Pointer (wasm heap offset) to the RGB32 pixels of the latest decoded
 * frame, or NULL while no frame has been produced yet. */
EM_PORT_API(uint8_t *)
getFrame() { return frame; }
EM_PORT_API(void)
init(int codecID)
{
    /*
     * Allocate and open the decoder pipeline for the given FFmpeg codec ID
     * (the JS side defaults to 27, which is AV_CODEC_ID_H264).  Allocates
     * the packet, the decoded (YUV) and converted (RGB) frames, the
     * bitstream parser, and the codec context.  On any failure a message
     * is printed and the function returns early; call close() to release
     * whatever was allocated.
     */
    pkt = av_packet_alloc();
    if (!pkt)
    {
        printf("pkt alloc failed.\n");
        return;
    }
    yuvFrame = av_frame_alloc();
    if (!yuvFrame)
    {
        printf("yuvFrame alloc failed.\n");
        return;
    }
    rgbFrame = av_frame_alloc();
    if (!rgbFrame)
    {
        printf("rgbFrame alloc failed.\n");
        return;
    }
    videoCodec = avcodec_find_decoder(codecID);
    if (!videoCodec)
    {
        printf("videoCodec find failed.\n");
        return;
    }
    parser = av_parser_init(codecID);
    if (!parser)
    {
        printf("parser init failed.\n");
        return;
    }
    videoCodecCtx = avcodec_alloc_context3(videoCodec);
    if (!videoCodecCtx)
    {
        printf("videoCodecCtx alloc failed.\n");
        return;
    }
    int ret = avcodec_open2(videoCodecCtx, videoCodec, NULL);
    if (ret < 0)
    {
        /* Fixed: the original printed "videoCodecCtx alloc failed." here,
           which misreported an open failure as an allocation failure. */
        printf("avcodec_open2 failed.\n");
        return;
    }
    printf("codec init success.\n");
}
EM_PORT_API(void)
close()
{
    /* Release everything allocated by init()/decode().  Idempotent: every
     * branch NULLs its pointer, so calling close() repeatedly is safe (the
     * JS side calls it before every re-init). */
    if (parser)
    {
        av_parser_close(parser);
        parser = NULL;
    }
    if (pkt)
    {
        av_packet_free(&pkt); /* also sets pkt to NULL */
    }
    if (yuvFrame)
    {
        av_frame_free(&yuvFrame);
    }
    if (rgbFrame)
    {
        av_frame_free(&rgbFrame);
    }
    if (videoCodecCtx)
    {
        /* avcodec_free_context() closes the codec and NULLs the pointer.
         * Fixed: the original followed this with a second block calling
         * avcodec_close(videoCodecCtx) — dead code after the pointer was
         * NULLed, and a use-after-free had it ever executed. */
        avcodec_free_context(&videoCodecCtx);
    }
    if (out_buffer)
    {
        av_free(out_buffer);
        out_buffer = NULL;
    }
    if (img_ctx)
    {
        sws_freeContext(img_ctx);
        img_ctx = NULL;
    }
    /* Reset the cached frame view so getFrame() cannot hand JS a pointer
     * into the freed out_buffer. */
    frame = NULL;
    frameWidth = 0;
    frameHeight = 0;
    printf("close %s\n", __FUNCTION__);
}
EM_PORT_API(void)
decode()
{
    /* Send the current packet to the decoder and convert every frame it
     * yields to RGB32.  The latest frame is exposed to JS through
     * getFrame()/getWidth()/getHeight(). */
    int ret = avcodec_send_packet(videoCodecCtx, pkt);
    if (ret < 0)
    {
        fprintf(stderr, "Error sending packet for decoding\n");
        av_packet_unref(pkt);
        return;
    }
    /* Fixed: the original looped on `(ret = avcodec_receive_frame(...)) >= 0`,
     * which made its EAGAIN/EOF/error checks inside the loop unreachable and
     * silently swallowed decode errors.  This is the canonical receive loop. */
    for (;;)
    {
        ret = avcodec_receive_frame(videoCodecCtx, yuvFrame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            break; /* decoder needs more input / is fully drained */
        else if (ret < 0)
        {
            fprintf(stderr, "Error during decoding\n");
            break;
        }
        /* Lazily create the YUV -> RGB32 converter once dimensions are known. */
        if (!img_ctx)
        {
            printf("init img_ctx\n");
            img_ctx = sws_getContext(videoCodecCtx->width,
                                     videoCodecCtx->height,
                                     videoCodecCtx->pix_fmt,
                                     videoCodecCtx->width,
                                     videoCodecCtx->height,
                                     AV_PIX_FMT_RGB32,
                                     SWS_BICUBIC, NULL, NULL, NULL);
            if (!img_ctx)
            {
                fprintf(stderr, "sws_getContext failed\n");
                break;
            }
        }
        /* Lazily allocate the RGB output buffer and bind it to rgbFrame. */
        if (!out_buffer)
        {
            printf("init out_buffer\n");
            int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB32, videoCodecCtx->width, videoCodecCtx->height, 1);
            out_buffer = (unsigned char *)av_malloc(numBytes * sizeof(unsigned char));
            if (!out_buffer)
            {
                fprintf(stderr, "out_buffer alloc failed\n");
                break;
            }
            int res = av_image_fill_arrays(
                rgbFrame->data, rgbFrame->linesize,
                out_buffer, AV_PIX_FMT_RGB32,
                videoCodecCtx->width, videoCodecCtx->height, 1);
            if (res < 0)
            {
                break;
            }
        }
        sws_scale(img_ctx,
                  yuvFrame->data, yuvFrame->linesize,
                  0, videoCodecCtx->height,
                  rgbFrame->data, rgbFrame->linesize);
        frameWidth = videoCodecCtx->width;
        frameHeight = videoCodecCtx->height;
        frame = rgbFrame->data[0];
    }
    /* Unref once, after draining; the original also unref'd inside the loop,
     * which was redundant (unref on an empty packet is a no-op). */
    av_packet_unref(pkt);
}
EM_PORT_API(void)
parsePkt(uint8_t *data, int len)
{
    /* Feed `len` raw bytes to the bitstream parser; each complete packet it
     * produces is handed to decode().  A zero length signals end-of-stream.
     *
     * Fixes vs. the original:
     *  - av_parser_parse2 was given a hard-coded 4096 instead of `len`,
     *    over-reading the caller's buffer whenever len < 4096;
     *  - `continue` on a parse error never advanced `data`, looping forever;
     *  - `eof` was never cleared, so a zero-length call also looped forever.
     */
    int eof = !len;
    while (len > 0 || eof)
    {
        int ret = av_parser_parse2(parser, videoCodecCtx, &pkt->data, &pkt->size,
                                   data, len, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
        if (ret < 0)
        {
            fprintf(stderr, "Error while parsing\n");
            break;
        }
        data += ret;
        len -= ret;
        if (pkt->size)
        {
            decode();
        }
        else if (eof)
        {
            /* Flushing pass produced nothing more: stop. */
            break;
        }
    }
}
js 胶水
var Module = typeof Module != 'undefined' ? Module : {};
// --pre-jses are emitted after the Module integration code, so that they can
// refer to Module (if they choose; they can also define Module)
// Fixed: the original unconditionally reassigned `Module = {};` right after
// the guarded declaration above, discarding any pre-existing Module config
// the embedding page (or Emscripten) had already set up.
Module.onRuntimeInitialized = function () {
  console.log(Module);
  // Tell the main thread the wasm runtime is ready to accept decode requests.
  let message = {
    type: "message",
    info: "init"
  };
  postMessage(message);
};
// ---- Worker-global decode state -------------------------------------------
let u8Array;              // the whole media file as a Uint8Array (set in onmessage)
console.log("Worker: mission start.");
let allDataLength;        // cached u8Array.length
let pos = 0;              // current read offset into u8Array (bytes)
let startArray;           // per-record header slice (startLength bytes)
let dataArray;            // per-record payload slice handed to the wasm decoder
let cnt = 0;              // units of singleDataLength per record — NOTE(review):
                          // never reassigned in this file; presumably set elsewhere — confirm
let time = "";            // timestamp forwarded with each decoded image
let runFlag = false;      // true while continuous decoding is scheduled
let timeMap = new Map();  // byte offset -> frame index, built by getTimesRange()
let codecId = 27;         // FFmpeg codec ID (27 = AV_CODEC_ID_H264)
/**
 * Scan the whole buffer and build `timeMap` (byte offset -> frame index) so
 * the UI can seek by frame index, then post an "updatePos" message with the
 * final offset and the map.  Resets `pos` to 0 afterwards.
 *
 * NOTE(review): `startLength`, `singleDataLength` and `cnt` are not declared
 * in this file — they must be supplied elsewhere; confirm before relying on
 * the computed offsets.
 */
function getTimesRange() {
  console.log("获取POS");
  timeMap.clear();
  let index = 0;
  timeMap.set(0, 0);
  const step = singleDataLength * cnt;
  while (true) {
    if (pos + startLength > allDataLength) {
      break;
    }
    startArray = u8Array.slice(pos, pos + startLength);
    // Fixed: with cnt === 0 (its initial value) the original advanced pos by
    // 0 bytes per pass and spun forever; bail out on a non-positive step.
    if (step <= 0 || pos + step > allDataLength) {
      break;
    }
    pos += step;
    index++;
    timeMap.set(pos, index);
  }
  let message = {
    type: "updatePos",
    info: pos,
    map: timeMap
  };
  postMessage(message);
  pos = 0;
}
/**
 * Decode one record from `u8Array` at `pos`: skip the header, copy the
 * payload into the wasm heap, run the parser/decoder, and post the resulting
 * RGB frame to the main thread (transferring the pixel buffer).  While
 * `runFlag` is set, reschedules itself to stream frames continuously.
 */
function decodeArray() {
  // End of data: reset and stop.
  if (pos + startLength > allDataLength) {
    console.log("Worker: mission finished.");
    pos = 0;
    return;
  }
  startArray = u8Array.slice(pos, pos + startLength);
  pos += startLength;
  if (pos + singleDataLength * cnt > allDataLength) {
    console.log("Worker: mission finished.");
    pos = 0;
    return;
  }
  dataArray = u8Array.slice(pos, pos + singleDataLength * cnt);
  pos += singleDataLength * cnt;
  // Copy the payload into the wasm heap and hand it to the C parser.
  // NOTE(review): the malloc/parse size uses 1024 * cnt, which assumes
  // singleDataLength === 1024 — confirm.
  var ptr = Module._malloc(1024 * cnt * dataArray.BYTES_PER_ELEMENT);
  Module.HEAPU8.set(dataArray, ptr);
  Module._parsePkt(ptr, 1024 * cnt);
  let outputPtr = Module._getFrame();
  Module._free(ptr);
  if (outputPtr === 0) {
    // No frame produced yet (decoder still buffering input).
    // Fixed: the original only returned when runFlag was set; otherwise it
    // fell through and read width*height*4 bytes starting at heap offset 0,
    // posting a garbage image.  Always return here.
    if (runFlag) {
      setTimeout(decodeArray, 1);
    }
    return;
  }
  var rgbData = new Uint8ClampedArray(
    Module.HEAPU8.subarray(
      outputPtr,
      outputPtr + Module._getWidth() * Module._getHeight() * 4,
    ),
  );
  let rgbObj = {
    width: Module._getWidth(),
    height: Module._getHeight(),
    rgb: rgbData,
    time: time,
    currentPos: timeMap.get(pos)
  };
  let message = {
    type: "image",
    info: rgbObj
  };
  // Transfer the pixel buffer to avoid copying a full frame per message.
  postMessage(message, [message.info.rgb.buffer]);
  if (runFlag) {
    setTimeout(decodeArray, 1);
  }
}
/**
 * Worker message dispatcher.
 *  - {type:"message", info:"start"|"stop"}: start/stop continuous decoding.
 *  - {type:"updatePos", info:offset}: seek to a byte offset and decode one frame.
 *  - {type:"updateCodecId", info:id}: re-init the wasm decoder with a new codec.
 *  - anything else: treated as the raw file bytes (Uint8Array); (re)initialize
 *    the decoder and index the stream.
 * (Loose == comparisons replaced with === per JS best practice.)
 */
onmessage = function (e) {
  if ("message" === e.data.type) {
    if ("start" === e.data.info) {
      runFlag = true;
      decodeArray();
    } else if ("stop" === e.data.info) {
      runFlag = false;
    }
  } else if ("updatePos" === e.data.type) {
    // Seek: decode a single frame at the requested offset.
    pos = e.data.info;
    runFlag = false;
    decodeArray();
  } else if ("updateCodecId" === e.data.type) {
    codecId = e.data.info;
    Module._close();
    Module._init(codecId);
    console.log(codecId);
  } else {
    // New file payload: reset state and rebuild the seek index.
    u8Array = e.data;
    allDataLength = u8Array.length;
    pos = 0;
    Module._close();
    Module._init(codecId);
    getTimesRange();
  }
};
标签:WebAssembly,ffmpeg,videoCodecCtx,pos,Module,编译,let,NULL,data
From: https://blog.51cto.com/u_11997528/7610920