Header files:
xformat.h
#pragma once
/// <summary>
/// Base class for muxing and demuxing
/// </summary>
#include <mutex>
struct AVFormatContext;
struct AVCodecParameters;
struct AVPacket;
struct XRational
{
int num; ///< Numerator
int den; ///< Denominator
};
class XFormat
{
public:
/// <summary>
/// Copy codec parameters. Thread safe.
/// </summary>
/// <param name="stream_index">index into c_->streams</param>
/// <param name="dst">destination parameters (output)</param>
/// <returns>true on success</returns>
bool CopyPara(int stream_index, AVCodecParameters* dst);
/// <summary>
/// Set the context, releasing any previously stored one. Passing NULL effectively closes the context.
/// Thread safe.
/// </summary>
/// <param name="c"></param>
void set_c(AVFormatContext* c);
int audio_index() { return audio_index_; }
int video_index() { return video_index_; }
XRational video_time_base() { return video_time_base_; }
XRational audio_time_base() { return audio_time_base_; }
// Rescale the packet timestamps according to the time base
bool RescaleTime(AVPacket* pkt, long long offset_pts, XRational time_base);
protected:
AVFormatContext* c_ = nullptr; // muxing/demuxing context
std::mutex mux_; // mutex protecting c_
int video_index_ = 0; // video/audio stream indexes in c_->streams
int audio_index_ = 1;
XRational video_time_base_ = { 1,25 };
XRational audio_time_base_ = { 1,9000 };
};
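A note on XRational: it mirrors FFmpeg's AVRational so that this header does not have to pull in the FFmpeg headers itself. A timestamp expressed in a given time base converts to seconds as pts * num / den. A minimal sketch of the conversion (the 1/90000 time base is an assumed example value, not one taken from the project):
// Sketch: pts <-> seconds for a given time base (1/90000 assumed for illustration)
XRational tb{ 1, 90000 };
long long pts = 900000; // hypothetical packet pts
double seconds = pts * (double)tb.num / (double)tb.den; // 10.0 seconds
long long back = (long long)(seconds * tb.den / tb.num); // 900000 again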
xdemux.h
#pragma once
#include "xformat.h"
class XDemux :public XFormat
{
public:
/// <summary>
/// Open the demuxer
/// </summary>
/// <param name="url">url to demux; rtsp is supported</param>
/// <returns>nullptr on failure</returns>
static AVFormatContext* Open(const char* url);
/// <summary>
/// Read one packet
/// </summary>
/// <param name="pkt">output packet</param>
/// <returns>true on success</returns>
bool Read(AVPacket* pkt);
bool Seek(long long pts, int stream_index);
};
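For orientation, a minimal usage sketch of XDemux (error handling omitted; "in.mp4" is a placeholder path; av_packet_alloc/av_packet_unref come from the FFmpeg headers already used in the sources below):
XDemux demux;
demux.set_c(XDemux::Open("in.mp4"));
AVPacket* pkt = av_packet_alloc();
while (demux.Read(pkt))
{
// inspect pkt->stream_index, pkt->pts ... then drop the reference
av_packet_unref(pkt);
}
av_packet_free(&pkt);
demux.set_c(nullptr); // closes the input context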
xmux.h
#pragma once
#include "xformat.h"
/// <summary>
/// Media muxer
/// </summary>
class XMux :public XFormat
{
public:
/// <summary>
/// Open the muxer for output
/// </summary>
static AVFormatContext* Open(const char* url);
bool WriteHead();
bool Write(AVPacket* pkt);
bool WriteEnd();
};
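And the matching lifecycle for XMux (again only a sketch; the stream codec parameters and time bases still have to be filled in before WriteHead, exactly as main.cpp below does):
XMux mux;
mux.set_c(XMux::Open("out.mp4")); // "out.mp4" is a placeholder path
// ... copy codec parameters and time bases into the output streams here ...
mux.WriteHead();
// ... mux.RescaleTime(...) and mux.Write(...) for each packet ...
mux.WriteEnd();
mux.set_c(nullptr);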
Source files:
main.cpp
#include <iostream>
#include <thread>
#include "xdemux.h"
#include "xmux.h"
using namespace std;
extern "C" { //指定函数是c语言函数,函数名不包含重载标注
//引用ffmpeg头文件
#include <libavformat/avformat.h>
}
//Link the FFmpeg import libraries (MSVC pragma)
#pragma comment(lib,"avformat.lib")
#pragma comment(lib,"avutil.lib")
#pragma comment(lib,"avcodec.lib")
void PrintErr(int err)
{
char buf[1024] = { 0 };
av_strerror(err, buf, sizeof(buf) - 1);
cerr << buf << endl;
}
#define CERR(err) if(err!=0){ PrintErr(err);getchar();return -1;}
int main(int argc, char* argv[])
{
/// Command-line argument handling
//usage message
string usage = "124_test_xformat <input file> <output file> <start sec> <end sec>\n";// up to five arguments, at least three, used like an ffmpeg command line
usage += "e.g. 124_test_xformat v1080.mp4 test_out.mp4 10 20";
cout << usage << endl;
if (argc < 3)
{
return -1;
}
string in_file = argv[1];//second argument: input file
string out_file = argv[2];//third argument: output file
/// Cut the audio/video between 10 and 20 seconds; prefer taking slightly more rather than less
// e.g. if keyframes sit at 9 s and 11 s, we start from the 9 s keyframe
int begin_sec = 0; //cut start time (seconds)
int end_sec = 0; //cut end time (seconds)
if (argc > 3)
begin_sec = atoi(argv[3]);//cut start time, string to int
if (argc > 4)
end_sec = atoi(argv[4]);
///
//Open the media file
//const char* url = "v1080.mp4";
/// Demuxing
//demuxing input context
XDemux demux;
auto demux_c = demux.Open(in_file.c_str());
demux.set_c(demux_c);
/// Muxing
//muxing output context
//const char* out_url = "test_mux.mp4";
XMux mux;
auto mux_c = mux.Open(out_file.c_str());
mux.set_c(mux_c);
auto mvs = mux_c->streams[mux.video_index()]; //video stream
auto mas = mux_c->streams[mux.audio_index()]; //audio stream
//has video
if (demux.video_index() >= 0)
{
mvs->time_base.num = demux.video_time_base().num;
mvs->time_base.den = demux.video_time_base().den;
//copy the video parameters
demux.CopyPara(demux.video_index(), mvs->codecpar);
}
//has audio
if (demux.audio_index() >= 0)
{
mas->time_base.num = demux.audio_time_base().num;
mas->time_base.den = demux.audio_time_base().den;
//copy the audio parameters
demux.CopyPara(demux.audio_index(), mas->codecpar);
}
mux.WriteHead();
long long video_begin_pts = 0;
long long audio_begin_pts = 0; //audio start pts
long long video_end_pts = 0;
//Convert the cut start time in seconds into pts in the input streams' time bases
if (begin_sec > 0)
{
//compute the video start and end pts
if (demux.video_index() >= 0 && demux.video_time_base().num > 0)
{
double t = (double)demux.video_time_base().den / (double)demux.video_time_base().num;
video_begin_pts = t * begin_sec;
video_end_pts = t * end_sec;
demux.Seek(video_begin_pts, demux.video_index()); //seek to the start position
}
//compute the audio start pts
if (demux.audio_index() >= 0 && demux.audio_time_base().num > 0)
{
double t = (double)demux.audio_time_base().den / (double)demux.audio_time_base().num;
audio_begin_pts = t * begin_sec;
}
}
int audio_count = 0;
int video_count = 0;
double total_sec = 0;
AVPacket pkt = { 0 };
for (;;)
{
if (!demux.Read(&pkt))//stop on EOF or read error
{
break;
}
// stop once the video pts passes the end time
if (video_end_pts > 0
&& pkt.stream_index == demux.video_index()
&& pkt.pts > video_end_pts)
{
av_packet_unref(&pkt);
break;
}
if (pkt.stream_index == demux.video_index())
{
mux.RescaleTime(&pkt, video_begin_pts, demux.video_time_base());
video_count++;
if (demux.video_time_base().den > 0)
total_sec += pkt.duration * ((double)demux.video_time_base().num / (double)demux.video_time_base().den);
}
else if (pkt.stream_index == demux.audio_index())
{
mux.RescaleTime(&pkt, audio_begin_pts, demux.audio_time_base());
audio_count++;
}
//write the packet; av_interleaved_write_frame takes ownership and clears pkt
mux.Write(&pkt);
}
//write the trailer (contains the index/offset information)
mux.WriteEnd();
demux.set_c(nullptr);
mux.set_c(nullptr);
cout << "输出文件" << out_file << ":" << endl;
cout << "视频帧:" << video_count << endl;
cout << "音频帧:" << audio_count << endl;
cout << "总时长:" << total_sec << endl;
getchar();
return 0;
}
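To make the begin/end pts arithmetic concrete: with begin_sec = 10, end_sec = 20 and an assumed video time base of 1/90000 (the real value is read from the input stream), the code above computes t = 90000 / 1 = 90000, so video_begin_pts = 90000 * 10 = 900000 and video_end_pts = 90000 * 20 = 1800000. Packets whose pts exceeds 1800000 terminate the copy loop, and 900000 is subtracted from every timestamp inside RescaleTime so that the output starts near zero.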
xformat.cpp
#include "xformat.h"
#include <iostream>
#include <thread>
using namespace std;
extern "C" { //指定函数是c语言函数,函数名不包含重载标注
//引用ffmpeg头文件
#include <libavformat/avformat.h>
}
//Link the FFmpeg import libraries (MSVC pragma)
#pragma comment(lib,"avformat.lib")
#pragma comment(lib,"avutil.lib")
void XFormat::set_c(AVFormatContext* c)
{
unique_lock<mutex> lock(mux_);
if (c_) //release the previously set context
{
if (c_->oformat) //output (muxing) context
{
if (c_->pb)
avio_closep(&c_->pb);
avformat_free_context(c_);
}
else if (c_->iformat) //input (demuxing) context
{
avformat_close_input(&c_);
}
else
{
avformat_free_context(c_);
}
}
c_ = c;
if (!c_)return;
//-1 marks that the corresponding stream was not found
audio_index_ = -1;
video_index_ = -1;
//find the audio/video stream indexes
for (int i = 0; i < c->nb_streams; i++)
{
//audio
if (c->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
{
audio_index_ = i;
audio_time_base_.den = c->streams[i]->time_base.den;
audio_time_base_.num = c->streams[i]->time_base.num;
}
else if (c->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
video_index_ = i;
video_time_base_.den = c->streams[i]->time_base.den;
video_time_base_.num = c->streams[i]->time_base.num;
}
}
}
/// <summary>
/// Copy codec parameters. Thread safe.
/// </summary>
/// <param name="stream_index">index into c_->streams</param>
/// <param name="dst">destination parameters (output)</param>
/// <returns>true on success</returns>
bool XFormat::CopyPara(int stream_index, AVCodecParameters* dst)
{
unique_lock<mutex> lock(mux_);
if (!c_)
{
return false;
}
if (stream_index < 0 || stream_index >= (int)c_->nb_streams)
return false;
auto re = avcodec_parameters_copy(dst, c_->streams[stream_index]->codecpar);
if (re < 0)
{
return false;
}
return true;
}
bool XFormat::RescaleTime(AVPacket* pkt, long long offset_pts, XRational time_base)
{
unique_lock<mutex> lock(mux_);
if (!c_)return false;
auto out_stream = c_->streams[pkt->stream_index];
AVRational in_time_base;
in_time_base.num = time_base.num;
in_time_base.den = time_base.den;
pkt->pts = av_rescale_q_rnd(pkt->pts - offset_pts, in_time_base,
out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)
);
pkt->dts = av_rescale_q_rnd(pkt->dts - offset_pts, in_time_base,
out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)
);
pkt->duration = av_rescale_q(pkt->duration, in_time_base, out_stream->time_base);
pkt->pos = -1;
return true;
}
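The two av_rescale_q_rnd calls above are just a rational time-base conversion applied after shifting by the offset; as a sketch of the math:
// pts_out = round( (pts_in - offset_pts) * in_tb.num * out_tb.den / (in_tb.den * out_tb.num) )
AV_ROUND_NEAR_INF rounds to the nearest representable value, and AV_ROUND_PASS_MINMAX passes INT64_MIN/INT64_MAX (and therefore AV_NOPTS_VALUE) through unchanged, so packets that carry no timestamp are not corrupted.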
xdemux.cpp
#include "xdemux.h"
#include <iostream>
#include <thread>
using namespace std;
extern "C" { //指定函数是c语言函数,函数名不包含重载标注
//引用ffmpeg头文件
#include <libavformat/avformat.h>
}
static void PrintErr(int err)
{
char buf[1024] = { 0 };
av_strerror(err, buf, sizeof(buf) - 1);
cerr << buf << endl;
}
#define BERR(err) if(err!= 0){PrintErr(err);return 0;}
AVFormatContext* XDemux::Open(const char* url)
{
AVFormatContext* c = nullptr;
//open the demuxing (input) context
auto re = avformat_open_input(&c, url, nullptr, nullptr);
BERR(re);
//probe the stream information
re = avformat_find_stream_info(c, nullptr);
BERR(re);
//dump the input format information
av_dump_format(c, 0, url, 0);
return c;
}
bool XDemux::Read(AVPacket* pkt)
{
unique_lock<mutex> lock(mux_);
if (!c_)return false;
auto re = av_read_frame(c_, pkt);
BERR(re);
return true;
}
bool XDemux::Seek(long long pts, int stream_index)
{
unique_lock<mutex> lock(mux_);
if (!c_)return false;
auto re = av_seek_frame(c_, stream_index, pts,
AVSEEK_FLAG_FRAME | AVSEEK_FLAG_BACKWARD);
BERR(re);
return true;
}
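A note on Seek: AVSEEK_FLAG_BACKWARD asks the demuxer for the keyframe at or before the requested timestamp. Using the scenario assumed in main.cpp (keyframes at 9 s and 11 s, a requested start of 10 s, and an assumed 1/90000 video time base), asking for pts 900000 lands on the keyframe at pts 810000, which is why the cut clip may start slightly earlier than requested ("take more rather than less").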
xmux.cpp
#include "xmux.h"
#include <iostream>
#include <thread>
using namespace std;
extern "C" { //指定函数是c语言函数,函数名不包含重载标注
//引用ffmpeg头文件
#include <libavformat/avformat.h>
}
static void PrintErr(int err)
{
char buf[1024] = { 0 };
av_strerror(err, buf, sizeof(buf) - 1);
cerr << buf << endl;
}
#define BERR(err) if(err!= 0){PrintErr(err);return 0;}
//Open the muxer for output
AVFormatContext* XMux::Open(const char* url)
{
AVFormatContext* c = nullptr;
//allocate the output context (format guessed from the url extension)
auto re = avformat_alloc_output_context2(&c, NULL, NULL, url);
BERR(re);
//add video and audio streams
auto vs = avformat_new_stream(c, NULL); //video stream
vs->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
auto as = avformat_new_stream(c, NULL); //audio stream
as->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
//open the output IO
re = avio_open(&c->pb, url, AVIO_FLAG_WRITE);
BERR(re);
return c;
}
bool XMux::Write(AVPacket* pkt)
{
unique_lock<mutex> lock(mux_);
if (!c_)return false;
//write one packet; it is buffered internally and interleaved by dts; passing pkt = nullptr flushes the buffer
auto re = av_interleaved_write_frame(c_,pkt);
BERR(re);
return true;
}
bool XMux::WriteEnd()
{
unique_lock<mutex> lock(mux_);
if (!c_)return false;
av_interleaved_write_frame(c_, nullptr);//flush the interleaving buffer
auto re = av_write_trailer(c_);
BERR(re);
return true;
}
bool XMux::WriteHead()
{
unique_lock<mutex> lock(mux_);
if (!c_)return false;
auto re = avformat_write_header(c_, nullptr);
BERR(re);
//dump the output format information
av_dump_format(c_, 0, c_->url, 1);
return true;
}
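One detail in XMux::Open worth noting: avformat_alloc_output_context2 is called with a null format name, so the muxer is guessed from the file extension of url. Forcing a specific container is a one-argument change (a sketch, not part of the original code):
// force the mp4 muxer regardless of the output file extension
auto re = avformat_alloc_output_context2(&c, NULL, "mp4", url);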
Run result:
With the project's debug arguments set to the default "v1080.mp4 test_out.mp4 10 20" (four arguments), the program cuts out a roughly ten-second clip.