
9. FFmpeg built with CLion + MinGW32: learning YUV420P, YUV, and RGB coding



Basic idea: continue learning the FFmpeg fundamentals.

Step 1: decode to YUV420P, convert the data to RGB24, and display it. The main point studied here is the sws_getContext function, which can drive all kinds of pixel-format (and size) conversions; a small sketch of that flexibility follows just below.
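As a taste of how flexible sws_getContext is, here is a minimal sketch (my addition, not part of the original program; the helper name and the half-size target are purely illustrative) that turns a decoded YUV420P frame into a half-resolution BGR24 frame in a single sws_scale pass:

extern "C" {
#include "libavutil/frame.h"
#include "libswscale/swscale.h"
}

// Hypothetical helper: convert a decoded YUV420P frame to a half-size BGR24 frame.
// Error handling is kept minimal; the caller owns the returned frame.
static AVFrame *yuv420p_to_bgr24_half(const AVFrame *src) {
    AVFrame *dst = av_frame_alloc();
    dst->width  = src->width  / 2;
    dst->height = src->height / 2;
    dst->format = AV_PIX_FMT_BGR24;
    if (av_frame_get_buffer(dst, 1) < 0) { av_frame_free(&dst); return NULL; }

    // Source geometry/format on the left, destination on the right,
    // plus a scaling algorithm (SWS_BILINEAR here).
    SwsContext *ctx = sws_getContext(src->width, src->height, (AVPixelFormat) src->format,
                                     dst->width, dst->height, AV_PIX_FMT_BGR24,
                                     SWS_BILINEAR, NULL, NULL, NULL);
    sws_scale(ctx, src->data, src->linesize, 0, src->height,
              dst->data, dst->linesize);
    sws_freeContext(ctx);
    return dst;
}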

Contents of CMakeLists.txt:

cmake_minimum_required(VERSION 3.16)
project(untitled15)
set(CMAKE_CXX_STANDARD 11)

include_directories(${CMAKE_SOURCE_DIR}/include)

set(OpenCV_DIR "D:\\Opencv440\\buildMinGW") # change this to the location of your MinGW build of OpenCV
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake/")

find_package(OpenCV REQUIRED)
#set(OpenCV_LIBS opencv_core opencv_imgproc opencv_highgui opencv_imgcodecs)

add_library(libavformat STATIC IMPORTED)
set_target_properties(libavformat PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/lib/libavformat.dll.a)

add_library(libavdevice STATIC IMPORTED)
set_target_properties(libavdevice PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/lib/libavdevice.dll.a)


add_library(libavcodec STATIC IMPORTED)
set_target_properties(libavcodec PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/lib/libavcodec.dll.a)

add_library(libavfilter STATIC IMPORTED)
set_target_properties(libavfilter PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/lib/libavfilter.dll.a)

add_library(libavutil STATIC IMPORTED)
set_target_properties(libavutil PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/lib/libavutil.dll.a)

add_library(libswresample STATIC IMPORTED)
set_target_properties(libswresample PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/lib/libswresample.dll.a)
add_library(libswscale STATIC IMPORTED)
set_target_properties(libswscale PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/lib/libswscale.dll.a)
add_executable(untitled15 main.cpp )

target_link_libraries(untitled15 ${OpenCV_LIBS}
libavformat
libavdevice
libavcodec
libavfilter
libavutil
libswresample
libswscale
)
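As a quick sanity check that the import libraries above are actually found and linked (this snippet is my addition, not from the original post), a tiny program that only prints the versions of the linked FFmpeg components can be built with the same CMakeLists.txt:

#include <cstdio>

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/avutil.h"
}

int main() {
    // Each *_version() reports the run-time version of the linked library,
    // so building and running this confirms the CMake link setup works.
    printf("avutil    %u\n", avutil_version());
    printf("avcodec   %u\n", avcodec_version());
    printf("avformat  %u\n", avformat_version());
    printf("swscale   %u\n", swscale_version());
    printf("ffmpeg    %s\n", av_version_info());
    return 0;
}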

Source code (main.cpp):

#include <stdio.h>
#include <string>
#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

extern "C"
{

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/avutil.h"
#include "libavutil/imgutils.h"

};

using namespace std;

int main(int argc, char *argv[]) {

const char *path = "rtsp://admin:[email protected]:554/h264/ch33/main/av_stream";
AVDictionary *optional=NULL;
av_dict_set(&optional,"stimeout","6000000",0);
av_dict_set(&optional,"rstp_transport","tcp",0);
av_dict_set(&optional,"buffer_size","1024000",0);
av_dict_set(&optional,"max_delay","500000",0);

AVFormatContext *pFormat = NULL;
int ret = avformat_open_input(&pFormat, path, NULL, &optional);
if (ret < 0) {
perror("avformat_open_input");
avformat_free_context(pFormat);
return -1;
}
av_dict_free(&optional);
printf("avformat_open_input successfully\n");
ret = avformat_find_stream_info(pFormat, NULL);
if (ret < 0) {
perror("avformat_find_stream_info\n");
return -1;
}
printf("avformat_find_stream_info successfully\n");
int64_t duration = pFormat->duration;//in AV_TIME_BASE (microsecond) units; may be AV_NOPTS_VALUE for a live stream
int minutes = (int) ((duration / AV_TIME_BASE) / 60);
int seconds = (int) ((duration / AV_TIME_BASE) % 60);
printf("video time: %d'm %d's\n", minutes, seconds);
av_dump_format(pFormat, 0, path, 0);
int videoindex = -1;
for (int i = 0; i < pFormat->nb_streams; i++) {
if (pFormat->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoindex = i;
break;
}
}
if (videoindex == -1) {
printf("don't find video stream\n");
return -1;
}


AVCodecParameters *codecParameters = pFormat->streams[videoindex]->codecpar;
printf("video width %d\n", codecParameters->width);
printf("video height %d\n", codecParameters->height);
AVCodec *pCodec = avcodec_find_decoder(codecParameters->codec_id);
AVCodecContext *pCodecCtx = avcodec_alloc_context3(pCodec);
avcodec_parameters_to_context(pCodecCtx, codecParameters);//copy the stream parameters (width/height, extradata, ...) into the decoder context
ret = avcodec_open2(pCodecCtx, pCodec, NULL);
if (ret < 0) {//open the decoder
printf("Could not open codec.\n");
return -1;
}
AVFrame *picture = av_frame_alloc();
picture->width = codecParameters->width;
picture->height = codecParameters->height;
picture->format = AV_PIX_FMT_YUV420P;
ret = av_frame_get_buffer(picture, 1);
if (ret < 0) {
printf("av_frame_get_buffer error\n");
return -1;
}
printf("picture->linesize[0] %d\n", picture->linesize[0]);
AVFrame *pFrame = av_frame_alloc();
pFrame->width = codecParameters->width;
pFrame->height = codecParameters->height;
pFrame->format = AV_PIX_FMT_YUV420P;
ret = av_frame_get_buffer(pFrame, 1);
if (ret < 0) {
printf("av_frame_get_buffer error\n");
return -1;
}
AVFrame *pFrameRGB = av_frame_alloc();
pFrameRGB->width = codecParameters->width;
pFrameRGB->height = codecParameters->height;
pFrameRGB->format = AV_PIX_FMT_RGB24;
ret = av_frame_get_buffer(pFrameRGB, 1);
if (ret < 0) {
printf("av_frame_get_buffer error\n");
return -1;
}


int picture_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, codecParameters->width, codecParameters->height,
1);//how many bytes a YUV420P picture of this size needs
uint8_t *out_buff = (uint8_t *) av_malloc(picture_size * sizeof(uint8_t));
av_image_fill_arrays(picture->data, picture->linesize, out_buff, AV_PIX_FMT_YUV420P, codecParameters->width,
codecParameters->height, 1);
//this call binds the buffer to the frame; it could be skipped here, because the frame was already set to AV_PIX_FMT_YUV420P and given a buffer by av_frame_get_buffer above
SwsContext *img_convert_ctx = sws_getContext(codecParameters->width, codecParameters->height, AV_PIX_FMT_YUV420P,
codecParameters->width, codecParameters->height, AV_PIX_FMT_RGB24, SWS_BICUBIC,
NULL, NULL, NULL);
AVPacket *packet = av_packet_alloc();


while (av_read_frame(pFormat, packet) >= 0) {
if (packet->stream_index == videoindex) {
ret = avcodec_send_packet(pCodecCtx, packet);
if (ret < 0) {
printf("avcodec_send_packet error\n");
continue;
}
av_packet_unref(packet);
ret = avcodec_receive_frame(pCodecCtx, pFrame);
if (ret < 0) {//AVERROR(EAGAIN) just means the decoder needs more input
printf("avcodec_receive_frame error\n");
continue;
}

sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0,
codecParameters->height,
pFrameRGB->data, pFrameRGB->linesize);


cv::Mat mRGB(codecParameters->height, codecParameters->width, CV_8UC3,
pFrameRGB->data[0], pFrameRGB->linesize[0]);//wrap the RGB buffer, honouring its line stride
cv::Mat mBGR;
cv::cvtColor(mRGB, mBGR, cv::COLOR_RGB2BGR);

cv::imshow("demo", mBGR);
cv::waitKey(1);


}


}
av_frame_free(&picture);
av_frame_free(&pFrame);
av_frame_free(&pFrameRGB);
sws_freeContext(img_convert_ctx);
avformat_close_input(&pFormat);
av_packet_free(&packet);
avcodec_free_context(&pCodecCtx);
return 0;
}
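One detail the read loop above skips is draining the decoder at end of stream, so frames still buffered inside the codec are dropped. A minimal sketch of the standard drain sequence, reusing the pCodecCtx/pFrame names from the code above and meant to run once av_read_frame stops returning packets:

// Drain the decoder: a NULL packet puts it into flush mode, then keep
// receiving until it reports AVERROR_EOF (sketch only, error handling trimmed).
avcodec_send_packet(pCodecCtx, NULL);
while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
    // ...convert with sws_scale and display, exactly as inside the loop above...
}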

From: https://blog.51cto.com/u_12504263/5719071
