RK3568 Hardware Decoding with MPP (Part 2): Decoding an IPC Camera Stream with FFmpeg + MPP

In the previous article, MPP was successfully compiled and imported into the project. The next step is to implement a decoder on top of the MPI interface.

A note on the video source: the images come from an IPC camera. The project pulls the camera's RTSP stream with FFmpeg; the pulling itself is not covered here. All that matters is that after FFmpeg reads the stream, the compressed data is stored in an AVPacket structure.
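For context only, here is a minimal sketch of what that pull side can look like; the RTSP URL is a placeholder and error handling is reduced to the bare minimum:

    extern "C" {
    #include "libavformat/avformat.h"
    }

    // Minimal RTSP pull sketch (illustrative only; the URL is a placeholder).
    static int pullLoop()
    {
        AVFormatContext *fmt_ctx = NULL;
        avformat_network_init();
        if (avformat_open_input(&fmt_ctx, "rtsp://192.168.1.100:554/stream", NULL, NULL) < 0)
            return -1;                             // failed to open the RTSP stream
        avformat_find_stream_info(fmt_ctx, NULL);

        AVPacket *pkt = av_packet_alloc();
        while (av_read_frame(fmt_ctx, pkt) >= 0) {
            // pkt->data / pkt->size now hold one chunk of compressed stream data;
            // this is what gets handed to the MPP decoder described below.
            av_packet_unref(pkt);
        }
        av_packet_free(&pkt);
        avformat_close_input(&fmt_ctx);
        return 0;
    }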

1. How the MPI interface is used

The diagram in the official documentation already walks through how the MPI interface is used.

For the decoding flow itself, mpi_dec_test.c in the test directory is the reference.
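Condensed to its essentials, the decoding path through the MPI interface used by mpi_dec_test.c (and by the class further below) is:

    // Condensed MPI decoding lifecycle (error handling omitted); this mirrors what
    // the MppVideoDecode class below does, split across its methods.
    MppCtx  ctx = NULL;
    MppApi *mpi = NULL;
    mpp_create(&ctx, &mpi);                            // context + MPI function table

    RK_U32 need_split = 1;
    mpi->control(ctx, MPP_DEC_SET_PARSER_SPLIT_MODE, &need_split);
    mpp_init(ctx, MPP_CTX_DEC, MPP_VIDEO_CodingAVC);   // H.264 decoder

    // per input packet:
    MppPacket packet = NULL;                           // filled from the stream data
    MppFrame  frame  = NULL;
    mpi->decode_put_packet(ctx, packet);               // feed compressed data in
    mpi->decode_get_frame(ctx, &frame);                // fetch decoded frames (0..n per packet)

    // teardown:
    mpp_destroy(ctx);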

2. Converting an AVPacket into an MppPacket

The overall MPP usage flow follows the official mpi_dec_test.c. The official demo, however, feeds the decoder from a container file (an MP4, for example), while this project has to handle the raw stream data pulled by FFmpeg, which is stored in an AVPacket. MPP consumes MppPacket structures, so the compressed data in the AVPacket has to be wrapped into an MppPacket.
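The conversion itself is only a matter of pointing an MppPacket at the AVPacket's buffer, exactly as mppDecode() does further down:

    // Wrap the compressed data held by an AVPacket into an MppPacket.
    // mpp_packet_init does not copy the data; it only references avpacket->data,
    // so the AVPacket must stay valid until the packet has been consumed.
    MppPacket mpp_pkt = NULL;
    mpp_packet_init(&mpp_pkt, avpacket->data, avpacket->size);
    mpp_packet_set_pts(mpp_pkt, avpacket->pts);   // carry the presentation timestamp across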

3. Frame splitting

This issue is already explained in the official documentation: since what FFmpeg delivers is a raw stream rather than neatly framed input, the decoder's parser has to be told to split the stream into frames itself (see the control call sketched below).
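Concretely, that means enabling the parser's split mode before mpp_init(), as initMpiDecLoopData() does below:

    // Ask the decoder's parser to split the incoming stream into frames itself;
    // without this, every MppPacket would have to carry exactly one complete frame.
    RK_U32 need_split = 1;
    MPP_RET ret = mpi->control(ctx, MPP_DEC_SET_PARSER_SPLIT_MODE, &need_split);
    if (MPP_OK != ret)
        mpp_err("set split mode failed\n");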

4. Implementation

#mppvideodecode.h

#ifndef MPPVIDEODECODE_H
#define MPPVIDEODECODE_H

#define myDebug() qDebug() << "[" << this->metaObject()->className() << "]" << __FUNCTION__ << __LINE__

#include <QObject>
#include <QDebug>
#include <QVector>

#include "libavcodec/packet.h"
#include "rk_mpi.h"
#include "mpp_log.h"
#include "mpp_mem.h"
#include "mpi_dec_utils.h"
#include "utils.h"
#include "mpp_time.h"
#include "libavcodec/avcodec.h"

typedef struct
{
    MppCtx         ctx;
    MppApi        *mpi;
    MppBufferGroup frm_grp;
    size_t         max_usage;
    RK_S32         frame_count;   // number of decoded frames
} MpiDecLoopData;

class MppVideoDecode : public QObject
{
    Q_OBJECT
public:
    MppVideoDecode();
    void initMpiDecLoopData();                        // initialize the mpp decoder
    QVector<MppFrame> mppDecode(AVPacket* avpacket);  // decode one packet
    QString getFrameFormat(MppFrame frame);           // query a frame's pixel format
    void deInit();                                    // release resources
private:
    MppFrame  m_frame  = NULL;
    MppPacket m_packet = NULL;
    MpiDecLoopData m_data;
    QVector<MppFrame> m_vecMppFrame;                  // decoded frames
};

#endif // MPPVIDEODECODE_H

#mppvideodecode.cpp

#include "mppvideodecode.h"

MppVideoDecode::MppVideoDecode()
{
    initMpiDecLoopData();
}

void MppVideoDecode::deInit()
{
    if (m_packet) {
        mpp_packet_deinit(&m_packet);
        m_packet = NULL;
    }
    if (m_data.frm_grp) {
        mpp_buffer_group_put(m_data.frm_grp);
        m_data.frm_grp = NULL;
    }
    for (MppFrame frame : m_vecMppFrame) {
        if (frame) {
            mpp_frame_deinit(&frame);
            frame = NULL;
        }
    }
    m_vecMppFrame.clear();

    // if (m_data.ctx) {
    //     mpp_destroy(m_data.ctx);
    //     m_data.ctx = NULL;
    // }

    // if (!m_vecMppFrame.isEmpty()) {
    //     m_vecMppFrame.clear();
    // }
}

void MppVideoDecode::initMpiDecLoopData()
{
    MPP_RET ret = MPP_OK;                       // operation result
    MppCtx ctx  = NULL;
    MppApi *mpi = NULL;
    MpiCmd mpi_cmd = MPP_CMD_BASE;              // control command
    MppParam param = NULL;                      // control parameter
    RK_U32 need_split = 1;                      // enable frame splitting
    MppCodingType type = MPP_VIDEO_CodingAVC;   // H.264 stream

    memset(&m_data, 0, sizeof(m_data));

    ret = mpp_create(&ctx, &mpi);
    if (MPP_OK != ret) {
        mpp_err("mpp_create failed\n");
        if (m_data.ctx) {
            mpp_destroy(m_data.ctx);
            m_data.ctx = NULL;
        }
    }

    // let the parser split the incoming stream into frames internally
    mpi_cmd = MPP_DEC_SET_PARSER_SPLIT_MODE;
    param = &need_split;
    ret = mpi->control(ctx, mpi_cmd, param);
    if (MPP_OK != ret) {
        mpp_err("mpi->control failed\n");
        if (m_data.ctx) {
            mpp_destroy(m_data.ctx);
            m_data.ctx = NULL;
        }
    }

    // blocking mode for the input port (a value of 1 makes decode_put_packet
    // block when the internal input queue is full)
    mpi_cmd = MPP_SET_INPUT_BLOCK;
    param = &need_split;
    ret = mpi->control(ctx, mpi_cmd, param);
    if (MPP_OK != ret) {
        mpp_err("mpi->control failed\n");
        if (m_data.ctx) {
            mpp_destroy(m_data.ctx);
            m_data.ctx = NULL;
        }
    }

    ret = mpp_init(ctx, MPP_CTX_DEC, type);
    if (MPP_OK != ret) {
        mpp_err("mpp_init failed\n");
        if (m_data.ctx) {
            mpp_destroy(m_data.ctx);
            m_data.ctx = NULL;
        }
    }

    m_data.ctx = ctx;
    m_data.mpi = mpi;
    m_data.frame_count = 0;
}

QVector<MppFrame> MppVideoDecode::mppDecode(AVPacket* avpacket)
{
    RK_U32 pkt_done = 0;
    RK_U32 err_info = 0;
    MPP_RET ret = MPP_OK;
    MppCtx ctx  = m_data.ctx;
    MppApi *mpi = m_data.mpi;
    //RK_S64 t_s, t_e;

    mpp_packet_init(&m_packet, avpacket->data, avpacket->size);
    mpp_packet_set_pts(m_packet, avpacket->pts);   // presentation timestamp

    // t_s = mpp_time();

    // outer loop: keep trying until the packet has been accepted by the decoder
    do {
        RK_S32 times = 5;
        ret = mpi->decode_put_packet(ctx, m_packet);
        if (MPP_OK == ret)
            pkt_done = 1;

        // then get all available frames and release them
        do {
            RK_S32 get_frm = 0;   // whether this packet still yields a frame
            RK_U32 frm_eos = 0;   // end-of-stream flag

try_again:
            ret = mpi->decode_get_frame(ctx, &m_frame);
            if (MPP_ERR_TIMEOUT == ret) {
                if (times > 0) {
                    times--;
                    msleep(2);
                    goto try_again;
                }
                mpp_err("decode_get_frame failed too much time\n");
            }
            if (MPP_OK != ret) {
                mpp_err("decode_get_frame failed ret %d\n", ret);
                break;
            }

            if (m_frame) {
                if (mpp_frame_get_info_change(m_frame)) {
                    RK_U32 width  = mpp_frame_get_width(m_frame);
                    RK_U32 height = mpp_frame_get_height(m_frame);
                    RK_U32 hor_stride = mpp_frame_get_hor_stride(m_frame);
                    RK_U32 ver_stride = mpp_frame_get_ver_stride(m_frame);
                    RK_U32 buf_size   = mpp_frame_get_buf_size(m_frame);

                    mpp_log("decode_get_frame get info changed found\n");
                    mpp_log("decoder require buffer w:h [%d:%d] stride [%d:%d] buf_size %d",
                            width, height, hor_stride, ver_stride, buf_size);

                    ret = mpp_buffer_group_get_internal(&m_data.frm_grp, MPP_BUFFER_TYPE_ION);
                    if (ret) {
                        mpp_err("get mpp buffer group failed ret %d\n", ret);
                        break;
                    }
                    mpi->control(ctx, MPP_DEC_SET_EXT_BUF_GROUP, m_data.frm_grp);
                    mpi->control(ctx, MPP_DEC_SET_INFO_CHANGE_READY, NULL);
                } else {
                    err_info = mpp_frame_get_errinfo(m_frame) | mpp_frame_get_discard(m_frame);
                    if (err_info) {
                        mpp_log("decoder_get_frame get err info:%d discard:%d.\n",
                                mpp_frame_get_errinfo(m_frame), mpp_frame_get_discard(m_frame));
                    }
                    // MppBuffer buffer = mpp_frame_get_buffer(m_frame);
                    // myDebug() << "buffer addr:" << buffer;
                }

                // myDebug() << "the frame format is:" << getFrameFormat(m_frame);  // YUV or RGB
                m_data.frame_count++;
                mpp_log("decode_get_frame get frame %d\n", m_data.frame_count);
                frm_eos = mpp_frame_get_eos(m_frame);   // end-of-stream flag on the frame
                get_frm = 1;
                m_vecMppFrame.append(m_frame);
            }

            // track the runtime frame memory usage
            if (m_data.frm_grp) {
                size_t usage = mpp_buffer_group_usage(m_data.frm_grp);
                if (usage > m_data.max_usage)
                    m_data.max_usage = usage;
            }

            if (frm_eos) {
                mpp_log("found last frame\n");
                break;
            }
            if (get_frm)
                continue;
            break;
        } while (1);

        if (pkt_done)
            break;

        /*
         * Why sleep here:
         * decode_put_packet fails when the decoder's internal packet queue is
         * full; wait for a packet to be consumed. Hardware decoding of a 1080p
         * frame usually takes about 2 ms, so sleeping 3 ms is enough.
         */
        msleep(3);
    } while (1);

    // t_e = mpp_time();
    // myDebug() << "time to decode one packet:" << (t_e - t_s) / 1000 << "ms";

    for (const MppFrame frame : m_vecMppFrame) {
        myDebug() << "frame addr:" << frame;
    }
    return m_vecMppFrame;
}

QString MppVideoDecode::getFrameFormat(MppFrame frame)
{
    MppFrameFormat fmt = mpp_frame_get_fmt(frame);
    switch (fmt) {
    case MPP_FMT_YUV420SP:        return "MPP_FMT_YUV420SP";
    case MPP_FMT_YUV420SP_10BIT:  return "MPP_FMT_YUV420SP_10BIT";
    case MPP_FMT_YUV422SP:        return "MPP_FMT_YUV422SP";
    case MPP_FMT_YUV422SP_10BIT:  return "MPP_FMT_YUV422SP_10BIT";
    case MPP_FMT_YUV420P:         return "MPP_FMT_YUV420P";
    case MPP_FMT_YUV420SP_VU:     return "MPP_FMT_YUV420SP_VU";
    case MPP_FMT_YUV422P:         return "MPP_FMT_YUV422P";
    case MPP_FMT_YUV422SP_VU:     return "MPP_FMT_YUV422SP_VU";
    case MPP_FMT_YUV422_YUYV:     return "MPP_FMT_YUV422_YUYV";
    case MPP_FMT_YUV422_YVYU:     return "MPP_FMT_YUV422_YVYU";
    case MPP_FMT_YUV422_UYVY:     return "MPP_FMT_YUV422_UYVY";
    case MPP_FMT_YUV422_VYUY:     return "MPP_FMT_YUV422_VYUY";
    case MPP_FMT_YUV400:          return "MPP_FMT_YUV400";
    case MPP_FMT_YUV440SP:        return "MPP_FMT_YUV440SP";
    case MPP_FMT_YUV411SP:        return "MPP_FMT_YUV411SP";
    case MPP_FMT_YUV444SP:        return "MPP_FMT_YUV444SP";
    case MPP_FMT_YUV444P:         return "MPP_FMT_YUV444P";
    case MPP_FMT_YUV444SP_10BIT:  return "MPP_FMT_YUV444SP_10BIT";
    case MPP_FMT_AYUV2BPP:        return "MPP_FMT_AYUV2BPP";
    case MPP_FMT_AYUV1BPP:        return "MPP_FMT_AYUV1BPP";
    case MPP_FMT_YUV_BUTT:        return "MPP_FMT_YUV_BUTT";
    case MPP_FMT_RGB565:          return "MPP_FMT_RGB565";
    case MPP_FMT_BGR565:          return "MPP_FMT_BGR565";
    case MPP_FMT_RGB555:          return "MPP_FMT_RGB555";
    case MPP_FMT_BGR555:          return "MPP_FMT_BGR555";
    case MPP_FMT_RGB444:          return "MPP_FMT_RGB444";
    case MPP_FMT_BGR444:          return "MPP_FMT_BGR444";
    case MPP_FMT_RGB888:          return "MPP_FMT_RGB888";
    case MPP_FMT_BGR888:          return "MPP_FMT_BGR888";
    case MPP_FMT_RGB101010:       return "MPP_FMT_RGB101010";
    case MPP_FMT_BGR101010:       return "MPP_FMT_BGR101010";
    case MPP_FMT_ARGB8888:        return "MPP_FMT_ARGB8888";
    case MPP_FMT_ABGR8888:        return "MPP_FMT_ABGR8888";
    case MPP_FMT_BGRA8888:        return "MPP_FMT_BGRA8888";
    case MPP_FMT_RGBA8888:        return "MPP_FMT_RGBA8888";
    case MPP_FMT_ARGB4444:        return "MPP_FMT_ARGB4444";
    case MPP_FMT_ARGB1555:        return "MPP_FMT_ARGB1555";
    case MPP_FMT_RGB_BUTT:        return "MPP_FMT_RGB_BUTT";
    case MPP_FMT_BUTT:            return "MPP_FMT_BUTT";
    }
    return "UNKNOWN_FORMAT";   // fall-through for values not listed above
}

A complete reference implementation is available at https://github.com/MUZLATAN/ffmpeg_rtsp_mpp
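To show how the class is meant to be driven, here is a rough usage sketch; it assumes the FFmpeg pull loop described earlier, and videoStreamIndex / renderFrame() are placeholders that are not part of the code above:

    // Hypothetical caller (sketch only): hand each video AVPacket pulled by FFmpeg
    // to the decoder and walk the frames it has produced so far.
    MppVideoDecode decoder;                        // mpp_create()/mpp_init() run in the constructor
    AVPacket *pkt = av_packet_alloc();
    while (av_read_frame(fmt_ctx, pkt) >= 0) {
        if (pkt->stream_index == videoStreamIndex) {
            QVector<MppFrame> frames = decoder.mppDecode(pkt);
            for (MppFrame f : frames) {
                // renderFrame(f);                 // e.g. map the NV12 buffer and display it
            }
        }
        av_packet_unref(pkt);
    }
    av_packet_free(&pkt);
    decoder.deInit();                              // release MPP resources when the stream ends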

5. decode_get_frame returns a NULL frame

In my case mpp_create, mpp_init, mpp_packet_init, decode_put_packet and even decode_get_frame all returned 0 (success), yet the frame delivered by decode_get_frame was NULL.

The causes usually given online fall into two groups: (1) the coding type passed to mpp_init does not match the actual stream (for example the camera outputs H.265 while mpp_init is told H.264); (2) the data FFmpeg pulled into the AVPacket is not enough to make up a complete frame, so no frame can be decoded.

Neither of these was my problem. Because data has to be pulled from the camera continuously, the FFmpeg side runs in a loop:

while (1) {
    ...
    avformat_open_input();
    ...
}

When integrating MPP, I had also placed mpp_create() and mpp_init() inside that loop:

while (1) {
    ...
    avformat_open_input();
    mpp() {
        mpp_create();
        mpp_init();
        decode_put_packet();
        decode_get_frame();
    }
    ...
}

As a result, every pass through the pull loop created a brand-new MPP context, which is why the frames read back were NULL. After restructuring the code as follows, frames were decoded successfully:

mpp() {
    mpp_create();
    mpp_init();
}

while (1) {
    ...
    avformat_open_input();
    ...
    decode_put_packet();
    decode_get_frame();
    ...
}
