1. Preface
This article covers decoding an H.264 video stream with FFmpeg. A few notes before the code:
1. The H.264 frames are expected in Annex B format; if your stream uses AVCC format you will need to adapt the code yourself (a quick start-code check is sketched right after this list).
2. The input is a raw H.264 elementary stream, not a video file to be demuxed (I can add that code later if there is a need).
3. The output can be RGB24 or YUV420; other formats require only minor changes.
4. The FFmpeg APIs have changed considerably across releases. The code below targets FFmpeg 3.4.2 "Cantor", 3.3.7 "Hilbert", and similar versions; for the older interfaces, see the old-version code in section 3.
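Regarding point 1: Annex B delimits NAL units with 0x000001 or 0x00000001 start codes, while AVCC prefixes each NAL unit with its length. The small helper below is only my own illustration for checking which flavor you have; it is not part of the decoder code that follows.

#include <stddef.h>

/* Returns 1 if the buffer begins with an Annex B start code
 * (0x000001 or 0x00000001), 0 otherwise. */
static int has_annexb_start_code(const unsigned char *buf, size_t size)
{
    if (size >= 3 && buf[0] == 0 && buf[1] == 0 && buf[2] == 1)
        return 1;
    if (size >= 4 && buf[0] == 0 && buf[1] == 0 && buf[2] == 0 && buf[3] == 1)
        return 1;
    return 0;
}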
2. New version
FFmpegVideoDecoder.h
#include <libavcodec/avcodec.h>

/**
 * Initialize the video stream decoder.
 * @param ctx codec parameters (AVCodecParameters) describing the stream
 * @see FFmpeg_H264DecoderInit for an H.264-specific initializer
 * @return 0 on success, <0 on failure
 */
int FFmpeg_VideoDecoderInit(AVCodecParameters *ctx);

/**
 * Initialize an H.264 video stream decoder.
 * @return 0 on success, <0 on failure
 */
int FFmpeg_H264DecoderInit(void);

/**
 * Release the decoder and all associated resources.
 * @return 0 on success, <0 on failure
 */
int FFmpeg_VideoDecoderRelease(void);

/**
 * Decode video stream data.
 * @param inbuf      raw (Annex B) video stream data
 * @param inbufSize  size of inbuf in bytes
 * @param framePara  receives frame parameters: {width, height, linesize[0], linesize[1], linesize[2]} (at least 5 ints)
 * @param outRGBBuf  caller-allocated buffer for RGB24 output (width * height * 3 bytes), or NULL
 * @param outYUVBuf  receives a pointer to the decoded frame's YUV plane data (no copy is made), or NULL
 * @return 1 if a frame was decoded, 0 if no frame is available yet, <0 on error
 */
int FFmpeg_H264Decode(unsigned char *inbuf, int inbufSize, int *framePara, unsigned char *outRGBBuf, unsigned char **outYUVBuf);
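Before the implementation, here is how a caller might drive this interface on a stream of Annex B access units. This is only a sketch: the input source read_next_access_unit() is a hypothetical helper you would supply yourself, and the fixed 1920x1080 RGB buffer size is a placeholder. The call sequence (init, decode per packet, release) is what matters.

#include <stdio.h>
#include <stdlib.h>
#include "FFmpegVideoDecoder.h"

/* Hypothetical input source: fills buf with the next Annex B access unit
 * and returns its size, or 0 when the stream is finished. */
extern int read_next_access_unit(unsigned char *buf, int bufCap);

int main(void)
{
    if (FFmpeg_H264DecoderInit() != 0) {
        printf("decoder init failed\n");
        return -1;
    }

    unsigned char inbuf[512 * 1024];
    int framePara[5] = { 0 };
    /* RGB24 buffer: must hold at least width * height * 3 bytes of the stream. */
    unsigned char *rgbBuf = malloc(1920 * 1080 * 3);

    int inbufSize;
    while ((inbufSize = read_next_access_unit(inbuf, (int)sizeof(inbuf))) > 0) {
        int ret = FFmpeg_H264Decode(inbuf, inbufSize, framePara, rgbBuf, NULL);
        if (ret == 1) {
            /* rgbBuf now holds one RGB24 frame of framePara[0] x framePara[1]. */
            printf("decoded frame %dx%d\n", framePara[0], framePara[1]);
        } else if (ret < 0) {
            printf("decode error\n");
            break;
        }
        /* ret == 0: the decoder needs more input, keep feeding. */
    }

    free(rgbBuf);
    FFmpeg_VideoDecoderRelease();
    return 0;
}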
FFmpegVideoDecoder.c
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include "FFmpegVideoDecoder.h"

struct AVCodecContext *pAVCodecCtx_decoder = NULL;
struct AVCodec *pAVCodec_decoder;
struct AVPacket mAVPacket_decoder;
struct AVFrame *pAVFrame_decoder = NULL;
struct SwsContext *pImageConvertCtx_decoder = NULL;
struct AVFrame *pFrameYUV_decoder = NULL;

int FFmpeg_VideoDecoderInit(AVCodecParameters *codecParameters)
{
    if (!codecParameters) {
        CPrintf("Source codec context is NULL."); /* CPrintf/CPrintf1 are not provided here; replace with printf in your project */
        return -1;
    }

    FFmpeg_VideoDecoderRelease();

    avcodec_register_all();

    pAVCodec_decoder = avcodec_find_decoder(codecParameters->codec_id);
    if (!pAVCodec_decoder) {
        CPrintf1("Can not find codec:%d\n", codecParameters->codec_id);
        return -2;
    }

    pAVCodecCtx_decoder = avcodec_alloc_context3(pAVCodec_decoder);
    if (!pAVCodecCtx_decoder) {
        CPrintf("Failed to alloc codec context.");
        FFmpeg_VideoDecoderRelease();
        return -3;
    }

    if (avcodec_parameters_to_context(pAVCodecCtx_decoder, codecParameters) < 0) {
        CPrintf("Failed to copy avcodec parameters to codec context.");
        FFmpeg_VideoDecoderRelease();
        return -3;
    }

    if (avcodec_open2(pAVCodecCtx_decoder, pAVCodec_decoder, NULL) < 0) {
        CPrintf("Failed to open h264 decoder");
        FFmpeg_VideoDecoderRelease();
        return -4;
    }

    av_init_packet(&mAVPacket_decoder);

    pAVFrame_decoder = av_frame_alloc();
    pFrameYUV_decoder = av_frame_alloc();

    return 0;
}

int FFmpeg_H264DecoderInit()
{
    avcodec_register_all();

    AVCodec *pAVCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!pAVCodec) {
        CPrintf("can not find H264 codec\n");
        return -1;
    }

    AVCodecContext *pAVCodecCtx = avcodec_alloc_context3(pAVCodec);
    if (pAVCodecCtx == NULL) {
        CPrintf("Could not alloc video context!\n");
        return -2;
    }

    AVCodecParameters *codecParameters = avcodec_parameters_alloc();
    if (avcodec_parameters_from_context(codecParameters, pAVCodecCtx) < 0) {
        CPrintf("Failed to copy avcodec parameters from codec context.");
        avcodec_parameters_free(&codecParameters);
        avcodec_free_context(&pAVCodecCtx);
        return -3;
    }

    int ret = FFmpeg_VideoDecoderInit(codecParameters);

    /* The local context and parameters are only templates; free them regardless of the result. */
    avcodec_parameters_free(&codecParameters);
    avcodec_free_context(&pAVCodecCtx);

    return ret;
}

int FFmpeg_VideoDecoderRelease()
{
    if (pAVCodecCtx_decoder != NULL) {
        avcodec_free_context(&pAVCodecCtx_decoder);
        pAVCodecCtx_decoder = NULL;
    }

    if (pAVFrame_decoder != NULL) {
        av_frame_free(&pAVFrame_decoder);  /* av_frame_free() also unrefs the frame */
        pAVFrame_decoder = NULL;
    }

    if (pFrameYUV_decoder != NULL) {
        av_frame_free(&pFrameYUV_decoder);
        pFrameYUV_decoder = NULL;
    }

    if (pImageConvertCtx_decoder != NULL) {
        sws_freeContext(pImageConvertCtx_decoder);
        pImageConvertCtx_decoder = NULL;   /* avoid a dangling pointer / double free */
    }

    av_packet_unref(&mAVPacket_decoder);

    return 0;
}

int FFmpeg_H264Decode(unsigned char *inbuf, int inbufSize, int *framePara, unsigned char *outRGBBuf, unsigned char **outYUVBuf)
{
    if (!pAVCodecCtx_decoder || !pAVFrame_decoder || !inbuf || inbufSize <= 0 || !framePara || (!outRGBBuf && !outYUVBuf)) {
        return -1;
    }

    av_frame_unref(pAVFrame_decoder);
    av_frame_unref(pFrameYUV_decoder);

    framePara[0] = framePara[1] = 0;

    mAVPacket_decoder.data = inbuf;
    mAVPacket_decoder.size = inbufSize;

    int ret = avcodec_send_packet(pAVCodecCtx_decoder, &mAVPacket_decoder);
    if (ret == 0) {
        ret = avcodec_receive_frame(pAVCodecCtx_decoder, pAVFrame_decoder);
        if (ret == 0) {
            framePara[0] = pAVFrame_decoder->width;
            framePara[1] = pAVFrame_decoder->height;

            if (outYUVBuf) {
                /* Hand back the decoder's plane pointers without copying. */
                *outYUVBuf = (unsigned char *)pAVFrame_decoder->data;
                framePara[2] = pAVFrame_decoder->linesize[0];
                framePara[3] = pAVFrame_decoder->linesize[1];
                framePara[4] = pAVFrame_decoder->linesize[2];
            }
            else if (outRGBBuf) {
                pFrameYUV_decoder->data[0] = outRGBBuf;
                pFrameYUV_decoder->data[1] = NULL;
                pFrameYUV_decoder->data[2] = NULL;
                pFrameYUV_decoder->data[3] = NULL;

                /* RGB24 is packed: only plane 0 is used, stride = width * 3. */
                int linesize[4] = { pAVCodecCtx_decoder->width * 3, 0, 0, 0 };

                pImageConvertCtx_decoder = sws_getContext(pAVCodecCtx_decoder->width, pAVCodecCtx_decoder->height, AV_PIX_FMT_YUV420P,
                                                          pAVCodecCtx_decoder->width, pAVCodecCtx_decoder->height, AV_PIX_FMT_RGB24,
                                                          SWS_FAST_BILINEAR, NULL, NULL, NULL);
                sws_scale(pImageConvertCtx_decoder, (const uint8_t* const *)pAVFrame_decoder->data, pAVFrame_decoder->linesize,
                          0, pAVCodecCtx_decoder->height, pFrameYUV_decoder->data, linesize);
                sws_freeContext(pImageConvertCtx_decoder);
                pImageConvertCtx_decoder = NULL;
            }
            return 1;  /* one frame decoded */
        }
        else if (ret == AVERROR(EAGAIN)) {
            return 0;  /* decoder needs more input */
        }
        else {
            return -1; /* decoding error */
        }
    }

    return 0;
}
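One practical note on the send/receive API: at end of stream the decoder may still hold delayed frames, and FFmpeg_H264Decode() as written never drains them because it only sends data packets. The sketch below is not part of the original code; it reuses the file-scope globals above and shows the standard draining pattern: send a NULL packet to enter flush mode, then receive until no frames remain.

/* Drain frames still buffered inside the decoder at end of stream. */
static void FFmpeg_H264DecodeFlush(void)
{
    if (!pAVCodecCtx_decoder || !pAVFrame_decoder)
        return;

    /* A NULL packet puts the decoder into draining (flush) mode. */
    if (avcodec_send_packet(pAVCodecCtx_decoder, NULL) < 0)
        return;

    /* avcodec_receive_frame() returns 0 for each remaining frame,
     * then AVERROR_EOF once the decoder is fully drained. */
    while (avcodec_receive_frame(pAVCodecCtx_decoder, pAVFrame_decoder) == 0) {
        /* Consume the frame here (convert/display/save), then release it. */
        av_frame_unref(pAVFrame_decoder);
    }
}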
3. Old version
FFmpegVideoDecoder.h
#include <libavcodec/avcodec.h>

int FFmpeg_VideoDecoderInit(AVCodecContext *ctx);
int FFmpeg_H264DecoderInit(void);
int FFmpeg_VideoDecoderRelease(void);
int FFmpeg_H264Decode(unsigned char *inbuf, int inbufSize, int *framePara, unsigned char *outRGBBuf, unsigned char **outYUVBuf);
FFmpegVideoDecoder.c
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include "FFmpegVideoDecoder.h"

struct AVCodecContext *pAVCodecCtx = NULL;
struct AVCodec *pAVCodec;
struct AVPacket mAVPacket;
struct AVFrame *pAVFrame = NULL;
struct SwsContext *pImageConvertCtx = NULL;
struct AVFrame *pFrameYUV = NULL;

int FFmpeg_VideoDecoderInit(AVCodecContext *ctx)
{
    if (!ctx) {
        cv_printf("Source codec context is NULL."); /* cv_printf is not provided here; replace with printf in your project */
        return -1;
    }

    FFmpeg_VideoDecoderRelease();

    avcodec_register_all();

    pAVCodec = avcodec_find_decoder(ctx->codec_id);
    if (!pAVCodec) {
        cv_printf("Can not find codec:%d\n", ctx->codec_id);
        return -2;
    }

    pAVCodecCtx = avcodec_alloc_context3(pAVCodec);
    if (!pAVCodecCtx || avcodec_copy_context(pAVCodecCtx, ctx) != 0) {
        cv_printf("Failed to alloc codec context.");
        FFmpeg_VideoDecoderRelease();
        return -3;
    }

    if (avcodec_open2(pAVCodecCtx, pAVCodec, NULL) < 0) {
        cv_printf("Failed to open h264 decoder");
        FFmpeg_VideoDecoderRelease();
        return -4;
    }

    av_init_packet(&mAVPacket);

    pAVFrame = av_frame_alloc();
    pFrameYUV = av_frame_alloc();

    return 0;
}

int FFmpeg_H264DecoderInit()
{
    avcodec_register_all();

    AVCodec *pAVCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!pAVCodec) {
        cv_printf("can not find H264 codec\n");
        return -1;
    }

    AVCodecContext *pAVCodecCtx = avcodec_alloc_context3(pAVCodec);

    int ret = FFmpeg_VideoDecoderInit(pAVCodecCtx);

    /* The local context is only a template for the decoder's own context;
     * free it regardless of the result. */
    if (pAVCodecCtx) {
        avcodec_free_context(&pAVCodecCtx);
    }

    return ret;
}

int FFmpeg_VideoDecoderRelease()
{
    if (pAVCodecCtx != NULL) {
        avcodec_close(pAVCodecCtx);
        avcodec_free_context(&pAVCodecCtx);
        pAVCodecCtx = NULL;
    }

    if (pAVFrame != NULL) {
        av_frame_free(&pAVFrame);   /* av_frame_free() also unrefs the frame */
        pAVFrame = NULL;
    }

    if (pFrameYUV != NULL) {
        av_frame_free(&pFrameYUV);
        pFrameYUV = NULL;
    }

    if (pImageConvertCtx != NULL) {
        sws_freeContext(pImageConvertCtx);
        pImageConvertCtx = NULL;    /* avoid a dangling pointer / double free */
    }

    av_packet_unref(&mAVPacket);

    return 0;
}

int FFmpeg_H264Decode(unsigned char *inbuf, int inbufSize, int *framePara, unsigned char *outRGBBuf, unsigned char **outYUVBuf)
{
    if (inbuf == NULL || inbufSize <= 0) {
        return -1;
    }

    av_frame_unref(pAVFrame);
    av_frame_unref(pFrameYUV);

    framePara[0] = framePara[1] = 0;

    mAVPacket.data = inbuf;
    mAVPacket.size = inbufSize;

    int len = -1, got_picture = 0;
    len = avcodec_decode_video2(pAVCodecCtx, pAVFrame, &got_picture, &mAVPacket);
    if (len < 0) {
        cv_printf("Decode error:%d\n", len);
        return len;
    }

    if (got_picture > 0) {
        framePara[0] = pAVFrame->width;
        framePara[1] = pAVFrame->height;

        if (outYUVBuf) {
            /* Hand back the decoder's plane pointers without copying. */
            *outYUVBuf = (unsigned char *)pAVFrame->data;
            framePara[2] = pAVFrame->linesize[0];
            framePara[3] = pAVFrame->linesize[1];
            framePara[4] = pAVFrame->linesize[2];
        }
        else if (outRGBBuf) {
            pFrameYUV->data[0] = outRGBBuf;
            pFrameYUV->data[1] = NULL;
            pFrameYUV->data[2] = NULL;
            pFrameYUV->data[3] = NULL;

            /* RGB24 is packed: only plane 0 is used, stride = width * 3. */
            int linesize[4] = { pAVCodecCtx->width * 3, 0, 0, 0 };

            pImageConvertCtx = sws_getContext(pAVCodecCtx->width, pAVCodecCtx->height, AV_PIX_FMT_YUV420P,
                                              pAVCodecCtx->width, pAVCodecCtx->height, AV_PIX_FMT_RGB24,
                                              SWS_FAST_BILINEAR, NULL, NULL, NULL);
            sws_scale(pImageConvertCtx, (const uint8_t* const *)pAVFrame->data, pAVFrame->linesize,
                      0, pAVCodecCtx->height, pFrameYUV->data, linesize);
            sws_freeContext(pImageConvertCtx);
            pImageConvertCtx = NULL;
        }
    }

    return len; /* bytes consumed; a frame was produced only if got_picture > 0 */
}
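For completeness, draining buffered frames works differently with the old decode API: you keep calling avcodec_decode_video2() with an empty packet until got_picture stays 0. The helper below is not part of the original code, just a minimal sketch against the globals above.

/* Drain frames still buffered in the decoder at end of stream (old API).
 * An empty packet (data = NULL, size = 0) tells avcodec_decode_video2()
 * to flush; stop once it no longer produces a picture. */
static void FFmpeg_H264DecodeFlushOld(void)
{
    if (!pAVCodecCtx || !pAVFrame)
        return;

    AVPacket flushPacket;
    av_init_packet(&flushPacket);
    flushPacket.data = NULL;
    flushPacket.size = 0;

    int got_picture = 0;
    do {
        got_picture = 0;
        if (avcodec_decode_video2(pAVCodecCtx, pAVFrame, &got_picture, &flushPacket) < 0)
            break;
        if (got_picture) {
            /* Consume the frame here (convert/display/save), then release it. */
            av_frame_unref(pAVFrame);
        }
    } while (got_picture);
}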