Learning step by step, making a little progress every day.
ffmpeg + x264 + qt: decoding and encoding H.264
Decoding: decode an H.264-encoded MP4 file and save each decoded frame as an RGB PPM image.
Encoding: encode the saved RGB (PPM) frames back into an H.264 stream.
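A note on the intermediate format (this is a general property of PPM, not something added to the code below): a binary P6 PPM file is a short text header followed by raw, unpadded RGB24 samples, for example

P6
720 576
255
<720*576*3 bytes of binary RGB data>

so the encoder only has to skip strlen("P6\n720 576\n255\n") header bytes and can treat the rest of the file as one raw frame.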
Code:
Decoding part:
.pro
TEMPLATE = app
CONFIG += console
CONFIG -= qt
SOURCES += main.cpp
INCLUDEPATH += /usr/local/include
LIBS += -L/usr/local/lib -lavformat -lavcodec -lavutil -lswscale \
-lpostproc -lavfilter -lswresample -lavdevice -lx264 -lm -lz -lpthread
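To build and run (a minimal sketch; it assumes qmake and the FFmpeg/x264 libraries are installed under /usr/local, and the executable name depends on the .pro/directory name you actually use):

qmake
make
./decode    # hypothetical binary name; expects ../video/0.mp4 and writes ../video/frameN.ppm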
main.cpp
#include <iostream>
#include <cstdio>   // fopen/fprintf/fwrite used by SaveFrame
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
}
using namespace std;
void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame)
{
FILE *pFile;
char szFilename[32];
int y;
sprintf(szFilename, "../video/frame%d.ppm", iFrame);
pFile=fopen(szFilename, "wb");
if(pFile==NULL)
return;
// P6 PPM header: magic number, width, height, maximum color value, then raw RGB24 rows
fprintf(pFile, "P6\n%d %d\n255\n", width, height);
// write one row at a time; linesize[0] may be padded wider than width*3
for(y=0; y<height; y++)
fwrite(pFrame->data[0]+y*pFrame->linesize[0], 1, width*3, pFile);
fclose(pFile);
}
int main(int argc, char *argv[])
{
AVFormatContext *pFormatCtx = NULL;
int i, videoStream;
AVCodecContext *pCodecCtx = NULL;
AVCodec *pCodec = NULL;
AVFrame *pFrame = NULL;
AVFrame *pFrameRGB = NULL;
AVPacket packet;
int frameFinished;
int numBytes;
uint8_t *buffer = NULL;
AVDictionary *optionsDict = NULL;
struct SwsContext *sws_ctx = NULL;
av_register_all();
if(avformat_open_input(&pFormatCtx, "../video/0.mp4", NULL, NULL)!=0)
return -1;
if(avformat_find_stream_info(pFormatCtx, NULL)<0)
return -1;
av_dump_format(pFormatCtx, 0, "../video/0.mp4", 0); // the input path is hard-coded above, so don't rely on argv[1]
videoStream=-1;
for(i=0; i<pFormatCtx->nb_streams; i++)
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
videoStream=i;
break;
}
if(videoStream==-1)
return -1;
pCodecCtx=pFormatCtx->streams[videoStream]->codec;
pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL) {
fprintf(stderr, "Unsupported codec!\n");
return -1;
}
if(avcodec_open2(pCodecCtx, pCodec, &optionsDict)<0)
return -1;
pFrame=avcodec_alloc_frame();
pFrameRGB=avcodec_alloc_frame();
if(pFrameRGB==NULL)
return -1;
numBytes=avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width,
pCodecCtx->height);
buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
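// conversion context: the decoder's native pixel format -> packed RGB24 at the same resolution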
sws_ctx =
sws_getContext
(
pCodecCtx->width,
pCodecCtx->height,
pCodecCtx->pix_fmt,
pCodecCtx->width,
pCodecCtx->height,
PIX_FMT_RGB24,
SWS_BILINEAR,
NULL,
NULL,
NULL
);
avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
pCodecCtx->width, pCodecCtx->height);
i=0;
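// read packets, decode the video ones, convert each finished frame to RGB and dump it as a PPM file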
while(av_read_frame(pFormatCtx, &packet)>=0)
{
if(packet.stream_index==videoStream)
{
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished,
&packet);
if(frameFinished)
{
sws_scale
(
sws_ctx,
(uint8_t const * const *)pFrame->data,
pFrame->linesize,
0,
pCodecCtx->height,
pFrameRGB->data,
pFrameRGB->linesize
);
++i;
SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height,
i);
}
}
av_free_packet(&packet);
}
sws_freeContext(sws_ctx);
av_free(buffer);
av_free(pFrameRGB);
av_free(pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return 0;
}
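To spot-check the decoder's output (assuming the ffmpeg command-line tools are installed), the PPM sequence can be viewed or reassembled directly, e.g.:

ffplay ../video/frame1.ppm
ffmpeg -framerate 25 -i ../video/frame%d.ppm ../video/check.mp4    # check.mp4 is just a scratch file name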
Encoding part:
TEMPLATE = app
CONFIG += console
CONFIG -= qt
SOURCES += main.cpp
INCLUDEPATH += /usr/local/include
LIBS += -L/usr/local/lib -lavformat -lavcodec -lavutil -lswscale \
-lpostproc -lavfilter -lswresample -lavdevice -lx264 -lm -lz -lpthread
main.cpp
#include <iostream>
#include <cstdio>    // fopen/fread/fwrite/sprintf
#include <cstring>   // strlen/memset
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <errno.h>
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
}
using namespace std;
int main()
{
const char *p = "P6\n720 576\n255\n"; // the PPM header written by the decoder; its length is the offset of the raw RGB data
int headlen = strlen(p);
struct stat statbuff;
memset(&statbuff, 0, sizeof(statbuff));
if(stat("../video/frame10.ppm", &statbuff) < 0) // any one of the dumped frames works here; it is only used to get the file size
{
perror("stat error");
return -1;
}
int filelen = statbuff.st_size;
int datalen = filelen - headlen;
cout << "filesize: " << filelen << endl;
cout << "headlen: " << headlen << endl;
cout << "datalen: " << datalen << endl;
AVFrame *pRGBFrame = avcodec_alloc_frame(); // use the allocator so all fields are initialized (new AVFrame leaves them undefined)
AVFrame *pYUVFrame = avcodec_alloc_frame();
pYUVFrame->pts = 0;                         // start the presentation timestamp at 0
AVCodecContext *pCodecCtx = NULL;
AVCodec *pCodecH264;
uint8_t *yuv_buff;
uint8_t *rgb_buff;
av_register_all();
avcodec_register_all();
pCodecH264 = avcodec_find_encoder(CODEC_ID_H264);
if(pCodecH264 == NULL)
{
return -1;
}
pCodecCtx = avcodec_alloc_context3(pCodecH264);
if(pCodecCtx == NULL)
{
return -1;
}
pCodecCtx->bit_rate = 4000000;        // target bitrate, 4 Mbit/s
pCodecCtx->width = 720;               // must match the PPM frames produced by the decoder
pCodecCtx->height = 576;
AVRational rate;
rate.num = 1;
rate.den = 25;
pCodecCtx->time_base = rate;          // time base of 1/25 -> 25 fps
pCodecCtx->gop_size = 8;              // at most one intra frame every 8 frames
pCodecCtx->max_b_frames = 1;
pCodecCtx->pix_fmt = PIX_FMT_YUV420P; // libx264 expects planar YUV, so the RGB input is converted below
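// Optional (an assumption, not part of the original code): libx264 exposes private options such as
// "preset" that can be set before avcodec_open2 via av_opt_set from <libavutil/opt.h>, e.g.
// av_opt_set(pCodecCtx->priv_data, "preset", "ultrafast", 0);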
if(avcodec_open2(pCodecCtx, pCodecH264,NULL) < 0)
{
return -1;
}
int yuvsize = pCodecCtx->width * pCodecCtx->height;
yuv_buff = (uint8_t *)malloc((yuvsize*3)/2);
rgb_buff = (uint8_t *)malloc(datalen);
int outbuf_size = 1024*1024;
uint8_t * outbuf= (uint8_t*)malloc(outbuf_size);
int u_size = 0;
FILE *f = NULL;
const char *filename = "../video/test.h264";
f = fopen(filename, "wb");
if(f == NULL)
{
perror("fopen h264 error");
return -1;
}
// the PPM frames are RGB24 (see avpicture_fill below), so convert RGB24 -> YUV420P, not BGR24
SwsContext *scxt = sws_getContext(pCodecCtx->width, pCodecCtx->height, PIX_FMT_RGB24,
pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P,
SWS_POINT, NULL, NULL, NULL);
AVPacket avpkt;
int iFrame = 1;
FILE *frgb = NULL;
while(true)
{
char szFilename[32];
sprintf(szFilename, "../video/frame%d.ppm", iFrame);
frgb = fopen(szFilename, "rb"); // binary mode; the PPM pixel data is raw bytes
if(frgb == NULL)
{
break;
}
fseek(frgb,headlen,SEEK_SET);
fread(rgb_buff,sizeof(uint8_t),datalen,frgb);
fclose(frgb);
frgb = NULL;
iFrame++;
avpicture_fill((AVPicture*)pRGBFrame,rgb_buff,PIX_FMT_RGB24,pCodecCtx->width,pCodecCtx->height);
avpicture_fill((AVPicture*)pYUVFrame,yuv_buff,PIX_FMT_YUV420P,pCodecCtx->width,pCodecCtx->height);
sws_scale(scxt, pRGBFrame->data, pRGBFrame->linesize,0,pCodecCtx->height,pYUVFrame->data,pYUVFrame->linesize);
int got_packet_ptr = 0;
av_init_packet(&avpkt);
avpkt.data = outbuf;
avpkt.size = outbuf_size;
u_size = avcodec_encode_video2(pCodecCtx, &avpkt, pYUVFrame, &got_packet_ptr);
if (u_size == 0 && got_packet_ptr) // 0 means success; got_packet_ptr tells whether a packet was actually produced
{
fwrite(avpkt.data, 1, avpkt.size, f);
}
pYUVFrame->pts++; // advance the presentation timestamp by one frame (in 1/25 s time_base units)
}
// flush the frames still buffered inside the encoder (max_b_frames > 0 introduces a delay)
for(;;)
{
int got_packet = 0;
av_init_packet(&avpkt);
avpkt.data = outbuf;
avpkt.size = outbuf_size;
if (avcodec_encode_video2(pCodecCtx, &avpkt, NULL, &got_packet) < 0 || !got_packet)
break;
fwrite(avpkt.data, 1, avpkt.size, f);
}
fclose(f);
sws_freeContext(scxt);
av_free(pRGBFrame); // the frames came from avcodec_alloc_frame(), so release them with av_free
av_free(pYUVFrame);
free(yuv_buff);
free(rgb_buff);
free(outbuf);
avcodec_close(pCodecCtx);
av_free(pCodecCtx);
return 0;
}
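The output is a raw H.264 elementary stream (Annex B byte stream), not an MP4 file. To check it (assuming the ffmpeg tools are available), it can be played directly or muxed into a container without re-encoding, e.g.:

ffplay ../video/test.h264
ffmpeg -i ../video/test.h264 -c copy ../video/test_out.mp4    # test_out.mp4 is just a scratch file name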