2024年1月3日发(作者:)
ffmpeg封装H264为MP4
前言
文章主要对H264视频流封装为MP4格式文件的讲述,有实时H264视频流的封装和h264文件的封装。本文主要针对飞思卡尔6Q-vpu视频编码后的视频封装,所以没有涉及音频。
一、h264视频文件的封装
这部分代码主要是从雷博那里借鉴过来的,雷博的文章是音频和视频封装,我这里只实现视频的封装,具体实现方法差不多,就是少了音频这一路,代码如下:
#include <stdio.h>
#define __STDC_CONSTANT_MACROS
#include "libavformat/avformat.h"
/*
FIX: H.264 in some container format (FLV, MP4, MKV etc.) need
"h264_mp4toannexb" bitstream filter (BSF)
*Add SPS,PPS in front of IDR frame
*Add start code ("0,0,0,1") in front of NALU
H.264 in some container (MPEG2TS) don't need this BSF.
*/
//'1': Use H.264 Bitstream Filter
#define USE_H264BSF 0
/*
FIX:AAC in some container format (FLV, MP4, MKV etc.) need
"aac_adtstoasc" bitstream filter (BSF)
*/
//'1': Use AAC Bitstream Filter
#define USE_AACBSF 0
int main(int argc, char* argv[])
{
AVOutputFormat *ofmt = NULL;
//Input AVFormatContext and Output AVFormatContext
AVFormatContext *ifmt_ctx_v = NULL, *ifmt_ctx_a = NULL,*ofmt_ctx = NULL;
AVPacket pkt;
1
int ret, i;
int videoindex_v=0,videoindex_out=0;
int frame_index=0;
int64_t cur_pts_v=0,cur_pts_a=0;
//const char *in_filename_v = "cuc_";//Input file URL
const char *in_filename_v = "vpu.h264";
//const char *in_filename_a = "cuc_3";
//const char *in_filename_a = "gowest.m4a";
//const char *in_filename_a = "";
const char *in_filename_a = "3";
const char *out_filename = "4";//Output file URL
av_register_all();
//Input
if ((ret = avformat_open_input(&ifmt_ctx_v, in_filename_v, 0, 0)) < 0) {
printf( "Could not open input file.");
goto end;
}
if ((ret = avformat_find_stream_info(ifmt_ctx_v, 0)) < 0) {
printf( "Failed to retrieve input stream information");
goto end;
}
/*if ((ret = avformat_open_input(&ifmt_ctx_a, in_filename_a, 0, 0)) < 0) {
printf( "Could not open input file.");
goto end;
}
if ((ret = avformat_find_stream_info(ifmt_ctx_a, 0)) < 0) {
printf( "Failed to retrieve input stream information");
goto end;
}*/
printf("===========Input Information==========n");
av_dump_format(ifmt_ctx_v, 0, in_filename_v, 0);
//av_dump_format(ifmt_ctx_a, 0, in_filename_a, 0);
printf("======================================n");
//Output
avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_filename);
if (!ofmt_ctx) {
printf( "Could not create output contextn");
ret = AVERROR_UNKNOWN;
goto end;
}
ofmt = ofmt_ctx->oformat;
printf("ifmt_ctx_v->nb_streams=%dn",ifmt_ctx_v->nb_streams);
for (i = 0; i < ifmt_ctx_v->nb_streams; i++) {
2
//Create output AVStream according to input AVStream
//if(ifmt_ctx_v->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
{
AVStream *in_stream = ifmt_ctx_v->streams[i];
AVStream *out_stream = avformat_new_stream(ofmt_ctx,
in_stream->codec->codec);
videoindex_v=i;
if (!out_stream) {
printf( "Failed allocating output streamn");
ret = AVERROR_UNKNOWN;
goto end;
}
videoindex_out=out_stream->index;
//Copy the settings of AVCodecContext
if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0) {
printf( "Failed to copy context from input to output stream codec contextn");
goto end;
}
out_stream->codec->codec_tag = 0;
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
//break;
}
}
/*
for (i = 0; i < ifmt_ctx_a->nb_streams; i++) {
//Create output AVStream according to input AVStream
if(ifmt_ctx_a->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO){
AVStream *in_stream = ifmt_ctx_a->streams[i];
AVStream *out_stream = avformat_new_stream(ofmt_ctx,
in_stream->codec->codec);
audioindex_a=i;
if (!out_stream) {
printf( "Failed allocating output streamn");
ret = AVERROR_UNKNOWN;
goto end;
}
audioindex_out=out_stream->index;
//Copy the settings of AVCodecContext
if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0) {
printf( "Failed to copy context from input to output stream codec contextn");
goto end;
}
out_stream->codec->codec_tag = 0;
3
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
break;
}
}
*/
printf("==========Output Information==========n");
av_dump_format(ofmt_ctx, 0, out_filename, 1);
printf("======================================n");
//Open output file
if (!(ofmt->flags & AVFMT_NOFILE)) {
if (avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE) < 0) {
printf( "Could not open output file '%s'", out_filename);
goto end;
}
}
//Write file header
if (avformat_write_header(ofmt_ctx, NULL) < 0) {
printf( "Error occurred when opening output filen");
goto end;
}
//FIX
#if USE_H264BSF
AVBitStreamFilterContext* h264bsfc = av_bitstream_filter_init("h264_mp4toannexb");
#endif
#if USE_AACBSF
AVBitStreamFilterContext* aacbsfc = av_bitstream_filter_init("aac_adtstoasc");
#endif
while (1) {
AVFormatContext *ifmt_ctx;
int stream_index=0;
AVStream *in_stream, *out_stream;
//Get an AVPacket
//if(av_compare_ts(cur_pts_v,ifmt_ctx_v->streams[videoindex_v]->time_base,cur_pts_a,ifmt_ctx_a->streams[audioindex_a]->time_base) <= 0)
{
ifmt_ctx=ifmt_ctx_v;
stream_index=videoindex_out;
if(av_read_frame(ifmt_ctx, &pkt) >= 0){
do{
in_stream = ifmt_ctx->streams[_index];
out_stream = ofmt_ctx->streams[stream_index];
printf("stream_index==%d,_index==%d,videoindex_v=%dn",
4
stream_index,_index,videoindex_v);
if(_index==videoindex_v){
//FIX:No PTS (Example: Raw H.264)
//Simple Write PTS
if(==AV_NOPTS_VALUE){
printf("frame_index==%dn",frame_index);
//Write PTS
AVRational time_base1=in_stream->time_base;
//Duration between 2 frames (us)
int64_t
calc_duration=(double)AV_TIME_BASE/av_q2d(in_stream->r_frame_rate);
//Parameters
=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);
=;
on=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);
frame_index++;
}
cur_pts_v=;
break;
}
}while(av_read_frame(ifmt_ctx, &pkt) >= 0);
}else{
break;
}
}
/*else
{
ifmt_ctx=ifmt_ctx_a;
stream_index=audioindex_out;
if(av_read_frame(ifmt_ctx, &pkt) >= 0){
do{
in_stream = ifmt_ctx->streams[_index];
out_stream = ofmt_ctx->streams[stream_index];
if(_index==audioindex_a){
//FIX:No PTS
//Simple Write PTS
if(==AV_NOPTS_VALUE){
//Write PTS
AVRational time_base1=in_stream->time_base;
//Duration between 2 frames (us)
int64_t
5
calc_duration=(double)AV_TIME_BASE/av_q2d(in_stream->r_frame_rate);
//Parameters
=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);
=;
on=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);
frame_index++;
}
cur_pts_a=;
break;
}
}while(av_read_frame(ifmt_ctx, &pkt) >= 0);
}else{
break;
}
}*/
//FIX:Bitstream Filter
#if USE_H264BSF
av_bitstream_filter_filter(h264bsfc, in_stream->codec, NULL, &, &,
, , 0);
#endif
#if USE_AACBSF
av_bitstream_filter_filter(aacbsfc, out_stream->codec, NULL, &, &,
, , 0);
#endif
//Convert PTS/DTS
= av_rescale_q_rnd(, in_stream->time_base, out_stream->time_base,
(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
= av_rescale_q_rnd(, in_stream->time_base, out_stream->time_base,
(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
on = av_rescale_q(on, in_stream->time_base,
out_stream->time_base);
= -1;
_index=stream_index;
printf("Write 1 Packet. size:%5dtpts:%lldn",,);
//Write
if (av_interleaved_write_frame(ofmt_ctx, &pkt) < 0) {
printf( "Error muxing packetn");
break;
}
av_free_packet(&pkt);
}
6
//Write file trailer
av_write_trailer(ofmt_ctx);
#if USE_H264BSF
av_bitstream_filter_close(h264bsfc);
#endif
#if USE_AACBSF
av_bitstream_filter_close(aacbsfc);
#endif
end:
avformat_close_input(&ifmt_ctx_v);
//avformat_close_input(&ifmt_ctx_a);
/* close output */
if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
if (ret < 0 && ret != AVERROR_EOF) {
printf( "Error occurred.n");
return -1;
}
return 0;
}
二、h264视频实时流的封装
大体思路和上面文件操作是差不多的,区别在于不能像上面一样可以获取文件中视频流的信息,这样你就得自己去设置这些参数,下面程序是根据我自己的视频流设置的,如果你的不同,需要修改;具体参数如下:
(1)AVCodecContext 的配置参数
enum AVMediaType codec_type:编解码器的类型(视频,音频...)
struct AVCodec *codec:采用的解码器AVCodec(如H.264)
int bit_rate:平均比特率
uint8_t *extradata; int extradata_size:针对特定编码器包含的附加信息(例如对于H.264解码器来说,存储SPS,PPS等)
AVRational time_base:根据该参数,可以把PTS转化为实际的时间(单位为秒s)
int width, height:如果是视频的话,代表宽和高
7
int refs:运动估计参考帧的个数(H.264的话会有多帧,MPEG2这类的一般就没有了)
int sample_rate:采样率(音频)
int channels:声道数(音频)
enum AVSampleFormat sample_fmt:采样格式
int profile:型(H.264里面就有,其他编码标准应该也有)
int level:级(和profile差不太多)
(2)还有一个结构体参数也需要去设置,在AVPacket结构体中,重要的变量有以下几个:
uint8_t *data:压缩编码的数据。
例如对于H.264来说。1个AVPacket的data通常对应一个NAL。
注意:在这里只是对应,而不是一模一样。他们之间有微小的差别:使用FFMPEG类库分离出多媒体文件中的H.264码流
因此在使用FFMPEG进行视音频处理的时候,常常可以将得到的AVPacket的data数据直接写成文件,从而得到视音频的码流文件。
int size:data的大小
int64_t pts:显示时间戳
int64_t dts:解码时间戳
int stream_index:标识该AVPacket所属的视频/音频流。
总体代码如下:
#include"ffmpeg_mp4.h"
/*
 * Classify the picture type of the NAL/VOP that starts at p.
 *
 * p must point at an Annex-B start code (00 00 01 or 00 00 00 01) and
 * len must be at least 7 bytes.
 *
 * Returns:
 *   0  key/IDR picture (H.264 byte 0x65, or MPEG-4 VOP type I)
 *   1  P picture        (H.264 byte 0x61, or MPEG-4 VOP type P)
 *   2  B picture        (H.264 byte 0x01, or MPEG-4 VOP type B)
 *  -1  buffer too short, NULL, or no valid start code / unknown type
 *
 * FIX: the scraped source had a stray page number embedded in this
 * function body, which broke compilation.
 */
int getVopType( const void *p, int len )
{
    if ( !p || 6 >= len )
        return -1;
    unsigned char *b = (unsigned char*)p;

    /* Verify NAL marker: accept 00 00 01 or 00 00 00 01 */
    if ( b[ 0 ] || b[ 1 ] || 0x01 != b[ 2 ] )
    {
        b++;   /* maybe a 4-byte start code: retry one byte later */
        if ( b[ 0 ] || b[ 1 ] || 0x01 != b[ 2 ] )
            return -1;
    }
    b += 3;    /* skip the start code; b now points at the payload byte */

    /* MPEG-4 Part 2: VOP start code 0xb6, coding type in the top 2 bits */
    if ( 0xb6 == *b )
    {
        b++;
        return ( *b & 0xc0 ) >> 6;
    }

    /* H.264 NAL header byte: 0x65 = IDR slice; 0x61 / 0x01 are non-IDR
     * slices with high / zero nal_ref_idc, mapped here to P / B. */
    switch( *b )
    {
        case 0x65 : return 0;
        case 0x61 : return 1;
        case 0x01 : return 2;
    }
    return -1;
}
/*
 * Add an output stream for codec_id to the muxer context oc.
 *
 * Looks up the encoder, creates the AVStream, and fills in the codec
 * context with parameters matching this project's VPU output
 * (1920x1080 @ 50 fps H.264). The stream index is stored in the
 * file-scope global `vi` for later use by WriteVideo().
 *
 * On success returns the new AVStream and stores the encoder in *codec;
 * exits the process if the encoder or stream cannot be created.
 *
 * FIX: the scraped source had the time_base assignments garbled
 * ("c->time_ = 50") and a stray page number in the body.
 */
AVStream *add_stream(AVFormatContext *oc, AVCodec **codec, enum AVCodecID codec_id)
{
    AVCodecContext *c;
    AVStream *st;

    /* find the encoder */
    *codec = avcodec_find_encoder(codec_id);
    if (!*codec)
    {
        printf("could not find encoder for '%s' \n", avcodec_get_name(codec_id));
        exit(1);
    }
    st = avformat_new_stream(oc, *codec);
    if (!st)
    {
        printf("could not allocate stream \n");
        exit(1);
    }
    st->id = oc->nb_streams - 1;
    c = st->codec;
    vi = st->index;   /* remember the video stream index (global) */

    switch ((*codec)->type)
    {
    case AVMEDIA_TYPE_AUDIO:
        printf("AVMEDIA_TYPE_AUDIO\n");
        c->sample_fmt = (*codec)->sample_fmts ? (*codec)->sample_fmts[0] :
                                                AV_SAMPLE_FMT_FLTP;
        c->bit_rate = 64000;
        c->sample_rate = 44100;
        c->channels = 2;
        break;
    case AVMEDIA_TYPE_VIDEO:
        printf("AVMEDIA_TYPE_VIDEO\n");
        c->codec_id = AV_CODEC_ID_H264;
        c->bit_rate = 0;    /* 0: stream already encoded, rate unknown */
        /* NOTE(review): hard-coded to this VPU's output; change these
         * if your stream has a different resolution or frame rate. */
        c->width = 1920;
        c->height = 1080;
        c->time_base.den = 50;   /* time base 1/50 == 50 fps */
        c->time_base.num = 1;
        c->gop_size = 1;
        c->pix_fmt = STREAM_PIX_FMT;
        if (c->codec_id == AV_CODEC_ID_MPEG2VIDEO)
        {
            c->max_b_frames = 2;
        }
        if (c->codec_id == AV_CODEC_ID_MPEG1VIDEO)
        {
            /* needed to avoid using macroblocks in which some coeffs
             * overflow (MPEG-1 only) */
            c->mb_decision = 2;
        }
        break;
    default:
        break;
    }

    /* Some formats want stream headers to be separate */
    if (oc->oformat->flags & AVFMT_GLOBALHEADER)
    {
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }
    return st;
}
/*
 * Open the codec attached to stream st. A failure is only logged:
 * the original code deliberately continues rather than aborting.
 * (oc is accepted for interface symmetry but unused here.)
 */
void open_video(AVFormatContext *oc, AVCodec *codec, AVStream *st)
{
    AVCodecContext *ctx = st->codec;
    int rc = avcodec_open2(ctx, codec, NULL);
    if (rc < 0)
    {
        printf("could not open video codec");
        /* intentionally no exit(1) — keep going on failure */
    }
}
/*
 * Create the MP4 output file: allocate the muxer context, add the video
 * stream, open the codec and the output file, and write the file header.
 *
 * Returns 0 on success, 1 on failure.
 *
 * FIX: the original fell off the end of a non-void function (undefined
 * behavior when the caller reads the result) — the success `return 0`
 * was missing. Stray page numbers in the body were removed as well.
 * NOTE(review): the output path was truncated in the scrape ("/udisk/4");
 * confirm the intended filename.
 */
int CreateMp4()
{
    int ret;
    const char* pszFileName = "/udisk/test.mp4";
    AVOutputFormat *fmt;
    AVCodec *video_codec;
    AVStream *m_pVideoSt = NULL;

    av_register_all();
    avformat_alloc_output_context2(&m_pOc, NULL, NULL, pszFileName);
    if (!m_pOc)
    {
        printf("Could not deduce output format from file extension: using MPEG. \n");
        avformat_alloc_output_context2(&m_pOc, NULL, "mpeg", pszFileName);
    }
    if (!m_pOc)
    {
        return 1;
    }
    fmt = m_pOc->oformat;

    /* Add the video stream (sets the global `vi`) and open its codec. */
    if (fmt->video_codec != AV_CODEC_ID_NONE)
    {
        m_pVideoSt = add_stream(m_pOc, &video_codec, fmt->video_codec);
    }
    if (m_pVideoSt)
    {
        open_video(m_pOc, video_codec, m_pVideoSt);
    }

    printf("==========Output Information==========\n");
    av_dump_format(m_pOc, 0, pszFileName, 1);
    printf("======================================\n");

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE))
    {
        ret = avio_open(&m_pOc->pb, pszFileName, AVIO_FLAG_WRITE);
        if (ret < 0)
        {
            printf("could not open %s\n", pszFileName);
            return 1;
        }
    }
    /* Write the stream header, if any */
    ret = avformat_write_header(m_pOc, NULL);
    if (ret < 0)
    {
        printf("Error occurred when opening output file");
        return 1;
    }
    return 0;   /* success */
}
/* write h264 data to mp4 file
* 创建mp4文件返回2;写入数据帧返回0 */
void WriteVideo(void* data, int nLen)
{
int ret;
if ( 0 > vi )
{
printf("vi less than 0");
//return -1;
}
AVStream *pst = m_pOc->streams[ vi ];
//printf("vi=====%dn",vi);
// Init packet
AVPacket pkt;
// 我的添加,为了计算pts
AVCodecContext *c = pst->codec;
av_init_packet( &pkt );
|= ( 0 >= getVopType( data, nLen ) ) ? AV_PKT_FLAG_KEY : 0;
_index = pst->index;
= (uint8_t*)data;
= nLen;
// Wait for key frame
if ( waitkey )
if ( 0 == ( & AV_PKT_FLAG_KEY ) )
return ;
else
waitkey = 0;
= (ptsInc++) * (90000/STREAM_FRAME_RATE);
= av_rescale_q((ptsInc++)*2, pst->codec->time_base,pst->time_base);
// = (ptsInc++) * (90000/STREAM_FRAME_RATE);
// =av_rescale_q_rnd(,
pst->time_base,pst->time_base,(AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
=av_rescale_q_rnd(,
pst->time_base,pst->time_base,(AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
on = av_rescale_q(on,pst->time_base, pst->time_base);
= -1;
printf("=%dn",);
ret = av_interleaved_write_frame( m_pOc, &pkt );
if (ret < 0)
12
{
printf("cannot write frame");
}
}
/*
 * Finalize the MP4 file: reset the muxer state flags, write the trailer,
 * close the output file (when the format owns one) and free the context.
 * Safe to call when no file was ever created (m_pOc == NULL).
 */
void CloseMp4()
{
    waitkey = -1;
    vi = -1;

    if (m_pOc)
    {
        av_write_trailer(m_pOc);
        if (!(m_pOc->oformat->flags & AVFMT_NOFILE))
            avio_close(m_pOc->pb);
        avformat_free_context(m_pOc);
        m_pOc = NULL;
    }
}
13


发布评论