@@ -114,12 +114,13 @@ static void open_audio(AVFormatContext *oc, AVStream *st)
         audio_input_frame_size = 10000;
     else
         audio_input_frame_size = c->frame_size;
-    samples = av_malloc(audio_input_frame_size * av_get_bytes_per_sample(c->sample_fmt)
-                        * c->channels);
+    samples = av_malloc(audio_input_frame_size *
+                        av_get_bytes_per_sample(c->sample_fmt) *
+                        c->channels);
 }
 
-/* prepare a 16 bit dummy audio frame of 'frame_size' samples and
-   'nb_channels' channels */
+/* Prepare a 16 bit dummy audio frame of 'frame_size' samples and
+ * 'nb_channels' channels. */
 static void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
 {
     int j, i, v;
@@ -138,7 +139,7 @@ static void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
 static void write_audio_frame(AVFormatContext *oc, AVStream *st)
 {
     AVCodecContext *c;
-    AVPacket pkt;
+    AVPacket pkt = { 0 }; // data and size must be 0;
     AVFrame *frame = avcodec_alloc_frame();
     int got_packet;
 
@@ -147,9 +148,11 @@ static void write_audio_frame(AVFormatContext *oc, AVStream *st)
 
     get_audio_frame(samples, audio_input_frame_size, c->channels);
     frame->nb_samples = audio_input_frame_size;
-    avcodec_fill_audio_frame(frame, c->channels, c->sample_fmt, (uint8_t *)samples,
-                             audio_input_frame_size * av_get_bytes_per_sample(c->sample_fmt)
-                             * c->channels, 1);
+    avcodec_fill_audio_frame(frame, c->channels, c->sample_fmt,
+                             (uint8_t *)samples,
+                             audio_input_frame_size *
+                             av_get_bytes_per_sample(c->sample_fmt) *
+                             c->channels, 1);
 
     avcodec_encode_audio2(c, &pkt, frame, &got_packet);
     if (!got_packet)
@@ -157,7 +160,7 @@ static void write_audio_frame(AVFormatContext *oc, AVStream *st)
 
     pkt.stream_index = st->index;
 
-    /* write the compressed frame in the media file */
+    /* Write the compressed frame to the media file. */
     if (av_interleaved_write_frame(oc, &pkt) != 0) {
         fprintf(stderr, "Error while writing audio frame\n");
         exit(1);
@@ -178,7 +181,7 @@ static AVFrame *picture, *tmp_picture;
 static uint8_t *video_outbuf;
 static int frame_count, video_outbuf_size;
 
-/* add a video output stream */
+/* Add a video output stream. */
 static AVStream *add_video_stream(AVFormatContext *oc, enum CodecID codec_id)
 {
     AVCodecContext *c;
@@ -210,15 +213,15 @@ static AVStream *add_video_stream(AVFormatContext *oc, enum CodecID codec_id)
 
     c->codec_id = codec_id;
 
-    /* put sample parameters */
+    /* Put sample parameters. */
     c->bit_rate = 400000;
-    /* resolution must be a multiple of two */
+    /* Resolution must be a multiple of two. */
     c->width  = 352;
     c->height = 288;
-    /* time base: this is the fundamental unit of time (in seconds) in terms
-       of which frame timestamps are represented. for fixed-fps content,
-       timebase should be 1/framerate and timestamp increments should be
-       identically 1. */
+    /* timebase: This is the fundamental unit of time (in seconds) in terms
+     * of which frame timestamps are represented. For fixed-fps content,
+     * timebase should be 1/framerate and timestamp increments should be
+     * identical to 1. */
     c->time_base.den = STREAM_FRAME_RATE;
     c->time_base.num = 1;
     c->gop_size = 12; /* emit one intra frame every twelve frames at most */
@@ -229,11 +232,11 @@ static AVStream *add_video_stream(AVFormatContext *oc, enum CodecID codec_id)
     }
     if (c->codec_id == CODEC_ID_MPEG1VIDEO) {
         /* Needed to avoid using macroblocks in which some coeffs overflow.
-           This does not happen with normal video, it just happens here as
-           the motion of the chroma plane does not match the luma plane. */
+         * This does not happen with normal video, it just happens here as
+         * the motion of the chroma plane does not match the luma plane. */
         c->mb_decision = 2;
     }
-    // some formats want stream headers to be separate
+    /* Some formats want stream headers to be separate. */
     if (oc->oformat->flags & AVFMT_GLOBALHEADER)
         c->flags |= CODEC_FLAG_GLOBAL_HEADER;
 
@@ -274,26 +277,26 @@ static void open_video(AVFormatContext *oc, AVStream *st)
 
     video_outbuf = NULL;
     if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
-        /* allocate output buffer */
-        /* XXX: API change will be done */
-        /* buffers passed into lav* can be allocated any way you prefer,
-           as long as they're aligned enough for the architecture, and
-           they're freed appropriately (such as using av_free for buffers
-           allocated with av_malloc) */
+        /* Allocate output buffer. */
+        /* XXX: API change will be done. */
+        /* Buffers passed into lav* can be allocated any way you prefer,
+         * as long as they're aligned enough for the architecture, and
+         * they're freed appropriately (such as using av_free for buffers
+         * allocated with av_malloc). */
         video_outbuf_size = 200000;
         video_outbuf = av_malloc(video_outbuf_size);
     }
 
-    /* allocate the encoded raw picture */
+    /* Allocate the encoded raw picture. */
     picture = alloc_picture(c->pix_fmt, c->width, c->height);
     if (!picture) {
         fprintf(stderr, "Could not allocate picture\n");
         exit(1);
     }
 
-    /* if the output format is not YUV420P, then a temporary YUV420P
-       picture is needed too. It is then converted to the required
-       output format */
+    /* If the output format is not YUV420P, then a temporary YUV420P
+     * picture is needed too. It is then converted to the required
+     * output format. */
     tmp_picture = NULL;
     if (c->pix_fmt != PIX_FMT_YUV420P) {
         tmp_picture = alloc_picture(PIX_FMT_YUV420P, c->width, c->height);
@@ -304,19 +307,18 @@ static void open_video(AVFormatContext *oc, AVStream *st)
     }
 }
 
-/* prepare a dummy image */
-static void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
+/* Prepare a dummy image. */
+static void fill_yuv_image(AVFrame *pict, int frame_index,
+                           int width, int height)
 {
     int x, y, i;
 
     i = frame_index;
 
     /* Y */
-    for (y = 0; y < height; y++) {
-        for (x = 0; x < width; x++) {
+    for (y = 0; y < height; y++)
+        for (x = 0; x < width; x++)
             pict->data[0][y * pict->linesize[0] + x] = x + y + i * 3;
-        }
-    }
 
     /* Cb and Cr */
     for (y = 0; y < height / 2; y++) {
@@ -336,13 +338,13 @@ static void write_video_frame(AVFormatContext *oc, AVStream *st)
     c = st->codec;
 
     if (frame_count >= STREAM_NB_FRAMES) {
-        /* no more frame to compress. The codec has a latency of a few
-           frames if using B frames, so we get the last frames by
-           passing the same picture again */
+        /* No more frames to compress. The codec has a latency of a few
+         * frames if using B-frames, so we get the last frames by
+         * passing the same picture again. */
     } else {
         if (c->pix_fmt != PIX_FMT_YUV420P) {
             /* as we only generate a YUV420P picture, we must convert it
-               to the codec pixel format if needed */
+             * to the codec pixel format if needed */
             if (img_convert_ctx == NULL) {
                 img_convert_ctx = sws_getContext(c->width, c->height,
                                                  PIX_FMT_YUV420P,
@@ -350,7 +352,8 @@ static void write_video_frame(AVFormatContext *oc, AVStream *st)
                                                  c->pix_fmt,
                                                  sws_flags, NULL, NULL, NULL);
                 if (img_convert_ctx == NULL) {
-                    fprintf(stderr, "Cannot initialize the conversion context\n");
+                    fprintf(stderr,
+                            "Cannot initialize the conversion context\n");
                     exit(1);
                 }
             }
@@ -362,10 +365,9 @@ static void write_video_frame(AVFormatContext *oc, AVStream *st)
         }
     }
 
     if (oc->oformat->flags & AVFMT_RAWPICTURE) {
-        /* raw video case. The API will change slightly in the near
-           future for that. */
+        /* Raw video case - the API will change slightly in the near
+         * future for that. */
         AVPacket pkt;
         av_init_packet(&pkt);
 
@@ -377,21 +379,24 @@ static void write_video_frame(AVFormatContext *oc, AVStream *st)
         ret = av_interleaved_write_frame(oc, &pkt);
     } else {
         /* encode the image */
-        out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
-        /* if zero size, it means the image was buffered */
+        out_size = avcodec_encode_video(c, video_outbuf,
+                                        video_outbuf_size, picture);
+        /* If size is zero, it means the image was buffered. */
         if (out_size > 0) {
             AVPacket pkt;
             av_init_packet(&pkt);
 
             if (c->coded_frame->pts != AV_NOPTS_VALUE)
-                pkt.pts = av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
+                pkt.pts = av_rescale_q(c->coded_frame->pts,
+                                       c->time_base, st->time_base);
             if (c->coded_frame->key_frame)
                 pkt.flags |= AV_PKT_FLAG_KEY;
             pkt.stream_index = st->index;
             pkt.data = video_outbuf;
             pkt.size = out_size;
 
-            /* write the compressed frame in the media file */
+            /* Write the compressed frame to the media file. */
             ret = av_interleaved_write_frame(oc, &pkt);
         } else {
             ret = 0;
@@ -428,7 +433,7 @@ int main(int argc, char **argv)
     double audio_pts, video_pts;
     int i;
 
-    /* initialize libavcodec, and register all codecs and formats */
+    /* Initialize libavcodec, and register all codecs and formats. */
    av_register_all();
 
     if (argc != 2) {
@@ -453,8 +458,8 @@ int main(int argc, char **argv)
     }
     fmt = oc->oformat;
 
-    /* add the audio and video streams using the default format codecs
-       and initialize the codecs */
+    /* Add the audio and video streams using the default format codecs
+     * and initialize the codecs. */
     video_st = NULL;
     audio_st = NULL;
     if (fmt->video_codec != CODEC_ID_NONE) {
@@ -464,15 +469,15 @@ int main(int argc, char **argv)
         audio_st = add_audio_stream(oc, fmt->audio_codec);
     }
 
-    av_dump_format(oc, 0, filename, 1);
-
-    /* now that all the parameters are set, we can open the audio and
-       video codecs and allocate the necessary encode buffers */
+    /* Now that all the parameters are set, we can open the audio and
+     * video codecs and allocate the necessary encode buffers. */
     if (video_st)
         open_video(oc, video_st);
     if (audio_st)
         open_audio(oc, audio_st);
 
+    av_dump_format(oc, 0, filename, 1);
+
     /* open the output file, if needed */
     if (!(fmt->flags & AVFMT_NOFILE)) {
         if (avio_open(&oc->pb, filename, AVIO_FLAG_WRITE) < 0) {
@@ -481,18 +486,20 @@ int main(int argc, char **argv)
         }
     }
 
-    /* write the stream header, if any */
+    /* Write the stream header, if any. */
     avformat_write_header(oc, NULL);
 
     picture->pts = 0;
     for (;;) {
-        /* compute current audio and video time */
+        /* Compute current audio and video time. */
         if (audio_st)
             audio_pts = (double)audio_st->pts.val * audio_st->time_base.num / audio_st->time_base.den;
         else
             audio_pts = 0.0;
 
         if (video_st)
-            video_pts = (double)video_st->pts.val * video_st->time_base.num / video_st->time_base.den;
+            video_pts = (double)video_st->pts.val * video_st->time_base.num /
+                        video_st->time_base.den;
         else
             video_pts = 0.0;
@@ -509,28 +516,27 @@ int main(int argc, char **argv)
         }
     }
 
-    /* write the trailer, if any. the trailer must be written
-     * before you close the CodecContexts open when you wrote the
-     * header; otherwise write_trailer may try to use memory that
-     * was freed on av_codec_close() */
+    /* Write the trailer, if any. The trailer must be written before you
+     * close the CodecContexts open when you wrote the header; otherwise
+     * av_write_trailer() may try to use memory that was freed on
+     * av_codec_close(). */
     av_write_trailer(oc);
 
-    /* close each codec */
+    /* Close each codec. */
     if (video_st)
         close_video(oc, video_st);
     if (audio_st)
         close_audio(oc, audio_st);
 
-    /* free the streams */
+    /* Free the streams. */
     for (i = 0; i < oc->nb_streams; i++) {
         av_freep(&oc->streams[i]->codec);
         av_freep(&oc->streams[i]);
     }
 
-    if (!(fmt->flags & AVFMT_NOFILE)) {
-        /* close the output file */
+    if (!(fmt->flags & AVFMT_NOFILE))
+        /* Close the output file. */
         avio_close(oc->pb);
-    }
 
     /* free the stream */
     av_free(oc);