ffmpeg / output_example.c @ 755bfeab

/*
 * Libavformat API example: Output a media file in any supported
 * libavformat format. The default codecs are used.
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

#include "avformat.h"
#include "swscale.h"

#undef exit

/* 5 seconds stream duration */
#define STREAM_DURATION   5.0
#define STREAM_FRAME_RATE 25 /* 25 images/s */
#define STREAM_NB_FRAMES  ((int)(STREAM_DURATION * STREAM_FRAME_RATE))
#define STREAM_PIX_FMT PIX_FMT_YUV420P /* default pix_fmt */

static int sws_flags = SWS_BICUBIC;

/**************************************************************/
/* audio output */

float t, tincr, tincr2;
int16_t *samples;
uint8_t *audio_outbuf;
int audio_outbuf_size;
int audio_input_frame_size;

/*
 * add an audio output stream
 */
static AVStream *add_audio_stream(AVFormatContext *oc, int codec_id)
{
    AVCodecContext *c;
    AVStream *st;

    st = av_new_stream(oc, 1);
    if (!st) {
        fprintf(stderr, "Could not alloc stream\n");
        exit(1);
    }

    c = st->codec;
    c->codec_id = codec_id;
    c->codec_type = CODEC_TYPE_AUDIO;

    /* put sample parameters */
    c->bit_rate = 64000;
    c->sample_rate = 44100;
    c->channels = 2;
    return st;
}

static void open_audio(AVFormatContext *oc, AVStream *st)
{
    AVCodecContext *c;
    AVCodec *codec;

    c = st->codec;

    /* find the audio encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        fprintf(stderr, "codec not found\n");
        exit(1);
    }

    /* open it */
    if (avcodec_open(c, codec) < 0) {
        fprintf(stderr, "could not open codec\n");
        exit(1);
    }

    /* init signal generator */
    t = 0;
    tincr = 2 * M_PI * 110.0 / c->sample_rate;
    /* increment frequency by 110 Hz per second */
    tincr2 = 2 * M_PI * 110.0 / c->sample_rate / c->sample_rate;

    audio_outbuf_size = 10000;
    audio_outbuf = av_malloc(audio_outbuf_size);

    /* ugly hack for PCM codecs (will be removed ASAP with new PCM
       support) to compute the input frame size in samples */
    if (c->frame_size <= 1) {
        audio_input_frame_size = audio_outbuf_size / c->channels;
        switch(st->codec->codec_id) {
        case CODEC_ID_PCM_S16LE:
        case CODEC_ID_PCM_S16BE:
        case CODEC_ID_PCM_U16LE:
        case CODEC_ID_PCM_U16BE:
            audio_input_frame_size >>= 1;
            break;
        default:
            break;
        }
    } else {
        audio_input_frame_size = c->frame_size;
    }
    samples = av_malloc(audio_input_frame_size * 2 * c->channels);
}
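
/* The dummy audio signal is a sine tone with rising frequency: 't' is the
   running phase, 'tincr' the per-sample phase increment, and 'tincr2' the
   per-sample change applied to 'tincr' (all initialized in open_audio()). */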

/* prepare a 16 bit dummy audio frame of 'frame_size' samples and
   'nb_channels' channels */
static void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
{
    int j, i, v;
    int16_t *q;

    q = samples;
    for(j=0;j<frame_size;j++) {
        v = (int)(sin(t) * 10000);
        for(i = 0; i < nb_channels; i++)
            *q++ = v;
        t += tincr;
        tincr += tincr2;
    }
}
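
/* write_audio_frame() below encodes one buffer of 'audio_input_frame_size'
   samples per channel and rescales the packet timestamp from the codec time
   base to the stream time base with av_rescale_q() before muxing it. */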

static void write_audio_frame(AVFormatContext *oc, AVStream *st)
{
    AVCodecContext *c;
    AVPacket pkt;
    av_init_packet(&pkt);

    c = st->codec;

    get_audio_frame(samples, audio_input_frame_size, c->channels);

    pkt.size= avcodec_encode_audio(c, audio_outbuf, audio_outbuf_size, samples);

    pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
    pkt.flags |= PKT_FLAG_KEY;
    pkt.stream_index= st->index;
    pkt.data= audio_outbuf;

    /* write the compressed frame in the media file */
    if (av_write_frame(oc, &pkt) != 0) {
        fprintf(stderr, "Error while writing audio frame\n");
        exit(1);
    }
}

static void close_audio(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);

    av_free(samples);
    av_free(audio_outbuf);
}

/**************************************************************/
/* video output */

AVFrame *picture, *tmp_picture;
uint8_t *video_outbuf;
int frame_count, video_outbuf_size;

/* add a video output stream */
static AVStream *add_video_stream(AVFormatContext *oc, int codec_id)
{
    AVCodecContext *c;
    AVStream *st;

    st = av_new_stream(oc, 0);
    if (!st) {
        fprintf(stderr, "Could not alloc stream\n");
        exit(1);
    }

    c = st->codec;
    c->codec_id = codec_id;
    c->codec_type = CODEC_TYPE_VIDEO;

    /* put sample parameters */
    c->bit_rate = 400000;
    /* resolution must be a multiple of two */
    c->width = 352;
    c->height = 288;
    /* time base: this is the fundamental unit of time (in seconds) in terms
       of which frame timestamps are represented. for fixed-fps content,
       timebase should be 1/framerate and timestamp increments should be
       identically 1. */
    c->time_base.den = STREAM_FRAME_RATE;
    c->time_base.num = 1;
    c->gop_size = 12; /* emit one intra frame every twelve frames at most */
    c->pix_fmt = STREAM_PIX_FMT;
    if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        c->max_b_frames = 2;
    }
    if (c->codec_id == CODEC_ID_MPEG1VIDEO){
        /* Needed to avoid using macroblocks in which some coeffs overflow.
           This does not happen with normal video, it just happens here as
           the motion of the chroma plane does not match the luma plane. */
        c->mb_decision=2;
    }
    // some formats want stream headers to be separate
    if(!strcmp(oc->oformat->name, "mp4") || !strcmp(oc->oformat->name, "mov") || !strcmp(oc->oformat->name, "3gp"))
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;

    return st;
}
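
/* alloc_picture() below allocates an AVFrame together with an image buffer
   sized for the given pixel format and dimensions, and ties the two together
   with avpicture_fill(); close_video() later frees both buffer and frame. */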

static AVFrame *alloc_picture(int pix_fmt, int width, int height)
{
    AVFrame *picture;
    uint8_t *picture_buf;
    int size;

    picture = avcodec_alloc_frame();
    if (!picture)
        return NULL;
    size = avpicture_get_size(pix_fmt, width, height);
    picture_buf = av_malloc(size);
    if (!picture_buf) {
        av_free(picture);
        return NULL;
    }
    avpicture_fill((AVPicture *)picture, picture_buf,
                   pix_fmt, width, height);
    return picture;
}

static void open_video(AVFormatContext *oc, AVStream *st)
{
    AVCodec *codec;
    AVCodecContext *c;

    c = st->codec;

    /* find the video encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        fprintf(stderr, "codec not found\n");
        exit(1);
    }

    /* open the codec */
    if (avcodec_open(c, codec) < 0) {
        fprintf(stderr, "could not open codec\n");
        exit(1);
    }

    video_outbuf = NULL;
    if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
        /* allocate output buffer */
        /* XXX: API change will be done */
        /* buffers passed into lav* can be allocated any way you prefer,
           as long as they're aligned enough for the architecture, and
           they're freed appropriately (such as using av_free for buffers
           allocated with av_malloc) */
        video_outbuf_size = 200000;
        video_outbuf = av_malloc(video_outbuf_size);
    }

    /* allocate the encoded raw picture */
    picture = alloc_picture(c->pix_fmt, c->width, c->height);
    if (!picture) {
        fprintf(stderr, "Could not allocate picture\n");
        exit(1);
    }

    /* if the output format is not YUV420P, then a temporary YUV420P
       picture is needed too. It is then converted to the required
       output format */
    tmp_picture = NULL;
    if (c->pix_fmt != PIX_FMT_YUV420P) {
        tmp_picture = alloc_picture(PIX_FMT_YUV420P, c->width, c->height);
        if (!tmp_picture) {
            fprintf(stderr, "Could not allocate temporary picture\n");
            exit(1);
        }
    }
}
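
/* fill_yuv_image() below paints a moving gradient: the Y plane varies with x,
   y and the frame index, while the Cb and Cr planes (half resolution in each
   direction, since the picture is YUV420P) animate independently. */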

/* prepare a dummy image */
static void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
{
    int x, y, i;

    i = frame_index;

    /* Y */
    for(y=0;y<height;y++) {
        for(x=0;x<width;x++) {
            pict->data[0][y * pict->linesize[0] + x] = x + y + i * 3;
        }
    }

    /* Cb and Cr */
    for(y=0;y<height/2;y++) {
        for(x=0;x<width/2;x++) {
            pict->data[1][y * pict->linesize[1] + x] = 128 + y + i * 2;
            pict->data[2][y * pict->linesize[2] + x] = 64 + x + i * 5;
        }
    }
}
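
/* Encoding note for write_video_frame(): with B frames the encoder buffers
   pictures internally, so avcodec_encode_video() may return 0 for a while
   (the image was buffered); the delayed frames are flushed at the end of the
   stream by feeding the last picture again until the buffer drains. */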

static void write_video_frame(AVFormatContext *oc, AVStream *st)
{
    int out_size, ret;
    AVCodecContext *c;
    static struct SwsContext *img_convert_ctx;

    c = st->codec;

    if (frame_count >= STREAM_NB_FRAMES) {
        /* no more frames to compress. The codec has a latency of a few
           frames if using B frames, so we get the last frames by
           passing the same picture again */
    } else {
        if (c->pix_fmt != PIX_FMT_YUV420P) {
            /* as we only generate a YUV420P picture, we must convert it
               to the codec pixel format if needed */
            if (img_convert_ctx == NULL) {
                img_convert_ctx = sws_getContext(c->width, c->height,
                                                 PIX_FMT_YUV420P,
                                                 c->width, c->height,
                                                 c->pix_fmt,
                                                 sws_flags, NULL, NULL, NULL);
                if (img_convert_ctx == NULL) {
                    fprintf(stderr, "Cannot initialize the conversion context\n");
                    exit(1);
                }
            }
            fill_yuv_image(tmp_picture, frame_count, c->width, c->height);
            sws_scale(img_convert_ctx, tmp_picture->data, tmp_picture->linesize,
                      0, c->height, picture->data, picture->linesize);
        } else {
            fill_yuv_image(picture, frame_count, c->width, c->height);
        }
    }

    if (oc->oformat->flags & AVFMT_RAWPICTURE) {
        /* raw video case. The API will change slightly in the near
           future for that */
        AVPacket pkt;
        av_init_packet(&pkt);

        pkt.flags |= PKT_FLAG_KEY;
        pkt.stream_index= st->index;
        pkt.data= (uint8_t *)picture;
        pkt.size= sizeof(AVPicture);

        ret = av_write_frame(oc, &pkt);
    } else {
        /* encode the image */
        out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        /* if zero size, it means the image was buffered */
        if (out_size > 0) {
            AVPacket pkt;
            av_init_packet(&pkt);

            pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
            if(c->coded_frame->key_frame)
                pkt.flags |= PKT_FLAG_KEY;
            pkt.stream_index= st->index;
            pkt.data= video_outbuf;
            pkt.size= out_size;

            /* write the compressed frame in the media file */
            ret = av_write_frame(oc, &pkt);
        } else {
            ret = 0;
        }
    }
    if (ret != 0) {
        fprintf(stderr, "Error while writing video frame\n");
        exit(1);
    }
    frame_count++;
}

static void close_video(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);
    av_free(picture->data[0]);
    av_free(picture);
    if (tmp_picture) {
        av_free(tmp_picture->data[0]);
        av_free(tmp_picture);
    }
    av_free(video_outbuf);
}

/**************************************************************/
/* media file output */

int main(int argc, char **argv)
{
    const char *filename;
    AVOutputFormat *fmt;
    AVFormatContext *oc;
    AVStream *audio_st, *video_st;
    double audio_pts, video_pts;
    int i;

    /* initialize libavcodec, and register all codecs and formats */
    av_register_all();

    if (argc != 2) {
        printf("usage: %s output_file\n"
               "API example program to output a media file with libavformat.\n"
               "The output format is automatically guessed according to the file extension.\n"
               "Raw images can also be output by using '%%d' in the filename\n"
               "\n", argv[0]);
        exit(1);
    }

    filename = argv[1];

    /* auto detect the output format from the name. default is
       mpeg. */
    fmt = guess_format(NULL, filename, NULL);
    if (!fmt) {
        printf("Could not deduce output format from file extension: using MPEG.\n");
        fmt = guess_format("mpeg", NULL, NULL);
    }
    if (!fmt) {
        fprintf(stderr, "Could not find suitable output format\n");
        exit(1);
    }

    /* allocate the output media context */
    oc = av_alloc_format_context();
    if (!oc) {
        fprintf(stderr, "Memory error\n");
        exit(1);
    }
    oc->oformat = fmt;
    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    /* add the audio and video streams using the default format codecs
       and initialize the codecs */
    video_st = NULL;
    audio_st = NULL;
    if (fmt->video_codec != CODEC_ID_NONE) {
        video_st = add_video_stream(oc, fmt->video_codec);
    }
    if (fmt->audio_codec != CODEC_ID_NONE) {
        audio_st = add_audio_stream(oc, fmt->audio_codec);
    }

    /* set the output parameters (must be done even if no
       parameters). */
    if (av_set_parameters(oc, NULL) < 0) {
        fprintf(stderr, "Invalid output format parameters\n");
        exit(1);
    }

    dump_format(oc, 0, filename, 1);

    /* now that all the parameters are set, we can open the audio and
       video codecs and allocate the necessary encode buffers */
    if (video_st)
        open_video(oc, video_st);
    if (audio_st)
        open_audio(oc, audio_st);

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (url_fopen(&oc->pb, filename, URL_WRONLY) < 0) {
            fprintf(stderr, "Could not open '%s'\n", filename);
            exit(1);
        }
    }

    /* write the stream header, if any */
    av_write_header(oc);
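
    /* Main loop: keep audio and video roughly interleaved by writing a frame
       for whichever stream is currently behind (smaller presentation time),
       until both streams have reached STREAM_DURATION. */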

    for(;;) {
        /* compute current audio and video time */
        if (audio_st)
            audio_pts = (double)audio_st->pts.val * audio_st->time_base.num / audio_st->time_base.den;
        else
            audio_pts = 0.0;

        if (video_st)
            video_pts = (double)video_st->pts.val * video_st->time_base.num / video_st->time_base.den;
        else
            video_pts = 0.0;

        if ((!audio_st || audio_pts >= STREAM_DURATION) &&
            (!video_st || video_pts >= STREAM_DURATION))
            break;

        /* write interleaved audio and video frames */
        if (!video_st || (video_st && audio_st && audio_pts < video_pts)) {
            write_audio_frame(oc, audio_st);
        } else {
            write_video_frame(oc, video_st);
        }
    }

    /* close each codec */
    if (video_st)
        close_video(oc, video_st);
    if (audio_st)
        close_audio(oc, audio_st);

    /* write the trailer, if any */
    av_write_trailer(oc);

    /* free the streams */
    for(i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]->codec);
        av_freep(&oc->streams[i]);
    }

    if (!(fmt->flags & AVFMT_NOFILE)) {
        /* close the output file */
        url_fclose(&oc->pb);
    }

    /* free the format context */
    av_free(oc);

    return 0;
}