/*
 * FFplay : Simple Media Player based on the ffmpeg libraries
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#define HAVE_AV_CONFIG_H
#include "avformat.h"
#include "swscale.h"

#include "version.h"
#include "cmdutils.h"

#include <SDL.h>
#include <SDL_thread.h>

#ifdef __MINGW32__
#undef main /* We don't want SDL to override our main() */
#endif

#ifdef CONFIG_OS2
#define INCL_DOS
 #include <os2.h>
 #include <stdio.h>

 void MorphToPM()
 {
   PPIB pib;
   PTIB tib;

   DosGetInfoBlocks(&tib, &pib);

   // Change flag from VIO to PM:
   if (pib->pib_ultype==2) pib->pib_ultype = 3;
 }
#endif

//#define DEBUG_SYNC

#define MAX_VIDEOQ_SIZE (5 * 256 * 1024)
#define MAX_AUDIOQ_SIZE (5 * 16 * 1024)
#define MAX_SUBTITLEQ_SIZE (5 * 16 * 1024)

/* SDL audio buffer size, in samples. Should be small to have precise
   A/V sync as SDL does not have hardware buffer fullness info. */
#define SDL_AUDIO_BUFFER_SIZE 1024

/* no AV sync correction is done if below the AV sync threshold */
#define AV_SYNC_THRESHOLD 0.01
/* no AV correction is done if the error is too big */
#define AV_NOSYNC_THRESHOLD 10.0

/* maximum audio speed change to get correct sync */
#define SAMPLE_CORRECTION_PERCENT_MAX 10

/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
#define AUDIO_DIFF_AVG_NB   20
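
/* Note: the A-V difference is smoothed with a simple geometric (exponentially
   weighted) running average: audio_diff_cum = diff + audio_diff_avg_coef * audio_diff_cum,
   and the value actually used is audio_diff_cum * (1 - audio_diff_avg_coef), so
   roughly the last AUDIO_DIFF_AVG_NB measurements dominate the estimate
   (see synchronize_audio() below). */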

/* NOTE: the size must be big enough to compensate for the hardware audio buffer size */
#define SAMPLE_ARRAY_SIZE (2*65536)

static int sws_flags = SWS_BICUBIC;

typedef struct PacketQueue {
    AVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    int size;
    int abort_request;
    SDL_mutex *mutex;
    SDL_cond *cond;
} PacketQueue;
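
/* A PacketQueue is a small thread-safe FIFO of demuxed AVPackets, protected by
   an SDL mutex/condition pair.  The demuxer thread acts as the producer and the
   audio, video and subtitle decoders act as consumers; abort_request lets a
   consumer blocked in packet_queue_get() wake up and exit cleanly. */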

#define VIDEO_PICTURE_QUEUE_SIZE 1
#define SUBPICTURE_QUEUE_SIZE 4

typedef struct VideoPicture {
    double pts;                                  ///<presentation time stamp for this picture
    SDL_Overlay *bmp;
    int width, height; /* source height & width */
    int allocated;
} VideoPicture;

typedef struct SubPicture {
    double pts; /* presentation time stamp for this picture */
    AVSubtitle sub;
} SubPicture;

enum {
    AV_SYNC_AUDIO_MASTER, /* default choice */
    AV_SYNC_VIDEO_MASTER,
    AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
};

typedef struct VideoState {
    SDL_Thread *parse_tid;
    SDL_Thread *video_tid;
    AVInputFormat *iformat;
    int no_background;
    int abort_request;
    int paused;
    int last_paused;
    int seek_req;
    int seek_flags;
    int64_t seek_pos;
    AVFormatContext *ic;
    int dtg_active_format;

    int audio_stream;

    int av_sync_type;
    double external_clock; /* external clock base */
    int64_t external_clock_time;

    double audio_clock;
    double audio_diff_cum; /* used for AV difference average computation */
    double audio_diff_avg_coef;
    double audio_diff_threshold;
    int audio_diff_avg_count;
    AVStream *audio_st;
    PacketQueue audioq;
    int audio_hw_buf_size;
    /* samples output by the codec. we reserve more space for avsync
       compensation */
    DECLARE_ALIGNED(16,uint8_t,audio_buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2]);
    unsigned int audio_buf_size; /* in bytes */
    int audio_buf_index; /* in bytes */
    AVPacket audio_pkt;
    uint8_t *audio_pkt_data;
    int audio_pkt_size;

    int show_audio; /* if true, display audio samples */
    int16_t sample_array[SAMPLE_ARRAY_SIZE];
    int sample_array_index;
    int last_i_start;

    SDL_Thread *subtitle_tid;
    int subtitle_stream;
    int subtitle_stream_changed;
    AVStream *subtitle_st;
    PacketQueue subtitleq;
    SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
    int subpq_size, subpq_rindex, subpq_windex;
    SDL_mutex *subpq_mutex;
    SDL_cond *subpq_cond;

    double frame_timer;
    double frame_last_pts;
    double frame_last_delay;
    double video_clock;                          ///<pts of last decoded frame / predicted pts of next decoded frame
    int video_stream;
    AVStream *video_st;
    PacketQueue videoq;
    double video_current_pts;                    ///<current displayed pts (different from video_clock if frame fifos are used)
    int64_t video_current_pts_time;              ///<time (av_gettime) at which we updated video_current_pts - used to have running video pts
    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
    int pictq_size, pictq_rindex, pictq_windex;
    SDL_mutex *pictq_mutex;
    SDL_cond *pictq_cond;

    //    QETimer *video_timer;
    char filename[1024];
    int width, height, xleft, ytop;
} VideoState;
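
/* A VideoState bundles everything belonging to one open media file: the demux
   ("parse") thread, the video and subtitle decoder threads, the packet queues
   feeding them, the decoded picture/subpicture queues consumed by the display
   code, and the clocks used for A/V synchronisation.  Note that audio is
   decoded inside the SDL audio callback rather than in a dedicated thread. */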

void show_help(void);
static int audio_write_get_buf_size(VideoState *is);

/* options specified by the user */
static AVInputFormat *file_iformat;
static const char *input_filename;
static int fs_screen_width;
static int fs_screen_height;
static int screen_width = 0;
static int screen_height = 0;
static int audio_disable;
static int video_disable;
static int wanted_audio_stream= 0;
static int seek_by_bytes;
static int display_disable;
static int show_status;
static int av_sync_type = AV_SYNC_AUDIO_MASTER;
static int64_t start_time = AV_NOPTS_VALUE;
static int debug = 0;
static int debug_mv = 0;
static int step = 0;
static int thread_count = 1;
static int workaround_bugs = 1;
static int fast = 0;
static int genpts = 0;
static int lowres = 0;
static int idct = FF_IDCT_AUTO;
static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
static int error_resilience = FF_ER_CAREFUL;
static int error_concealment = 3;

/* current context */
static int is_full_screen;
static VideoState *cur_stream;
static int64_t audio_callback_time;

AVPacket flush_pkt;

#define FF_ALLOC_EVENT   (SDL_USEREVENT)
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_QUIT_EVENT    (SDL_USEREVENT + 2)
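
/* Custom SDL events posted to the main event loop: FF_ALLOC_EVENT asks the main
   thread to (re)allocate the video overlay (see alloc_picture(), which must run
   in the main thread), FF_REFRESH_EVENT is posted by the refresh timer to
   trigger a display update, and FF_QUIT_EVENT requests termination. */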

SDL_Surface *screen;

/* packet queue handling */
static void packet_queue_init(PacketQueue *q)
{
    memset(q, 0, sizeof(PacketQueue));
    q->mutex = SDL_CreateMutex();
    q->cond = SDL_CreateCond();
}

static void packet_queue_flush(PacketQueue *q)
{
    AVPacketList *pkt, *pkt1;

    SDL_LockMutex(q->mutex);
    for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
        pkt1 = pkt->next;
        av_free_packet(&pkt->pkt);
        av_freep(&pkt);
    }
    q->last_pkt = NULL;
    q->first_pkt = NULL;
    q->nb_packets = 0;
    q->size = 0;
    SDL_UnlockMutex(q->mutex);
}

static void packet_queue_end(PacketQueue *q)
{
    packet_queue_flush(q);
    SDL_DestroyMutex(q->mutex);
    SDL_DestroyCond(q->cond);
}

static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
{
    AVPacketList *pkt1;

    /* duplicate the packet */
    if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
        return -1;

    pkt1 = av_malloc(sizeof(AVPacketList));
    if (!pkt1)
        return -1;
    pkt1->pkt = *pkt;
    pkt1->next = NULL;


    SDL_LockMutex(q->mutex);

    if (!q->last_pkt)
        q->first_pkt = pkt1;
    else
        q->last_pkt->next = pkt1;
    q->last_pkt = pkt1;
    q->nb_packets++;
    q->size += pkt1->pkt.size;
    /* XXX: should duplicate packet data in DV case */
    SDL_CondSignal(q->cond);

    SDL_UnlockMutex(q->mutex);
    return 0;
}

static void packet_queue_abort(PacketQueue *q)
{
    SDL_LockMutex(q->mutex);

    q->abort_request = 1;

    SDL_CondSignal(q->cond);

    SDL_UnlockMutex(q->mutex);
}

/* return < 0 if aborted, 0 if no packet and > 0 if packet.  */
static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
{
    AVPacketList *pkt1;
    int ret;

    SDL_LockMutex(q->mutex);

    for(;;) {
        if (q->abort_request) {
            ret = -1;
            break;
        }

        pkt1 = q->first_pkt;
        if (pkt1) {
            q->first_pkt = pkt1->next;
            if (!q->first_pkt)
                q->last_pkt = NULL;
            q->nb_packets--;
            q->size -= pkt1->pkt.size;
            *pkt = pkt1->pkt;
            av_free(pkt1);
            ret = 1;
            break;
        } else if (!block) {
            ret = 0;
            break;
        } else {
            SDL_CondWait(q->cond, q->mutex);
        }
    }
    SDL_UnlockMutex(q->mutex);
    return ret;
}
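
/* Typical usage (sketch): the demuxer calls packet_queue_put() on the stream's
   queue for every packet it reads, while a decoder thread loops on
   packet_queue_get(q, &pkt, 1), which blocks until a packet is available and
   returns a negative value once packet_queue_abort() has been called. */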

static inline void fill_rectangle(SDL_Surface *screen,
                                  int x, int y, int w, int h, int color)
{
    SDL_Rect rect;
    rect.x = x;
    rect.y = y;
    rect.w = w;
    rect.h = h;
    SDL_FillRect(screen, &rect, color);
}

#if 0
/* draw only the border of a rectangle */
void fill_border(VideoState *s, int x, int y, int w, int h, int color)
{
    int w1, w2, h1, h2;

    /* fill the background */
    w1 = x;
    if (w1 < 0)
        w1 = 0;
    w2 = s->width - (x + w);
    if (w2 < 0)
        w2 = 0;
    h1 = y;
    if (h1 < 0)
        h1 = 0;
    h2 = s->height - (y + h);
    if (h2 < 0)
        h2 = 0;
    fill_rectangle(screen,
                   s->xleft, s->ytop,
                   w1, s->height,
                   color);
    fill_rectangle(screen,
                   s->xleft + s->width - w2, s->ytop,
                   w2, s->height,
                   color);
    fill_rectangle(screen,
                   s->xleft + w1, s->ytop,
                   s->width - w1 - w2, h1,
                   color);
    fill_rectangle(screen,
                   s->xleft + w1, s->ytop + s->height - h2,
                   s->width - w1 - w2, h2,
                   color);
}
#endif



#define SCALEBITS 10
#define ONE_HALF  (1 << (SCALEBITS - 1))
#define FIX(x)    ((int) ((x) * (1<<SCALEBITS) + 0.5))

#define RGB_TO_Y_CCIR(r, g, b) \
((FIX(0.29900*219.0/255.0) * (r) + FIX(0.58700*219.0/255.0) * (g) + \
  FIX(0.11400*219.0/255.0) * (b) + (ONE_HALF + (16 << SCALEBITS))) >> SCALEBITS)

#define RGB_TO_U_CCIR(r1, g1, b1, shift)\
(((- FIX(0.16874*224.0/255.0) * r1 - FIX(0.33126*224.0/255.0) * g1 +         \
     FIX(0.50000*224.0/255.0) * b1 + (ONE_HALF << shift) - 1) >> (SCALEBITS + shift)) + 128)

#define RGB_TO_V_CCIR(r1, g1, b1, shift)\
(((FIX(0.50000*224.0/255.0) * r1 - FIX(0.41869*224.0/255.0) * g1 -           \
   FIX(0.08131*224.0/255.0) * b1 + (ONE_HALF << shift) - 1) >> (SCALEBITS + shift)) + 128)

#define ALPHA_BLEND(a, oldp, newp, s)\
((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))

#define RGBA_IN(r, g, b, a, s)\
{\
    unsigned int v = ((const uint32_t *)(s))[0];\
    a = (v >> 24) & 0xff;\
    r = (v >> 16) & 0xff;\
    g = (v >> 8) & 0xff;\
    b = v & 0xff;\
}

#define YUVA_IN(y, u, v, a, s, pal)\
{\
    unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)s];\
    a = (val >> 24) & 0xff;\
    y = (val >> 16) & 0xff;\
    u = (val >> 8) & 0xff;\
    v = val & 0xff;\
}

#define YUVA_OUT(d, y, u, v, a)\
{\
    ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
}


#define BPP 1

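/* blend_subrect() alpha-blends one palettized subtitle rectangle onto a
   YUV 4:2:0 destination picture.  Luma is blended per pixel while chroma is
   handled two samples at a time (hence the separate odd row/column cases),
   since cb/cr are subsampled by two in each direction.  The rectangle's
   palette is assumed to have been converted to YCbCr already (see the
   subtitle thread below). */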
static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect)
{
    int wrap, wrap3, width2, skip2;
    int y, u, v, a, u1, v1, a1, w, h;
    uint8_t *lum, *cb, *cr;
    const uint8_t *p;
    const uint32_t *pal;

    lum = dst->data[0] + rect->y * dst->linesize[0];
    cb = dst->data[1] + (rect->y >> 1) * dst->linesize[1];
    cr = dst->data[2] + (rect->y >> 1) * dst->linesize[2];

    width2 = (rect->w + 1) >> 1;
    skip2 = rect->x >> 1;
    wrap = dst->linesize[0];
    wrap3 = rect->linesize;
    p = rect->bitmap;
    pal = rect->rgba_palette;  /* Now in YCrCb! */

    if (rect->y & 1) {
        lum += rect->x;
        cb += skip2;
        cr += skip2;

        if (rect->x & 1) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
            cb++;
            cr++;
            lum++;
            p += BPP;
        }
        for(w = rect->w - (rect->x & 1); w >= 2; w -= 2) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p + BPP, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
            cb++;
            cr++;
            p += 2 * BPP;
            lum += 2;
        }
        if (w) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
        }
        p += wrap3 + (wrap3 - rect->w * BPP);
        lum += wrap + (wrap - rect->w - rect->x);
        cb += dst->linesize[1] - width2 - skip2;
        cr += dst->linesize[2] - width2 - skip2;
    }
    for(h = rect->h - (rect->y & 1); h >= 2; h -= 2) {
        lum += rect->x;
        cb += skip2;
        cr += skip2;

        if (rect->x & 1) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            p += wrap3;
            lum += wrap;
            YUVA_IN(y, u, v, a, p, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
            cb++;
            cr++;
            p += -wrap3 + BPP;
            lum += -wrap + 1;
        }
        for(w = rect->w - (rect->x & 1); w >= 2; w -= 2) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
            p += wrap3;
            lum += wrap;

            YUVA_IN(y, u, v, a, p, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);

            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);

            cb++;
            cr++;
            p += -wrap3 + 2 * BPP;
            lum += -wrap + 2;
        }
        if (w) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            p += wrap3;
            lum += wrap;
            YUVA_IN(y, u, v, a, p, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
            cb++;
            cr++;
            p += -wrap3 + BPP;
            lum += -wrap + 1;
        }
        p += wrap3 + (wrap3 - rect->w * BPP);
        lum += wrap + (wrap - rect->w - rect->x);
        cb += dst->linesize[1] - width2 - skip2;
        cr += dst->linesize[2] - width2 - skip2;
    }
    /* handle odd height */
    if (h) {
        lum += rect->x;
        cb += skip2;
        cr += skip2;

        if (rect->x & 1) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
            cb++;
            cr++;
            lum++;
            p += BPP;
        }
        for(w = rect->w - (rect->x & 1); w >= 2; w -= 2) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p + BPP, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
            cb++;
            cr++;
            p += 2 * BPP;
            lum += 2;
        }
        if (w) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
        }
    }
}

static void free_subpicture(SubPicture *sp)
{
    int i;

    for (i = 0; i < sp->sub.num_rects; i++)
    {
        av_free(sp->sub.rects[i].bitmap);
        av_free(sp->sub.rects[i].rgba_palette);
    }

    av_free(sp->sub.rects);

    memset(&sp->sub, 0, sizeof(AVSubtitle));
}

static void video_image_display(VideoState *is)
{
    VideoPicture *vp;
    SubPicture *sp;
    AVPicture pict;
    float aspect_ratio;
    int width, height, x, y;
    SDL_Rect rect;
    int i;

    vp = &is->pictq[is->pictq_rindex];
    if (vp->bmp) {
        /* XXX: use variable in the frame */
        if (is->video_st->codec->sample_aspect_ratio.num == 0)
            aspect_ratio = 0;
        else
            aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio)
                * is->video_st->codec->width / is->video_st->codec->height;
        if (aspect_ratio <= 0.0)
            aspect_ratio = (float)is->video_st->codec->width /
                (float)is->video_st->codec->height;
        /* if an active format is indicated, then it overrides the
           mpeg format */
#if 0
        if (is->video_st->codec->dtg_active_format != is->dtg_active_format) {
            is->dtg_active_format = is->video_st->codec->dtg_active_format;
            printf("dtg_active_format=%d\n", is->dtg_active_format);
        }
#endif
#if 0
        switch(is->video_st->codec->dtg_active_format) {
        case FF_DTG_AFD_SAME:
        default:
            /* nothing to do */
            break;
        case FF_DTG_AFD_4_3:
            aspect_ratio = 4.0 / 3.0;
            break;
        case FF_DTG_AFD_16_9:
            aspect_ratio = 16.0 / 9.0;
            break;
        case FF_DTG_AFD_14_9:
            aspect_ratio = 14.0 / 9.0;
            break;
        case FF_DTG_AFD_4_3_SP_14_9:
            aspect_ratio = 14.0 / 9.0;
            break;
        case FF_DTG_AFD_16_9_SP_14_9:
            aspect_ratio = 14.0 / 9.0;
            break;
        case FF_DTG_AFD_SP_4_3:
            aspect_ratio = 4.0 / 3.0;
            break;
        }
#endif

        if (is->subtitle_st)
        {
            if (is->subpq_size > 0)
            {
                sp = &is->subpq[is->subpq_rindex];

                if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000))
                {
                    SDL_LockYUVOverlay (vp->bmp);

                    pict.data[0] = vp->bmp->pixels[0];
                    pict.data[1] = vp->bmp->pixels[2];
                    pict.data[2] = vp->bmp->pixels[1];

                    pict.linesize[0] = vp->bmp->pitches[0];
                    pict.linesize[1] = vp->bmp->pitches[2];
                    pict.linesize[2] = vp->bmp->pitches[1];

                    for (i = 0; i < sp->sub.num_rects; i++)
                        blend_subrect(&pict, &sp->sub.rects[i]);

                    SDL_UnlockYUVOverlay (vp->bmp);
                }
            }
        }


        /* XXX: we suppose the screen has a 1.0 pixel ratio */
        height = is->height;
        width = ((int)rint(height * aspect_ratio)) & -3;
        if (width > is->width) {
            width = is->width;
            height = ((int)rint(width / aspect_ratio)) & -3;
        }
        x = (is->width - width) / 2;
        y = (is->height - height) / 2;
        if (!is->no_background) {
            /* fill the background */
            //            fill_border(is, x, y, width, height, QERGB(0x00, 0x00, 0x00));
        } else {
            is->no_background = 0;
        }
        rect.x = is->xleft + x;
        rect.y = is->ytop + y;
        rect.w = width;
        rect.h = height;
        SDL_DisplayYUVOverlay(vp->bmp, &rect);
    } else {
#if 0
        fill_rectangle(screen,
                       is->xleft, is->ytop, is->width, is->height,
                       QERGB(0x00, 0x00, 0x00));
#endif
    }
}
751

    
752
static inline int compute_mod(int a, int b)
753
{
754
    a = a % b;
755
    if (a >= 0)
756
        return a;
757
    else
758
        return a + b;
759
}
760

    
761
static void video_audio_display(VideoState *s)
762
{
763
    int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
764
    int ch, channels, h, h2, bgcolor, fgcolor;
    int64_t time_diff;
766

    
767
    /* compute display index : center on currently output samples */
768
    channels = s->audio_st->codec->channels;
769
    nb_display_channels = channels;
770
    if (!s->paused) {
771
        n = 2 * channels;
772
        delay = audio_write_get_buf_size(s);
773
        delay /= n;
774

    
775
        /* to be more precise, we take into account the time spent since
776
           the last buffer computation */
777
        if (audio_callback_time) {
778
            time_diff = av_gettime() - audio_callback_time;
779
            delay += (time_diff * s->audio_st->codec->sample_rate) / 1000000;
780
        }
781

    
782
        delay -= s->width / 2;
783
        if (delay < s->width)
784
            delay = s->width;
785

    
786
        i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
787

    
788
        h= INT_MIN;
789
        for(i=0; i<1000; i+=channels){
790
            int idx= (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
791
            int a= s->sample_array[idx];
792
            int b= s->sample_array[(idx + 4*channels)%SAMPLE_ARRAY_SIZE];
793
            int c= s->sample_array[(idx + 5*channels)%SAMPLE_ARRAY_SIZE];
794
            int d= s->sample_array[(idx + 9*channels)%SAMPLE_ARRAY_SIZE];
795
            int score= a-d;
796
            if(h<score && (b^c)<0){
797
                h= score;
798
                i_start= idx;
799
            }
800
        }
801

    
802
        s->last_i_start = i_start;
803
    } else {
804
        i_start = s->last_i_start;
805
    }
806

    
807
    bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
808
    fill_rectangle(screen,
809
                   s->xleft, s->ytop, s->width, s->height,
810
                   bgcolor);
811

    
812
    fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);
813

    
814
    /* total height for one channel */
815
    h = s->height / nb_display_channels;
816
    /* graph height / 2 */
817
    h2 = (h * 9) / 20;
818
    for(ch = 0;ch < nb_display_channels; ch++) {
819
        i = i_start + ch;
820
        y1 = s->ytop + ch * h + (h / 2); /* position of center line */
821
        for(x = 0; x < s->width; x++) {
822
            y = (s->sample_array[i] * h2) >> 15;
823
            if (y < 0) {
824
                y = -y;
825
                ys = y1 - y;
826
            } else {
827
                ys = y1;
828
            }
829
            fill_rectangle(screen,
830
                           s->xleft + x, ys, 1, y,
831
                           fgcolor);
832
            i += channels;
833
            if (i >= SAMPLE_ARRAY_SIZE)
834
                i -= SAMPLE_ARRAY_SIZE;
835
        }
836
    }
837

    
838
    fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);
839

    
840
    for(ch = 1;ch < nb_display_channels; ch++) {
841
        y = s->ytop + ch * h;
842
        fill_rectangle(screen,
843
                       s->xleft, y, s->width, 1,
844
                       fgcolor);
845
    }
846
    SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
847
}
848

    
849
static int video_open(VideoState *is){
850
    int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
851
    int w,h;
852

    
853
    if(is_full_screen) flags |= SDL_FULLSCREEN;
854
    else               flags |= SDL_RESIZABLE;
855

    
856
    if (is_full_screen && fs_screen_width) {
857
        w = fs_screen_width;
858
        h = fs_screen_height;
859
    } else if(!is_full_screen && screen_width){
860
        w = screen_width;
861
        h = screen_height;
862
    }else if (is->video_st && is->video_st->codec->width){
863
        w = is->video_st->codec->width;
864
        h = is->video_st->codec->height;
865
    } else {
866
        w = 640;
867
        h = 480;
868
    }
869
#ifndef CONFIG_DARWIN
870
    screen = SDL_SetVideoMode(w, h, 0, flags);
871
#else
872
    /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
873
    screen = SDL_SetVideoMode(w, h, 24, flags);
874
#endif
875
    if (!screen) {
876
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
877
        return -1;
878
    }
879
    SDL_WM_SetCaption("FFplay", "FFplay");
880

    
881
    is->width = screen->w;
882
    is->height = screen->h;
883

    
884
    return 0;
885
}
886

    
887
/* display the current picture, if any */
888
static void video_display(VideoState *is)
889
{
890
    if(!screen)
891
        video_open(cur_stream);
892
    if (is->audio_st && is->show_audio)
893
        video_audio_display(is);
894
    else if (is->video_st)
895
        video_image_display(is);
896
}
897

    
898
static Uint32 sdl_refresh_timer_cb(Uint32 interval, void *opaque)
899
{
900
    SDL_Event event;
901
    event.type = FF_REFRESH_EVENT;
902
    event.user.data1 = opaque;
903
    SDL_PushEvent(&event);
904
    return 0; /* 0 means stop timer */
905
}
906

    
907
/* schedule a video refresh in 'delay' ms */
908
static void schedule_refresh(VideoState *is, int delay)
909
{
910
    SDL_AddTimer(delay, sdl_refresh_timer_cb, is);
911
}
912

    
913
/* get the current audio clock value */
914
static double get_audio_clock(VideoState *is)
915
{
916
    double pts;
917
    int hw_buf_size, bytes_per_sec;
918
    pts = is->audio_clock;
919
    hw_buf_size = audio_write_get_buf_size(is);
920
    bytes_per_sec = 0;
921
    if (is->audio_st) {
922
        bytes_per_sec = is->audio_st->codec->sample_rate *
923
            2 * is->audio_st->codec->channels;
924
    }
925
    if (bytes_per_sec)
926
        pts -= (double)hw_buf_size / bytes_per_sec;
927
    return pts;
928
}
929

    
930
/* get the current video clock value */
931
static double get_video_clock(VideoState *is)
932
{
933
    double delta;
934
    if (is->paused) {
935
        delta = 0;
936
    } else {
937
        delta = (av_gettime() - is->video_current_pts_time) / 1000000.0;
938
    }
939
    return is->video_current_pts + delta;
940
}
941

    
942
/* get the current external clock value */
943
static double get_external_clock(VideoState *is)
944
{
945
    int64_t ti;
946
    ti = av_gettime();
947
    return is->external_clock + ((ti - is->external_clock_time) * 1e-6);
948
}
949

    
950
/* get the current master clock value */
951
static double get_master_clock(VideoState *is)
952
{
953
    double val;
954

    
955
    if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
956
        if (is->video_st)
957
            val = get_video_clock(is);
958
        else
959
            val = get_audio_clock(is);
960
    } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
961
        if (is->audio_st)
962
            val = get_audio_clock(is);
963
        else
964
            val = get_video_clock(is);
965
    } else {
966
        val = get_external_clock(is);
967
    }
968
    return val;
969
}
970

    
971
/* seek in the stream */
972
static void stream_seek(VideoState *is, int64_t pos, int rel)
973
{
974
    if (!is->seek_req) {
975
        is->seek_pos = pos;
976
        is->seek_flags = rel < 0 ? AVSEEK_FLAG_BACKWARD : 0;
977
        if (seek_by_bytes)
978
            is->seek_flags |= AVSEEK_FLAG_BYTE;
979
        is->seek_req = 1;
980
    }
981
}
982

    
983
/* pause or resume the video */
984
static void stream_pause(VideoState *is)
985
{
986
    is->paused = !is->paused;
987
    if (is->paused) {
988
        is->video_current_pts = get_video_clock(is);
989
    }
990
}
991

    
992
/* called to display each frame */
993
static void video_refresh_timer(void *opaque)
994
{
995
    VideoState *is = opaque;
996
    VideoPicture *vp;
997
    double actual_delay, delay, sync_threshold, ref_clock, diff;
998

    
999
    SubPicture *sp, *sp2;
1000

    
1001
    if (is->video_st) {
1002
        if (is->pictq_size == 0) {
1003
            /* if no picture, need to wait */
1004
            schedule_refresh(is, 1);
1005
        } else {
1006
            /* dequeue the picture */
1007
            vp = &is->pictq[is->pictq_rindex];
1008

    
1009
            /* update current video pts */
1010
            is->video_current_pts = vp->pts;
1011
            is->video_current_pts_time = av_gettime();
1012

    
1013
            /* compute nominal delay */
1014
            delay = vp->pts - is->frame_last_pts;
1015
            if (delay <= 0 || delay >= 1.0) {
1016
                /* if incorrect delay, use previous one */
1017
                delay = is->frame_last_delay;
1018
            }
1019
            is->frame_last_delay = delay;
1020
            is->frame_last_pts = vp->pts;
1021

    
1022
            /* update delay to follow master synchronisation source */
1023
            if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
1024
                 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1025
                /* if video is slave, we try to correct big delays by
1026
                   duplicating or deleting a frame */
1027
                ref_clock = get_master_clock(is);
1028
                diff = vp->pts - ref_clock;
1029

    
1030
                /* skip or repeat frame. We take into account the
1031
                   delay to compute the threshold. I still don't know
1032
                   if it is the best guess */
1033
                sync_threshold = AV_SYNC_THRESHOLD;
1034
                if (delay > sync_threshold)
1035
                    sync_threshold = delay;
1036
                if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
1037
                    if (diff <= -sync_threshold)
1038
                        delay = 0;
1039
                    else if (diff >= sync_threshold)
1040
                        delay = 2 * delay;
1041
                }
1042
            }
1043

    
1044
            is->frame_timer += delay;
1045
            /* compute the REAL delay (we need to do that to avoid
1046
               long term errors */
1047
            actual_delay = is->frame_timer - (av_gettime() / 1000000.0);
1048
            if (actual_delay < 0.010) {
1049
                /* XXX: should skip picture */
1050
                actual_delay = 0.010;
1051
            }
1052
            /* launch timer for next picture */
1053
            schedule_refresh(is, (int)(actual_delay * 1000 + 0.5));
1054

    
1055
#if defined(DEBUG_SYNC)
1056
            printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
1057
                   delay, actual_delay, vp->pts, -diff);
1058
#endif
1059

    
1060
            if(is->subtitle_st) {
1061
                if (is->subtitle_stream_changed) {
1062
                    SDL_LockMutex(is->subpq_mutex);
1063

    
1064
                    while (is->subpq_size) {
1065
                        free_subpicture(&is->subpq[is->subpq_rindex]);
1066

    
1067
                        /* update queue size and signal for next picture */
1068
                        if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1069
                            is->subpq_rindex = 0;
1070

    
1071
                        is->subpq_size--;
1072
                    }
1073
                    is->subtitle_stream_changed = 0;
1074

    
1075
                    SDL_CondSignal(is->subpq_cond);
1076
                    SDL_UnlockMutex(is->subpq_mutex);
1077
                } else {
1078
                    if (is->subpq_size > 0) {
1079
                        sp = &is->subpq[is->subpq_rindex];
1080

    
1081
                        if (is->subpq_size > 1)
1082
                            sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
1083
                        else
1084
                            sp2 = NULL;
1085

    
1086
                        if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
1087
                                || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
1088
                        {
1089
                            free_subpicture(sp);
1090

    
1091
                            /* update queue size and signal for next picture */
1092
                            if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1093
                                is->subpq_rindex = 0;
1094

    
1095
                            SDL_LockMutex(is->subpq_mutex);
1096
                            is->subpq_size--;
1097
                            SDL_CondSignal(is->subpq_cond);
1098
                            SDL_UnlockMutex(is->subpq_mutex);
1099
                        }
1100
                    }
1101
                }
1102
            }
1103

    
1104
            /* display picture */
1105
            video_display(is);
1106

    
1107
            /* update queue size and signal for next picture */
1108
            if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1109
                is->pictq_rindex = 0;
1110

    
1111
            SDL_LockMutex(is->pictq_mutex);
1112
            is->pictq_size--;
1113
            SDL_CondSignal(is->pictq_cond);
1114
            SDL_UnlockMutex(is->pictq_mutex);
1115
        }
1116
    } else if (is->audio_st) {
1117
        /* draw the next audio frame */
1118

    
1119
        schedule_refresh(is, 40);
1120

    
1121
        /* if only audio stream, then display the audio bars (better
           than nothing, just to test the implementation) */
1123

    
1124
        /* display picture */
1125
        video_display(is);
1126
    } else {
1127
        schedule_refresh(is, 100);
1128
    }
1129
    if (show_status) {
1130
        static int64_t last_time;
1131
        int64_t cur_time;
1132
        int aqsize, vqsize, sqsize;
1133
        double av_diff;
1134

    
1135
        cur_time = av_gettime();
1136
        if (!last_time || (cur_time - last_time) >= 500 * 1000) {
1137
            aqsize = 0;
1138
            vqsize = 0;
1139
            sqsize = 0;
1140
            if (is->audio_st)
1141
                aqsize = is->audioq.size;
1142
            if (is->video_st)
1143
                vqsize = is->videoq.size;
1144
            if (is->subtitle_st)
1145
                sqsize = is->subtitleq.size;
1146
            av_diff = 0;
1147
            if (is->audio_st && is->video_st)
1148
                av_diff = get_audio_clock(is) - get_video_clock(is);
1149
            printf("%7.2f A-V:%7.3f aq=%5dKB vq=%5dKB sq=%5dB    \r",
1150
                   get_master_clock(is), av_diff, aqsize / 1024, vqsize / 1024, sqsize);
1151
            fflush(stdout);
1152
            last_time = cur_time;
1153
        }
1154
    }
1155
}
1156

    
1157
/* allocate a picture (needs to do that in main thread to avoid
1158
   potential locking problems */
1159
static void alloc_picture(void *opaque)
1160
{
1161
    VideoState *is = opaque;
1162
    VideoPicture *vp;
1163

    
1164
    vp = &is->pictq[is->pictq_windex];
1165

    
1166
    if (vp->bmp)
1167
        SDL_FreeYUVOverlay(vp->bmp);
1168

    
1169
#if 0
1170
    /* XXX: use generic function */
1171
    /* XXX: disable overlay if no hardware acceleration or if RGB format */
1172
    switch(is->video_st->codec->pix_fmt) {
1173
    case PIX_FMT_YUV420P:
1174
    case PIX_FMT_YUV422P:
1175
    case PIX_FMT_YUV444P:
1176
    case PIX_FMT_YUV422:
1177
    case PIX_FMT_YUV410P:
1178
    case PIX_FMT_YUV411P:
1179
        is_yuv = 1;
1180
        break;
1181
    default:
1182
        is_yuv = 0;
1183
        break;
1184
    }
1185
#endif
1186
    vp->bmp = SDL_CreateYUVOverlay(is->video_st->codec->width,
1187
                                   is->video_st->codec->height,
1188
                                   SDL_YV12_OVERLAY,
1189
                                   screen);
1190
    vp->width = is->video_st->codec->width;
1191
    vp->height = is->video_st->codec->height;
1192

    
1193
    SDL_LockMutex(is->pictq_mutex);
1194
    vp->allocated = 1;
1195
    SDL_CondSignal(is->pictq_cond);
1196
    SDL_UnlockMutex(is->pictq_mutex);
1197
}
1198

    
1199
/**
1200
 *
1201
 * @param pts the dts of the pkt / pts of the frame and guessed if not known
1202
 */
1203
static int queue_picture(VideoState *is, AVFrame *src_frame, double pts)
1204
{
1205
    VideoPicture *vp;
1206
    int dst_pix_fmt;
1207
    AVPicture pict;
1208
    static struct SwsContext *img_convert_ctx;
1209

    
1210
    /* wait until we have space to put a new picture */
1211
    SDL_LockMutex(is->pictq_mutex);
1212
    while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
1213
           !is->videoq.abort_request) {
1214
        SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1215
    }
1216
    SDL_UnlockMutex(is->pictq_mutex);
1217

    
1218
    if (is->videoq.abort_request)
1219
        return -1;
1220

    
1221
    vp = &is->pictq[is->pictq_windex];
1222

    
1223
    /* alloc or resize hardware picture buffer */
1224
    if (!vp->bmp ||
1225
        vp->width != is->video_st->codec->width ||
1226
        vp->height != is->video_st->codec->height) {
1227
        SDL_Event event;
1228

    
1229
        vp->allocated = 0;
1230

    
1231
        /* the allocation must be done in the main thread to avoid
1232
           locking problems */
1233
        event.type = FF_ALLOC_EVENT;
1234
        event.user.data1 = is;
1235
        SDL_PushEvent(&event);
1236

    
1237
        /* wait until the picture is allocated */
1238
        SDL_LockMutex(is->pictq_mutex);
1239
        while (!vp->allocated && !is->videoq.abort_request) {
1240
            SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1241
        }
1242
        SDL_UnlockMutex(is->pictq_mutex);
1243

    
1244
        if (is->videoq.abort_request)
1245
            return -1;
1246
    }
1247

    
1248
    /* if the frame is not skipped, then display it */
1249
    if (vp->bmp) {
1250
        /* get a pointer on the bitmap */
1251
        SDL_LockYUVOverlay (vp->bmp);
1252

    
1253
        dst_pix_fmt = PIX_FMT_YUV420P;
1254
        pict.data[0] = vp->bmp->pixels[0];
1255
        pict.data[1] = vp->bmp->pixels[2];
1256
        pict.data[2] = vp->bmp->pixels[1];
1257

    
1258
        pict.linesize[0] = vp->bmp->pitches[0];
1259
        pict.linesize[1] = vp->bmp->pitches[2];
1260
        pict.linesize[2] = vp->bmp->pitches[1];
1261
        if (img_convert_ctx == NULL) {
1262
            img_convert_ctx = sws_getContext(is->video_st->codec->width,
1263
                    is->video_st->codec->height, is->video_st->codec->pix_fmt,
1264
                    is->video_st->codec->width, is->video_st->codec->height,
1265
                    dst_pix_fmt, sws_flags, NULL, NULL, NULL);
1266
            if (img_convert_ctx == NULL) {
1267
                fprintf(stderr, "Cannot initialize the conversion context\n");
1268
                exit(1);
1269
            }
1270
        }
1271
        sws_scale(img_convert_ctx, src_frame->data, src_frame->linesize,
1272
                  0, is->video_st->codec->height, pict.data, pict.linesize);
1273
        /* update the bitmap content */
1274
        SDL_UnlockYUVOverlay(vp->bmp);
1275

    
1276
        vp->pts = pts;
1277

    
1278
        /* now we can update the picture count */
1279
        if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
1280
            is->pictq_windex = 0;
1281
        SDL_LockMutex(is->pictq_mutex);
1282
        is->pictq_size++;
1283
        SDL_UnlockMutex(is->pictq_mutex);
1284
    }
1285
    return 0;
1286
}
1287

    
1288
/**
1289
 * compute the exact PTS for the picture if it is omitted in the stream
1290
 * @param pts1 the dts of the pkt / pts of the frame
1291
 */
1292
static int output_picture2(VideoState *is, AVFrame *src_frame, double pts1)
1293
{
1294
    double frame_delay, pts;
1295

    
1296
    pts = pts1;
1297

    
1298
    if (pts != 0) {
1299
        /* update video clock with pts, if present */
1300
        is->video_clock = pts;
1301
    } else {
1302
        pts = is->video_clock;
1303
    }
1304
    /* update video clock for next frame */
1305
    frame_delay = av_q2d(is->video_st->codec->time_base);
1306
    /* for MPEG2, the frame can be repeated, so we update the
1307
       clock accordingly */
1308
    frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
1309
    is->video_clock += frame_delay;
1310

    
1311
#if defined(DEBUG_SYNC) && 0
1312
    {
1313
        int ftype;
1314
        if (src_frame->pict_type == FF_B_TYPE)
1315
            ftype = 'B';
1316
        else if (src_frame->pict_type == FF_I_TYPE)
1317
            ftype = 'I';
1318
        else
1319
            ftype = 'P';
1320
        printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
1321
               ftype, pts, pts1);
1322
    }
1323
#endif
1324
    return queue_picture(is, src_frame, pts);
1325
}
1326

    
1327
static int video_thread(void *arg)
1328
{
1329
    VideoState *is = arg;
1330
    AVPacket pkt1, *pkt = &pkt1;
1331
    int len1, got_picture;
1332
    AVFrame *frame= avcodec_alloc_frame();
1333
    double pts;
1334

    
1335
    for(;;) {
1336
        while (is->paused && !is->videoq.abort_request) {
1337
            SDL_Delay(10);
1338
        }
1339
        if (packet_queue_get(&is->videoq, pkt, 1) < 0)
1340
            break;
1341

    
1342
        if(pkt->data == flush_pkt.data){
1343
            avcodec_flush_buffers(is->video_st->codec);
1344
            continue;
1345
        }
1346

    
1347
        /* NOTE: ipts is the PTS of the _first_ picture beginning in
1348
           this packet, if any */
1349
        pts = 0;
1350
        if (pkt->dts != AV_NOPTS_VALUE)
1351
            pts = av_q2d(is->video_st->time_base)*pkt->dts;
1352

    
1353
            len1 = avcodec_decode_video(is->video_st->codec,
1354
                                        frame, &got_picture,
1355
                                        pkt->data, pkt->size);
1356
//            if (len1 < 0)
1357
//                break;
1358
            if (got_picture) {
1359
                if (output_picture2(is, frame, pts) < 0)
1360
                    goto the_end;
1361
            }
1362
        av_free_packet(pkt);
1363
        if (step)
1364
            if (cur_stream)
1365
                stream_pause(cur_stream);
1366
    }
1367
 the_end:
1368
    av_free(frame);
1369
    return 0;
1370
}
1371

    
1372
static int subtitle_thread(void *arg)
1373
{
1374
    VideoState *is = arg;
1375
    SubPicture *sp;
1376
    AVPacket pkt1, *pkt = &pkt1;
1377
    int len1, got_subtitle;
1378
    double pts;
1379
    int i, j;
1380
    int r, g, b, y, u, v, a;
1381

    
1382
    for(;;) {
1383
        while (is->paused && !is->subtitleq.abort_request) {
1384
            SDL_Delay(10);
1385
        }
1386
        if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
1387
            break;
1388

    
1389
        if(pkt->data == flush_pkt.data){
1390
            avcodec_flush_buffers(is->subtitle_st->codec);
1391
            continue;
1392
        }
1393
        SDL_LockMutex(is->subpq_mutex);
1394
        while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
1395
               !is->subtitleq.abort_request) {
1396
            SDL_CondWait(is->subpq_cond, is->subpq_mutex);
1397
        }
1398
        SDL_UnlockMutex(is->subpq_mutex);
1399

    
1400
        if (is->subtitleq.abort_request)
1401
            goto the_end;
1402

    
1403
        sp = &is->subpq[is->subpq_windex];
1404

    
1405
       /* NOTE: ipts is the PTS of the _first_ picture beginning in
1406
           this packet, if any */
1407
        pts = 0;
1408
        if (pkt->pts != AV_NOPTS_VALUE)
1409
            pts = av_q2d(is->subtitle_st->time_base)*pkt->pts;
1410

    
1411
        len1 = avcodec_decode_subtitle(is->subtitle_st->codec,
1412
                                    &sp->sub, &got_subtitle,
1413
                                    pkt->data, pkt->size);
1414
//            if (len1 < 0)
1415
//                break;
1416
        if (got_subtitle && sp->sub.format == 0) {
1417
            sp->pts = pts;
1418

    
1419
            for (i = 0; i < sp->sub.num_rects; i++)
1420
            {
1421
                for (j = 0; j < sp->sub.rects[i].nb_colors; j++)
1422
                {
1423
                    RGBA_IN(r, g, b, a, sp->sub.rects[i].rgba_palette + j);
1424
                    y = RGB_TO_Y_CCIR(r, g, b);
1425
                    u = RGB_TO_U_CCIR(r, g, b, 0);
1426
                    v = RGB_TO_V_CCIR(r, g, b, 0);
1427
                    YUVA_OUT(sp->sub.rects[i].rgba_palette + j, y, u, v, a);
1428
                }
1429
            }
1430

    
1431
            /* now we can update the picture count */
1432
            if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
1433
                is->subpq_windex = 0;
1434
            SDL_LockMutex(is->subpq_mutex);
1435
            is->subpq_size++;
1436
            SDL_UnlockMutex(is->subpq_mutex);
1437
        }
1438
        av_free_packet(pkt);
1439
//        if (step)
1440
//            if (cur_stream)
1441
//                stream_pause(cur_stream);
1442
    }
1443
 the_end:
1444
    return 0;
1445
}
1446

    
1447
/* copy samples for viewing in editor window */
1448
static void update_sample_display(VideoState *is, short *samples, int samples_size)
1449
{
1450
    int size, len, channels;
1451

    
1452
    channels = is->audio_st->codec->channels;
1453

    
1454
    size = samples_size / sizeof(short);
1455
    while (size > 0) {
1456
        len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
1457
        if (len > size)
1458
            len = size;
1459
        memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
1460
        samples += len;
1461
        is->sample_array_index += len;
1462
        if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
1463
            is->sample_array_index = 0;
1464
        size -= len;
1465
    }
1466
}
1467

    
1468
/* return the new audio buffer size (samples can be added or deleted
1469
   to get better sync if video or external master clock) */
1470
static int synchronize_audio(VideoState *is, short *samples,
1471
                             int samples_size1, double pts)
1472
{
1473
    int n, samples_size;
1474
    double ref_clock;
1475

    
1476
    n = 2 * is->audio_st->codec->channels;
1477
    samples_size = samples_size1;
1478

    
1479
    /* if not master, then we try to remove or add samples to correct the clock */
1480
    if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
1481
         is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1482
        double diff, avg_diff;
1483
        int wanted_size, min_size, max_size, nb_samples;
1484

    
1485
        ref_clock = get_master_clock(is);
1486
        diff = get_audio_clock(is) - ref_clock;
1487

    
1488
        if (diff < AV_NOSYNC_THRESHOLD) {
1489
            is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
1490
            if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
1491
                /* not enough measures to have a correct estimate */
1492
                is->audio_diff_avg_count++;
1493
            } else {
1494
                /* estimate the A-V difference */
1495
                avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
1496

    
1497
                if (fabs(avg_diff) >= is->audio_diff_threshold) {
1498
                    wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n);
1499
                    nb_samples = samples_size / n;
1500

    
1501
                    min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1502
                    max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1503
                    if (wanted_size < min_size)
1504
                        wanted_size = min_size;
1505
                    else if (wanted_size > max_size)
1506
                        wanted_size = max_size;
1507

    
1508
                    /* add or remove samples to correction the synchro */
1509
                    if (wanted_size < samples_size) {
1510
                        /* remove samples */
1511
                        samples_size = wanted_size;
1512
                    } else if (wanted_size > samples_size) {
1513
                        uint8_t *samples_end, *q;
1514
                        int nb;
1515

    
1516
                        /* add samples */
1517
                        nb = (samples_size - wanted_size);
1518
                        samples_end = (uint8_t *)samples + samples_size - n;
1519
                        q = samples_end + n;
1520
                        while (nb > 0) {
1521
                            memcpy(q, samples_end, n);
1522
                            q += n;
1523
                            nb -= n;
1524
                        }
1525
                        samples_size = wanted_size;
1526
                    }
1527
                }
1528
#if 0
1529
                printf("diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
1530
                       diff, avg_diff, samples_size - samples_size1,
1531
                       is->audio_clock, is->video_clock, is->audio_diff_threshold);
1532
#endif
1533
            }
1534
        } else {
1535
            /* too big difference : may be initial PTS errors, so
1536
               reset A-V filter */
1537
            is->audio_diff_avg_count = 0;
1538
            is->audio_diff_cum = 0;
1539
        }
1540
    }
1541

    
1542
    return samples_size;
1543
}
1544

    
1545
/* decode one audio frame and return its uncompressed size */
1546
static int audio_decode_frame(VideoState *is, uint8_t *audio_buf, double *pts_ptr)
1547
{
1548
    AVPacket *pkt = &is->audio_pkt;
1549
    int n, len1, data_size;
1550
    double pts;
1551

    
1552
    for(;;) {
1553
        /* NOTE: the audio packet can contain several frames */
1554
        while (is->audio_pkt_size > 0) {
1555
            len1 = avcodec_decode_audio(is->audio_st->codec,
1556
                                        (int16_t *)audio_buf, &data_size,
1557
                                        is->audio_pkt_data, is->audio_pkt_size);
1558
            if (len1 < 0) {
1559
                /* if error, we skip the frame */
1560
                is->audio_pkt_size = 0;
1561
                break;
1562
            }
1563

    
1564
            is->audio_pkt_data += len1;
1565
            is->audio_pkt_size -= len1;
1566
            if (data_size <= 0)
1567
                continue;
1568
            /* if no pts, then compute it */
1569
            pts = is->audio_clock;
1570
            *pts_ptr = pts;
1571
            n = 2 * is->audio_st->codec->channels;
1572
            is->audio_clock += (double)data_size /
1573
                (double)(n * is->audio_st->codec->sample_rate);
1574
#if defined(DEBUG_SYNC)
1575
            {
1576
                static double last_clock;
1577
                printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
1578
                       is->audio_clock - last_clock,
1579
                       is->audio_clock, pts);
1580
                last_clock = is->audio_clock;
1581
            }
1582
#endif
1583
            return data_size;
1584
        }
1585

    
1586
        /* free the current packet */
1587
        if (pkt->data)
1588
            av_free_packet(pkt);
1589

    
1590
        if (is->paused || is->audioq.abort_request) {
1591
            return -1;
1592
        }
1593

    
1594
        /* read next packet */
1595
        if (packet_queue_get(&is->audioq, pkt, 1) < 0)
1596
            return -1;
1597
        if(pkt->data == flush_pkt.data){
1598
            avcodec_flush_buffers(is->audio_st->codec);
1599
            continue;
1600
        }
1601

    
1602
        is->audio_pkt_data = pkt->data;
1603
        is->audio_pkt_size = pkt->size;
1604

    
1605
        /* update the audio clock with the pts, if present */
1606
        if (pkt->pts != AV_NOPTS_VALUE) {
1607
            is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
1608
        }
1609
    }
1610
}
1611

    
1612
/* get the current audio output buffer size, in bytes. With SDL, we
1613
   cannot get precise buffer fullness information */
1614
static int audio_write_get_buf_size(VideoState *is)
1615
{
1616
    return is->audio_hw_buf_size - is->audio_buf_index;
1617
}
1618

    
1619

    
1620
/* prepare a new audio buffer */
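/* SDL invokes this callback from its own audio thread whenever the device
   needs len more bytes: each pass either copies what is left of the
   previously decoded buffer or decodes a new frame, and on a decode error a
   fixed 1024 bytes of silence are written so the device is never starved.
   audio_callback_time is recorded so the audio clock can be interpolated
   between callbacks. */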
1621
void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
1622
{
1623
    VideoState *is = opaque;
1624
    int audio_size, len1;
1625
    double pts;
1626

    
1627
    audio_callback_time = av_gettime();
1628

    
1629
    while (len > 0) {
1630
        if (is->audio_buf_index >= is->audio_buf_size) {
1631
           audio_size = audio_decode_frame(is, is->audio_buf, &pts);
1632
           if (audio_size < 0) {
1633
                /* if error, just output silence */
1634
               is->audio_buf_size = 1024;
1635
               memset(is->audio_buf, 0, is->audio_buf_size);
1636
           } else {
1637
               if (is->show_audio)
1638
                   update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
1639
               audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, audio_size,
1640
                                              pts);
1641
               is->audio_buf_size = audio_size;
1642
           }
1643
           is->audio_buf_index = 0;
1644
        }
1645
        len1 = is->audio_buf_size - is->audio_buf_index;
1646
        if (len1 > len)
1647
            len1 = len;
1648
        memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
1649
        len -= len1;
1650
        stream += len1;
1651
        is->audio_buf_index += len1;
1652
    }
1653
}
1654

    
1655
/* open a given stream. Return 0 if OK */
1656
static int stream_component_open(VideoState *is, int stream_index)
1657
{
1658
    AVFormatContext *ic = is->ic;
1659
    AVCodecContext *enc;
1660
    AVCodec *codec;
1661
    SDL_AudioSpec wanted_spec, spec;
1662

    
1663
    if (stream_index < 0 || stream_index >= ic->nb_streams)
1664
        return -1;
1665
    enc = ic->streams[stream_index]->codec;
1666

    
1667
    /* prepare audio output */
1668
    if (enc->codec_type == CODEC_TYPE_AUDIO) {
1669
        wanted_spec.freq = enc->sample_rate;
1670
        wanted_spec.format = AUDIO_S16SYS;
1671
        /* hack for AC3: downmix to at most 2 channels. XXX: remove this */
1672
        if (enc->channels > 2)
1673
            enc->channels = 2;
1674
        wanted_spec.channels = enc->channels;
1675
        wanted_spec.silence = 0;
1676
        wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
1677
        wanted_spec.callback = sdl_audio_callback;
1678
        wanted_spec.userdata = is;
1679
        if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
1680
            fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
1681
            return -1;
1682
        }
1683
        is->audio_hw_buf_size = spec.size;
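        /* spec.size is the obtained hardware buffer size in bytes, e.g.
           1024 samples x 2 channels x 2 bytes per sample = 4096 bytes */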
1684
    }
1685

    
1686
    codec = avcodec_find_decoder(enc->codec_id);
1687
    enc->debug_mv = debug_mv;
1688
    enc->debug = debug;
1689
    enc->workaround_bugs = workaround_bugs;
1690
    enc->lowres = lowres;
1691
    if(lowres) enc->flags |= CODEC_FLAG_EMU_EDGE;
1692
    enc->idct_algo= idct;
1693
    if(fast) enc->flags2 |= CODEC_FLAG2_FAST;
1694
    enc->skip_frame= skip_frame;
1695
    enc->skip_idct= skip_idct;
1696
    enc->skip_loop_filter= skip_loop_filter;
1697
    enc->error_resilience= error_resilience;
1698
    enc->error_concealment= error_concealment;
1699
    if (!codec ||
1700
        avcodec_open(enc, codec) < 0)
1701
        return -1;
1702
#if defined(HAVE_THREADS)
1703
    if(thread_count>1)
1704
        avcodec_thread_init(enc, thread_count);
1705
#endif
1706
    enc->thread_count= thread_count;
1707
    switch(enc->codec_type) {
1708
    case CODEC_TYPE_AUDIO:
1709
        is->audio_stream = stream_index;
1710
        is->audio_st = ic->streams[stream_index];
1711
        is->audio_buf_size = 0;
1712
        is->audio_buf_index = 0;
1713

    
1714
        /* init averaging filter */
1715
        is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
1716
        is->audio_diff_avg_count = 0;
1717
        /* since we do not have a precise enough measure of the audio fifo fullness,
1718
           we correct audio sync only if the error is larger than this threshold */
1719
        is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / enc->sample_rate;
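        /* with the defaults this means audio_diff_avg_coef = exp(log(0.01)/20)
           ~= 0.794, so differences older than about 20 callbacks contribute
           only ~1% of the running average, and at 44100 Hz the correction
           threshold is 2 * 1024 / 44100 ~= 46 ms */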
1720

    
1721
        memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
1722
        packet_queue_init(&is->audioq);
1723
        SDL_PauseAudio(0);
1724
        break;
1725
    case CODEC_TYPE_VIDEO:
1726
        is->video_stream = stream_index;
1727
        is->video_st = ic->streams[stream_index];
1728

    
1729
        is->frame_last_delay = 40e-3;
1730
        is->frame_timer = (double)av_gettime() / 1000000.0;
1731
        is->video_current_pts_time = av_gettime();
1732

    
1733
        packet_queue_init(&is->videoq);
1734
        is->video_tid = SDL_CreateThread(video_thread, is);
1735
        break;
1736
    case CODEC_TYPE_SUBTITLE:
1737
        is->subtitle_stream = stream_index;
1738
        is->subtitle_st = ic->streams[stream_index];
1739
        packet_queue_init(&is->subtitleq);
1740

    
1741
        is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
1742
        break;
1743
    default:
1744
        break;
1745
    }
1746
    return 0;
1747
}
1748

    
1749
static void stream_component_close(VideoState *is, int stream_index)
1750
{
1751
    AVFormatContext *ic = is->ic;
1752
    AVCodecContext *enc;
1753

    
1754
    if (stream_index < 0 || stream_index >= ic->nb_streams)
1755
        return;
1756
    enc = ic->streams[stream_index]->codec;
1757

    
1758
    switch(enc->codec_type) {
1759
    case CODEC_TYPE_AUDIO:
1760
        packet_queue_abort(&is->audioq);
1761

    
1762
        SDL_CloseAudio();
1763

    
1764
        packet_queue_end(&is->audioq);
1765
        break;
1766
    case CODEC_TYPE_VIDEO:
1767
        packet_queue_abort(&is->videoq);
1768

    
1769
        /* note: we also signal this mutex to make sure we unblock the
1770
           video thread in all cases */
1771
        SDL_LockMutex(is->pictq_mutex);
1772
        SDL_CondSignal(is->pictq_cond);
1773
        SDL_UnlockMutex(is->pictq_mutex);
1774

    
1775
        SDL_WaitThread(is->video_tid, NULL);
1776

    
1777
        packet_queue_end(&is->videoq);
1778
        break;
1779
    case CODEC_TYPE_SUBTITLE:
1780
        packet_queue_abort(&is->subtitleq);
1781

    
1782
        /* note: we also signal this mutex to make sure we unblock the
1783
           subtitle thread in all cases */
1784
        SDL_LockMutex(is->subpq_mutex);
1785
        is->subtitle_stream_changed = 1;
1786

    
1787
        SDL_CondSignal(is->subpq_cond);
1788
        SDL_UnlockMutex(is->subpq_mutex);
1789

    
1790
        SDL_WaitThread(is->subtitle_tid, NULL);
1791

    
1792
        packet_queue_end(&is->subtitleq);
1793
        break;
1794
    default:
1795
        break;
1796
    }
1797

    
1798
    avcodec_close(enc);
1799
    switch(enc->codec_type) {
1800
    case CODEC_TYPE_AUDIO:
1801
        is->audio_st = NULL;
1802
        is->audio_stream = -1;
1803
        break;
1804
    case CODEC_TYPE_VIDEO:
1805
        is->video_st = NULL;
1806
        is->video_stream = -1;
1807
        break;
1808
    case CODEC_TYPE_SUBTITLE:
1809
        is->subtitle_st = NULL;
1810
        is->subtitle_stream = -1;
1811
        break;
1812
    default:
1813
        break;
1814
    }
1815
}
1816

    
1817
static void dump_stream_info(const AVFormatContext *s)
1818
{
1819
    if (s->track != 0)
1820
        fprintf(stderr, "Track: %d\n", s->track);
1821
    if (s->title[0] != '\0')
1822
        fprintf(stderr, "Title: %s\n", s->title);
1823
    if (s->author[0] != '\0')
1824
        fprintf(stderr, "Author: %s\n", s->author);
1825
    if (s->copyright[0] != '\0')
1826
        fprintf(stderr, "Copyright: %s\n", s->copyright);
1827
    if (s->comment[0] != '\0')
1828
        fprintf(stderr, "Comment: %s\n", s->comment);
1829
    if (s->album[0] != '\0')
1830
        fprintf(stderr, "Album: %s\n", s->album);
1831
    if (s->year != 0)
1832
        fprintf(stderr, "Year: %d\n", s->year);
1833
    if (s->genre[0] != '\0')
1834
        fprintf(stderr, "Genre: %s\n", s->genre);
1835
}
1836

    
1837
/* since we have only one decoding thread, we can use a global
1838
   variable instead of a thread local variable */
1839
static VideoState *global_video_state;
1840

    
1841
static int decode_interrupt_cb(void)
1842
{
1843
    return (global_video_state && global_video_state->abort_request);
1844
}
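/* the callback above is registered with url_set_interrupt_cb() in
   decode_thread; the protocol layer polls it during blocking reads so a
   pending abort_request can interrupt network I/O instead of hanging */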
1845

    
1846
/* this thread gets the stream from the disk or the network */
1847
static int decode_thread(void *arg)
1848
{
1849
    VideoState *is = arg;
1850
    AVFormatContext *ic;
1851
    int err, i, ret, video_index, audio_index, use_play;
1852
    AVPacket pkt1, *pkt = &pkt1;
1853
    AVFormatParameters params, *ap = &params;
1854

    
1855
    video_index = -1;
1856
    audio_index = -1;
1857
    is->video_stream = -1;
1858
    is->audio_stream = -1;
1859
    is->subtitle_stream = -1;
1860

    
1861
    global_video_state = is;
1862
    url_set_interrupt_cb(decode_interrupt_cb);
1863

    
1864
    memset(ap, 0, sizeof(*ap));
1865
    ap->initial_pause = 1; /* we force a pause when starting an RTSP
1866
                              stream */
1867

    
1868
    err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
1869
    if (err < 0) {
1870
        print_error(is->filename, err);
1871
        ret = -1;
1872
        goto fail;
1873
    }
1874
    is->ic = ic;
1875
#ifdef CONFIG_NETWORK
1876
    use_play = (ic->iformat == &rtsp_demuxer);
1877
#else
1878
    use_play = 0;
1879
#endif
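    /* for RTSP the codec parameters only become available once the server
       starts sending data, so av_find_stream_info() is deferred until after
       av_read_play() below; for local files it is done right away */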
1880

    
1881
    if(genpts)
1882
        ic->flags |= AVFMT_FLAG_GENPTS;
1883

    
1884
    if (!use_play) {
1885
        err = av_find_stream_info(ic);
1886
        if (err < 0) {
1887
            fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
1888
            ret = -1;
1889
            goto fail;
1890
        }
1891
        ic->pb.eof_reached= 0; //FIXME hack, ffplay maybe shouldn't use url_feof() to test for the end
1892
    }
1893

    
1894
    /* if a seek was requested, execute it now */
1895
    if (start_time != AV_NOPTS_VALUE) {
1896
        int64_t timestamp;
1897

    
1898
        timestamp = start_time;
1899
        /* add the stream start time */
1900
        if (ic->start_time != AV_NOPTS_VALUE)
1901
            timestamp += ic->start_time;
1902
        ret = av_seek_frame(ic, -1, timestamp, AVSEEK_FLAG_BACKWARD);
1903
        if (ret < 0) {
1904
            fprintf(stderr, "%s: could not seek to position %0.3f\n",
1905
                    is->filename, (double)timestamp / AV_TIME_BASE);
1906
        }
1907
    }
1908

    
1909
    /* now we can begin to play (RTSP stream only) */
1910
    av_read_play(ic);
1911

    
1912
    if (use_play) {
1913
        err = av_find_stream_info(ic);
1914
        if (err < 0) {
1915
            fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
1916
            ret = -1;
1917
            goto fail;
1918
        }
1919
    }
1920

    
1921
    for(i = 0; i < ic->nb_streams; i++) {
1922
        AVCodecContext *enc = ic->streams[i]->codec;
1923
        switch(enc->codec_type) {
1924
        case CODEC_TYPE_AUDIO:
1925
            if ((audio_index < 0 || wanted_audio_stream-- > 0) && !audio_disable)
1926
                audio_index = i;
1927
            break;
1928
        case CODEC_TYPE_VIDEO:
1929
            if (video_index < 0 && !video_disable)
1930
                video_index = i;
1931
            break;
1932
        default:
1933
            break;
1934
        }
1935
    }
1936
    if (show_status) {
1937
        dump_format(ic, 0, is->filename, 0);
1938
        dump_stream_info(ic);
1939
    }
1940

    
1941
    /* open the streams */
1942
    if (audio_index >= 0) {
1943
        stream_component_open(is, audio_index);
1944
    }
1945

    
1946
    if (video_index >= 0) {
1947
        stream_component_open(is, video_index);
1948
    } else {
1949
        if (!display_disable)
1950
            is->show_audio = 1;
1951
    }
1952

    
1953
    if (is->video_stream < 0 && is->audio_stream < 0) {
1954
        fprintf(stderr, "%s: could not open codecs\n", is->filename);
1955
        ret = -1;
1956
        goto fail;
1957
    }
1958

    
1959
    for(;;) {
1960
        if (is->abort_request)
1961
            break;
1962
#ifdef CONFIG_NETWORK
1963
        if (is->paused != is->last_paused) {
1964
            is->last_paused = is->paused;
1965
            if (is->paused)
1966
                av_read_pause(ic);
1967
            else
1968
                av_read_play(ic);
1969
        }
1970
        if (is->paused && ic->iformat == &rtsp_demuxer) {
1971
            /* wait 10 ms to avoid trying to get another packet */
1972
            /* XXX: horrible */
1973
            SDL_Delay(10);
1974
            continue;
1975
        }
1976
#endif
1977
        if (is->seek_req) {
1978
            int stream_index= -1;
1979
            int64_t seek_target= is->seek_pos;
1980

    
1981
            if     (is->   video_stream >= 0) stream_index= is->   video_stream;
1982
            else if(is->   audio_stream >= 0) stream_index= is->   audio_stream;
1983
            else if(is->subtitle_stream >= 0) stream_index= is->subtitle_stream;
1984

    
1985
            if(stream_index>=0){
1986
                seek_target= av_rescale_q(seek_target, AV_TIME_BASE_Q, ic->streams[stream_index]->time_base);
1987
            }
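            /* seek_pos is normally expressed in AV_TIME_BASE units
               (microseconds) and is rescaled above to the chosen stream's
               time_base; e.g. a 5 s target for a 90 kHz stream becomes
               av_rescale_q(5000000, AV_TIME_BASE_Q, (AVRational){1, 90000}) = 450000 */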
1988

    
1989
            ret = av_seek_frame(is->ic, stream_index, seek_target, is->seek_flags);
1990
            if (ret < 0) {
1991
                fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
1992
            }else{
1993
                if (is->audio_stream >= 0) {
1994
                    packet_queue_flush(&is->audioq);
1995
                    packet_queue_put(&is->audioq, &flush_pkt);
1996
                }
1997
                if (is->subtitle_stream >= 0) {
1998
                    packet_queue_flush(&is->subtitleq);
1999
                    packet_queue_put(&is->subtitleq, &flush_pkt);
2000
                }
2001
                if (is->video_stream >= 0) {
2002
                    packet_queue_flush(&is->videoq);
2003
                    packet_queue_put(&is->videoq, &flush_pkt);
2004
                }
2005
            }
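            /* the packets that were queued refer to the old position, so
               each queue is emptied and a flush_pkt sentinel is queued; the
               decoder threads react to it by calling avcodec_flush_buffers() */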
2006
            is->seek_req = 0;
2007
        }
2008

    
2009
        /* if the queues are full, no need to read more */
2010
        if (is->audioq.size > MAX_AUDIOQ_SIZE ||
2011
            is->videoq.size > MAX_VIDEOQ_SIZE ||
2012
            is->subtitleq.size > MAX_SUBTITLEQ_SIZE ||
2013
            url_feof(&ic->pb)) {
2014
            /* wait 10 ms */
2015
            SDL_Delay(10);
2016
            continue;
2017
        }
2018
        ret = av_read_frame(ic, pkt);
2019
        if (ret < 0) {
2020
            if (url_ferror(&ic->pb) == 0) {
2021
                SDL_Delay(100); /* wait for user event */
2022
                continue;
2023
            } else
2024
                break;
2025
        }
2026
        if (pkt->stream_index == is->audio_stream) {
2027
            packet_queue_put(&is->audioq, pkt);
2028
        } else if (pkt->stream_index == is->video_stream) {
2029
            packet_queue_put(&is->videoq, pkt);
2030
        } else if (pkt->stream_index == is->subtitle_stream) {
2031
            packet_queue_put(&is->subtitleq, pkt);
2032
        } else {
2033
            av_free_packet(pkt);
2034
        }
2035
    }
2036
    /* wait until the end */
2037
    while (!is->abort_request) {
2038
        SDL_Delay(100);
2039
    }
2040

    
2041
    ret = 0;
2042
 fail:
2043
    /* disable interrupting */
2044
    global_video_state = NULL;
2045

    
2046
    /* close each stream */
2047
    if (is->audio_stream >= 0)
2048
        stream_component_close(is, is->audio_stream);
2049
    if (is->video_stream >= 0)
2050
        stream_component_close(is, is->video_stream);
2051
    if (is->subtitle_stream >= 0)
2052
        stream_component_close(is, is->subtitle_stream);
2053
    if (is->ic) {
2054
        av_close_input_file(is->ic);
2055
        is->ic = NULL; /* safety */
2056
    }
2057
    url_set_interrupt_cb(NULL);
2058

    
2059
    if (ret != 0) {
2060
        SDL_Event event;
2061

    
2062
        event.type = FF_QUIT_EVENT;
2063
        event.user.data1 = is;
2064
        SDL_PushEvent(&event);
2065
    }
2066
    return 0;
2067
}
2068

    
2069
static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
2070
{
2071
    VideoState *is;
2072

    
2073
    is = av_mallocz(sizeof(VideoState));
2074
    if (!is)
2075
        return NULL;
2076
    pstrcpy(is->filename, sizeof(is->filename), filename);
2077
    is->iformat = iformat;
2078
    is->ytop = 0;
2079
    is->xleft = 0;
2080

    
2081
    /* start video display */
2082
    is->pictq_mutex = SDL_CreateMutex();
2083
    is->pictq_cond = SDL_CreateCond();
2084

    
2085
    is->subpq_mutex = SDL_CreateMutex();
2086
    is->subpq_cond = SDL_CreateCond();
2087

    
2088
    /* add the refresh timer to draw the picture */
2089
    schedule_refresh(is, 40);
2090

    
2091
    is->av_sync_type = av_sync_type;
2092
    is->parse_tid = SDL_CreateThread(decode_thread, is);
2093
    if (!is->parse_tid) {
2094
        av_free(is);
2095
        return NULL;
2096
    }
2097
    return is;
2098
}
2099

    
2100
static void stream_close(VideoState *is)
2101
{
2102
    VideoPicture *vp;
2103
    int i;
2104
    /* XXX: use a special url_shutdown call to abort parse cleanly */
2105
    is->abort_request = 1;
2106
    SDL_WaitThread(is->parse_tid, NULL);
2107

    
2108
    /* free all pictures */
2109
    for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE; i++) {
2110
        vp = &is->pictq[i];
2111
        if (vp->bmp) {
2112
            SDL_FreeYUVOverlay(vp->bmp);
2113
            vp->bmp = NULL;
2114
        }
2115
    }
2116
    SDL_DestroyMutex(is->pictq_mutex);
2117
    SDL_DestroyCond(is->pictq_cond);
2118
    SDL_DestroyMutex(is->subpq_mutex);
2119
    SDL_DestroyCond(is->subpq_cond);
2120
}
2121

    
2122
static void stream_cycle_channel(VideoState *is, int codec_type)
2123
{
2124
    AVFormatContext *ic = is->ic;
2125
    int start_index, stream_index;
2126
    AVStream *st;
2127

    
2128
    if (codec_type == CODEC_TYPE_VIDEO)
2129
        start_index = is->video_stream;
2130
    else if (codec_type == CODEC_TYPE_AUDIO)
2131
        start_index = is->audio_stream;
2132
    else
2133
        start_index = is->subtitle_stream;
2134
    if (start_index < (codec_type == CODEC_TYPE_SUBTITLE ? -1 : 0))
2135
        return;
2136
    stream_index = start_index;
2137
    for(;;) {
2138
        if (++stream_index >= is->ic->nb_streams)
2139
        {
2140
            if (codec_type == CODEC_TYPE_SUBTITLE)
2141
            {
2142
                stream_index = -1;
2143
                goto the_end;
2144
            } else
2145
                stream_index = 0;
2146
        }
2147
        if (stream_index == start_index)
2148
            return;
2149
        st = ic->streams[stream_index];
2150
        if (st->codec->codec_type == codec_type) {
2151
            /* check that parameters are OK */
2152
            switch(codec_type) {
2153
            case CODEC_TYPE_AUDIO:
2154
                if (st->codec->sample_rate != 0 &&
2155
                    st->codec->channels != 0)
2156
                    goto the_end;
2157
                break;
2158
            case CODEC_TYPE_VIDEO:
2159
            case CODEC_TYPE_SUBTITLE:
2160
                goto the_end;
2161
            default:
2162
                break;
2163
            }
2164
        }
2165
    }
2166
 the_end:
2167
    stream_component_close(is, start_index);
2168
    stream_component_open(is, stream_index);
2169
}
2170

    
2171

    
2172
static void toggle_full_screen(void)
2173
{
2174
    is_full_screen = !is_full_screen;
2175
    if (!fs_screen_width) {
2176
        /* use default SDL method */
2177
//        SDL_WM_ToggleFullScreen(screen);
2178
    }
2179
    video_open(cur_stream);
2180
}
2181

    
2182
static void toggle_pause(void)
2183
{
2184
    if (cur_stream)
2185
        stream_pause(cur_stream);
2186
    step = 0;
2187
}
2188

    
2189
static void step_to_next_frame(void)
2190
{
2191
    if (cur_stream) {
2192
        if (cur_stream->paused)
2193
            cur_stream->paused=0;
2194
        cur_stream->video_current_pts = get_video_clock(cur_stream);
2195
    }
2196
    step = 1;
2197
}
2198

    
2199
static void do_exit(void)
2200
{
2201
    if (cur_stream) {
2202
        stream_close(cur_stream);
2203
        cur_stream = NULL;
2204
    }
2205
    if (show_status)
2206
        printf("\n");
2207
    SDL_Quit();
2208
    exit(0);
2209
}
2210

    
2211
static void toggle_audio_display(void)
2212
{
2213
    if (cur_stream) {
2214
        cur_stream->show_audio = !cur_stream->show_audio;
2215
    }
2216
}
2217

    
2218
/* handle an event sent by the GUI */
2219
static void event_loop(void)
2220
{
2221
    SDL_Event event;
2222
    double incr, pos, frac;
2223

    
2224
    for(;;) {
2225
        SDL_WaitEvent(&event);
2226
        switch(event.type) {
2227
        case SDL_KEYDOWN:
2228
            switch(event.key.keysym.sym) {
2229
            case SDLK_ESCAPE:
2230
            case SDLK_q:
2231
                do_exit();
2232
                break;
2233
            case SDLK_f:
2234
                toggle_full_screen();
2235
                break;
2236
            case SDLK_p:
2237
            case SDLK_SPACE:
2238
                toggle_pause();
2239
                break;
2240
            case SDLK_s: //S: Step to next frame
2241
                step_to_next_frame();
2242
                break;
2243
            case SDLK_a:
2244
                if (cur_stream)
2245
                    stream_cycle_channel(cur_stream, CODEC_TYPE_AUDIO);
2246
                break;
2247
            case SDLK_v:
2248
                if (cur_stream)
2249
                    stream_cycle_channel(cur_stream, CODEC_TYPE_VIDEO);
2250
                break;
2251
            case SDLK_t:
2252
                if (cur_stream)
2253
                    stream_cycle_channel(cur_stream, CODEC_TYPE_SUBTITLE);
2254
                break;
2255
            case SDLK_w:
2256
                toggle_audio_display();
2257
                break;
2258
            case SDLK_LEFT:
2259
                incr = -10.0;
2260
                goto do_seek;
2261
            case SDLK_RIGHT:
2262
                incr = 10.0;
2263
                goto do_seek;
2264
            case SDLK_UP:
2265
                incr = 60.0;
2266
                goto do_seek;
2267
            case SDLK_DOWN:
2268
                incr = -60.0;
2269
            do_seek:
2270
                if (cur_stream) {
2271
                    if (seek_by_bytes) {
2272
                        pos = url_ftell(&cur_stream->ic->pb);
2273
                        if (cur_stream->ic->bit_rate)
2274
                            incr *= cur_stream->ic->bit_rate / 60.0;
2275
                        else
2276
                            incr *= 180000.0;
2277
                        pos += incr;
2278
                        stream_seek(cur_stream, pos, incr);
2279
                    } else {
2280
                        pos = get_master_clock(cur_stream);
2281
                        pos += incr;
2282
                        stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), incr);
2283
                    }
2284
                }
2285
                break;
2286
            default:
2287
                break;
2288
            }
2289
            break;
2290
        case SDL_MOUSEBUTTONDOWN:
2291
            if (cur_stream) {
2292
                int ns, hh, mm, ss;
2293
                int tns, thh, tmm, tss;
2294
                tns = cur_stream->ic->duration/1000000LL;
2295
                thh = tns/3600;
2296
                tmm = (tns%3600)/60;
2297
                tss = (tns%60);
2298
                frac = (double)event.button.x/(double)cur_stream->width;
2299
                ns = frac*tns;
2300
                hh = ns/3600;
2301
                mm = (ns%3600)/60;
2302
                ss = (ns%60);
2303
                fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d)       \n", frac*100,
2304
                        hh, mm, ss, thh, tmm, tss);
2305
                stream_seek(cur_stream, (int64_t)(cur_stream->ic->start_time+frac*cur_stream->ic->duration), 0);
2306
            }
2307
            break;
2308
        case SDL_VIDEORESIZE:
2309
            if (cur_stream) {
2310
                screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
2311
                                          SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
2312
                screen_width = cur_stream->width = event.resize.w;
2313
                screen_height= cur_stream->height= event.resize.h;
2314
            }
2315
            break;
2316
        case SDL_QUIT:
2317
        case FF_QUIT_EVENT:
2318
            do_exit();
2319
            break;
2320
        case FF_ALLOC_EVENT:
2321
            video_open(event.user.data1);
2322
            alloc_picture(event.user.data1);
2323
            break;
2324
        case FF_REFRESH_EVENT:
2325
            video_refresh_timer(event.user.data1);
2326
            break;
2327
        default:
2328
            break;
2329
        }
2330
    }
2331
}
2332

    
2333
void opt_width(const char *arg)
2334
{
2335
    screen_width = atoi(arg);
2336
    if(screen_width<=0){
2337
        fprintf(stderr, "invalid width\n");
2338
        exit(1);
2339
    }
2340
}
2341

    
2342
void opt_height(const char *arg)
2343
{
2344
    screen_height = atoi(arg);
2345
    if(screen_height<=0){
2346
        fprintf(stderr, "invalid height\n");
2347
        exit(1);
2348
    }
2349
}
2350

    
2351
static void opt_format(const char *arg)
2352
{
2353
    file_iformat = av_find_input_format(arg);
2354
    if (!file_iformat) {
2355
        fprintf(stderr, "Unknown input format: %s\n", arg);
2356
        exit(1);
2357
    }
2358
}
2359

    
2360
#ifdef CONFIG_NETWORK
2361
void opt_rtp_tcp(void)
2362
{
2363
    /* only tcp protocol */
2364
    rtsp_default_protocols = (1 << RTSP_PROTOCOL_RTP_TCP);
2365
}
2366
#endif
2367

    
2368
void opt_sync(const char *arg)
2369
{
2370
    if (!strcmp(arg, "audio"))
2371
        av_sync_type = AV_SYNC_AUDIO_MASTER;
2372
    else if (!strcmp(arg, "video"))
2373
        av_sync_type = AV_SYNC_VIDEO_MASTER;
2374
    else if (!strcmp(arg, "ext"))
2375
        av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
2376
    else
2377
        show_help();
2378
}
2379

    
2380
void opt_seek(const char *arg)
2381
{
2382
    start_time = parse_date(arg, 1);
2383
}
2384

    
2385
static void opt_debug(const char *arg)
2386
{
2387
    av_log_set_level(99);
2388
    debug = atoi(arg);
2389
}
2390

    
2391
static void opt_vismv(const char *arg)
2392
{
2393
    debug_mv = atoi(arg);
2394
}
2395

    
2396
static void opt_thread_count(const char *arg)
2397
{
2398
    thread_count= atoi(arg);
2399
#if !defined(HAVE_THREADS)
2400
    fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
2401
#endif
2402
}
2403

    
2404
const OptionDef options[] = {
2405
    { "h", 0, {(void*)show_help}, "show help" },
2406
    { "x", HAS_ARG, {(void*)opt_width}, "force displayed width", "width" },
2407
    { "y", HAS_ARG, {(void*)opt_height}, "force displayed height", "height" },
2408
    { "fs", OPT_BOOL, {(void*)&is_full_screen}, "force full screen" },
2409
    { "an", OPT_BOOL, {(void*)&audio_disable}, "disable audio" },
2410
    { "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
2411
    { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_audio_stream}, "", "" },
2412
    { "ss", HAS_ARG, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
2413
    { "bytes", OPT_BOOL, {(void*)&seek_by_bytes}, "seek by bytes" },
2414
    { "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
2415
    { "f", HAS_ARG, {(void*)opt_format}, "force format", "fmt" },
2416
    { "stats", OPT_BOOL | OPT_EXPERT, {(void*)&show_status}, "show status", "" },
2417
    { "debug", HAS_ARG | OPT_EXPERT, {(void*)opt_debug}, "print specific debug info", "" },
2418
    { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&workaround_bugs}, "workaround bugs", "" },
2419
    { "vismv", HAS_ARG | OPT_EXPERT, {(void*)opt_vismv}, "visualize motion vectors", "" },
2420
    { "fast", OPT_BOOL | OPT_EXPERT, {(void*)&fast}, "non spec compliant optimizations", "" },
2421
    { "genpts", OPT_BOOL | OPT_EXPERT, {(void*)&genpts}, "generate pts", "" },
2422
    { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&lowres}, "", "" },
2423
    { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_loop_filter}, "", "" },
2424
    { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_frame}, "", "" },
2425
    { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_idct}, "", "" },
2426
    { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&idct}, "set idct algo",  "algo" },
2427
    { "er", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_resilience}, "set error detection threshold (0-4)",  "threshold" },
2428
    { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_concealment}, "set error concealment options",  "bit_mask" },
2429
#ifdef CONFIG_NETWORK
2430
    { "rtp_tcp", OPT_EXPERT, {(void*)&opt_rtp_tcp}, "force RTP/TCP protocol usage", "" },
2431
#endif
2432
    { "sync", HAS_ARG | OPT_EXPERT, {(void*)opt_sync}, "set audio-video sync. type (type=audio/video/ext)", "type" },
2433
    { "threads", HAS_ARG | OPT_EXPERT, {(void*)opt_thread_count}, "thread count", "count" },
2434
    { NULL, },
2435
};
2436

    
2437
void show_help(void)
2438
{
2439
    printf("ffplay version " FFMPEG_VERSION ", Copyright (c) 2003-2006 Fabrice Bellard, et al.\n"
2440
           "usage: ffplay [options] input_file\n"
2441
           "Simple media player\n");
2442
    printf("\n");
2443
    show_help_options(options, "Main options:\n",
2444
                      OPT_EXPERT, 0);
2445
    show_help_options(options, "\nAdvanced options:\n",
2446
                      OPT_EXPERT, OPT_EXPERT);
2447
    printf("\nWhile playing:\n"
2448
           "q, ESC              quit\n"
2449
           "f                   toggle full screen\n"
2450
           "p, SPC              pause\n"
2451
           "a                   cycle audio channel\n"
2452
           "v                   cycle video channel\n"
2453
           "t                   cycle subtitle channel\n"
2454
           "w                   show audio waves\n"
2455
           "left/right          seek backward/forward 10 seconds\n"
2456
           "down/up             seek backward/forward 1 minute\n"
2457
           "mouse click         seek to percentage in file corresponding to fraction of width\n"
2458
           );
2459
    exit(1);
2460
}
2461

    
2462
void parse_arg_file(const char *filename)
2463
{
2464
    if (!strcmp(filename, "-"))
2465
        filename = "pipe:";
2466
    input_filename = filename;
2467
}
2468

    
2469
/* Called from the main */
2470
int main(int argc, char **argv)
2471
{
2472
    int flags;
2473

    
2474
    /* register all codecs, demuxers and protocols */
2475
    av_register_all();
2476

    
2477
    #ifdef CONFIG_OS2
2478
      MorphToPM(); // Morph the VIO application to a PM one to be able to use Win* functions
2479

    
2480
      // Make stdout and stderr unbuffered
2481
      setbuf( stdout, NULL );
2482
      setbuf( stderr, NULL );
2483
    #endif
2484

    
2485
    parse_options(argc, argv, options);
2486

    
2487
    if (!input_filename)
2488
        show_help();
2489

    
2490
    if (display_disable) {
2491
        video_disable = 1;
2492
    }
2493
    flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
2494
#if !defined(__MINGW32__) && !defined(CONFIG_DARWIN)
2495
    flags |= SDL_INIT_EVENTTHREAD; /* Not supported on win32 or darwin */
2496
#endif
2497
    if (SDL_Init (flags)) {
2498
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
2499
        exit(1);
2500
    }
2501

    
2502
    if (!display_disable) {
2503
#ifdef HAVE_SDL_VIDEO_SIZE
2504
        const SDL_VideoInfo *vi = SDL_GetVideoInfo();
2505
        fs_screen_width = vi->current_w;
2506
        fs_screen_height = vi->current_h;
2507
#endif
2508
    }
2509

    
2510
    SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
2511
    SDL_EventState(SDL_MOUSEMOTION, SDL_IGNORE);
2512
    SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
2513
    SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
2514

    
2515
    av_init_packet(&flush_pkt);
2516
    flush_pkt.data= "FLUSH";
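    /* flush_pkt is a sentinel: only its data pointer matters, and the
       decoders detect it by comparing pkt->data == flush_pkt.data */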
2517

    
2518
    cur_stream = stream_open(input_filename, file_iformat);
2519

    
2520
    event_loop();
2521

    
2522
    /* event_loop() never returns */
2523

    
2524
    return 0;
2525
}