/*
 * FFplay : Simple Media Player based on the ffmpeg libraries
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <math.h>
#include <limits.h>
#include "avformat.h"
#include "swscale.h"
#include "avstring.h"

#include "version.h"
#include "cmdutils.h"

#include <SDL.h>
#include <SDL_thread.h>

#ifdef __MINGW32__
#undef main /* We don't want SDL to override our main() */
#endif

#undef exit

//#define DEBUG_SYNC

#define MAX_VIDEOQ_SIZE (5 * 256 * 1024)
#define MAX_AUDIOQ_SIZE (5 * 16 * 1024)
#define MAX_SUBTITLEQ_SIZE (5 * 16 * 1024)

/* SDL audio buffer size, in samples. Should be small to have precise
   A/V sync as SDL does not have hardware buffer fullness info. */
#define SDL_AUDIO_BUFFER_SIZE 1024
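/* Worked example (editorial note, not part of the original source): at
   44100 Hz, 1024 sample frames per callback correspond to roughly
   1024 / 44100 ~= 23 ms of audio, so the uncertainty introduced by SDL's
   own buffering stays within a few tens of milliseconds. */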

/* no AV sync correction is done if below the AV sync threshold */
#define AV_SYNC_THRESHOLD 0.01
/* no AV correction is done if the error is too big */
#define AV_NOSYNC_THRESHOLD 10.0

/* maximum audio speed change to get correct sync */
#define SAMPLE_CORRECTION_PERCENT_MAX 10

/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
#define AUDIO_DIFF_AVG_NB   20

/* NOTE: the size must be big enough to compensate for the hardware audio buffer size */
#define SAMPLE_ARRAY_SIZE (2*65536)

static int sws_flags = SWS_BICUBIC;

typedef struct PacketQueue {
    AVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    int size;
    int abort_request;
    SDL_mutex *mutex;
    SDL_cond *cond;
} PacketQueue;

#define VIDEO_PICTURE_QUEUE_SIZE 1
#define SUBPICTURE_QUEUE_SIZE 4

typedef struct VideoPicture {
    double pts;                                  ///<presentation time stamp for this picture
    SDL_Overlay *bmp;
    int width, height; /* source height & width */
    int allocated;
} VideoPicture;

typedef struct SubPicture {
    double pts; /* presentation time stamp for this picture */
    AVSubtitle sub;
} SubPicture;

enum {
    AV_SYNC_AUDIO_MASTER, /* default choice */
    AV_SYNC_VIDEO_MASTER,
    AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
};

typedef struct VideoState {
    SDL_Thread *parse_tid;
    SDL_Thread *video_tid;
    AVInputFormat *iformat;
    int no_background;
    int abort_request;
    int paused;
    int last_paused;
    int seek_req;
    int seek_flags;
    int64_t seek_pos;
    AVFormatContext *ic;
    int dtg_active_format;

    int audio_stream;

    int av_sync_type;
    double external_clock; /* external clock base */
    int64_t external_clock_time;

    double audio_clock;
    double audio_diff_cum; /* used for AV difference average computation */
    double audio_diff_avg_coef;
    double audio_diff_threshold;
    int audio_diff_avg_count;
    AVStream *audio_st;
    PacketQueue audioq;
    int audio_hw_buf_size;
    /* samples output by the codec. we reserve more space for avsync
       compensation */
    DECLARE_ALIGNED(16,uint8_t,audio_buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2]);
    unsigned int audio_buf_size; /* in bytes */
    int audio_buf_index; /* in bytes */
    AVPacket audio_pkt;
    uint8_t *audio_pkt_data;
    int audio_pkt_size;

    int show_audio; /* if true, display audio samples */
    int16_t sample_array[SAMPLE_ARRAY_SIZE];
    int sample_array_index;
    int last_i_start;

    SDL_Thread *subtitle_tid;
    int subtitle_stream;
    int subtitle_stream_changed;
    AVStream *subtitle_st;
    PacketQueue subtitleq;
    SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
    int subpq_size, subpq_rindex, subpq_windex;
    SDL_mutex *subpq_mutex;
    SDL_cond *subpq_cond;

    double frame_timer;
    double frame_last_pts;
    double frame_last_delay;
    double video_clock;                          ///<pts of last decoded frame / predicted pts of next decoded frame
    int video_stream;
    AVStream *video_st;
    PacketQueue videoq;
    double video_current_pts;                    ///<current displayed pts (different from video_clock if frame fifos are used)
    int64_t video_current_pts_time;              ///<time (av_gettime) at which we updated video_current_pts - used to have running video pts
    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
    int pictq_size, pictq_rindex, pictq_windex;
    SDL_mutex *pictq_mutex;
    SDL_cond *pictq_cond;

    //    QETimer *video_timer;
    char filename[1024];
    int width, height, xleft, ytop;
} VideoState;

void show_help(void);
static int audio_write_get_buf_size(VideoState *is);

/* options specified by the user */
static AVInputFormat *file_iformat;
static const char *input_filename;
static int fs_screen_width;
static int fs_screen_height;
static int screen_width = 0;
static int screen_height = 0;
static int frame_width = 0;
static int frame_height = 0;
static enum PixelFormat frame_pix_fmt = PIX_FMT_NONE;
static int audio_disable;
static int video_disable;
static int wanted_audio_stream= 0;
static int wanted_video_stream= 0;
static int seek_by_bytes;
static int display_disable;
static int show_status;
static int av_sync_type = AV_SYNC_AUDIO_MASTER;
static int64_t start_time = AV_NOPTS_VALUE;
static int debug = 0;
static int debug_mv = 0;
static int step = 0;
static int thread_count = 1;
static int workaround_bugs = 1;
static int fast = 0;
static int genpts = 0;
static int lowres = 0;
static int idct = FF_IDCT_AUTO;
static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
static int error_resilience = FF_ER_CAREFUL;
static int error_concealment = 3;
static int decoder_reorder_pts= 0;

/* current context */
static int is_full_screen;
static VideoState *cur_stream;
static int64_t audio_callback_time;

AVPacket flush_pkt;

#define FF_ALLOC_EVENT   (SDL_USEREVENT)
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_QUIT_EVENT    (SDL_USEREVENT + 2)

SDL_Surface *screen;

/* packet queue handling */
static void packet_queue_init(PacketQueue *q)
{
    memset(q, 0, sizeof(PacketQueue));
    q->mutex = SDL_CreateMutex();
    q->cond = SDL_CreateCond();
}

static void packet_queue_flush(PacketQueue *q)
{
    AVPacketList *pkt, *pkt1;

    SDL_LockMutex(q->mutex);
    for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
        pkt1 = pkt->next;
        av_free_packet(&pkt->pkt);
        av_freep(&pkt);
    }
    q->last_pkt = NULL;
    q->first_pkt = NULL;
    q->nb_packets = 0;
    q->size = 0;
    SDL_UnlockMutex(q->mutex);
}

static void packet_queue_end(PacketQueue *q)
{
    packet_queue_flush(q);
    SDL_DestroyMutex(q->mutex);
    SDL_DestroyCond(q->cond);
}

static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
{
    AVPacketList *pkt1;

    /* duplicate the packet */
    if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
        return -1;

    pkt1 = av_malloc(sizeof(AVPacketList));
    if (!pkt1)
        return -1;
    pkt1->pkt = *pkt;
    pkt1->next = NULL;

    SDL_LockMutex(q->mutex);

    if (!q->last_pkt)
        q->first_pkt = pkt1;
    else
        q->last_pkt->next = pkt1;
    q->last_pkt = pkt1;
    q->nb_packets++;
    q->size += pkt1->pkt.size;
    /* XXX: should duplicate packet data in DV case */
    SDL_CondSignal(q->cond);

    SDL_UnlockMutex(q->mutex);
    return 0;
}

static void packet_queue_abort(PacketQueue *q)
{
    SDL_LockMutex(q->mutex);

    q->abort_request = 1;

    SDL_CondSignal(q->cond);

    SDL_UnlockMutex(q->mutex);
}

/* return < 0 if aborted, 0 if no packet and > 0 if packet.  */
static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
{
    AVPacketList *pkt1;
    int ret;

    SDL_LockMutex(q->mutex);

    for(;;) {
        if (q->abort_request) {
            ret = -1;
            break;
        }

        pkt1 = q->first_pkt;
        if (pkt1) {
            q->first_pkt = pkt1->next;
            if (!q->first_pkt)
                q->last_pkt = NULL;
            q->nb_packets--;
            q->size -= pkt1->pkt.size;
            *pkt = pkt1->pkt;
            av_free(pkt1);
            ret = 1;
            break;
        } else if (!block) {
            ret = 0;
            break;
        } else {
            SDL_CondWait(q->cond, q->mutex);
        }
    }
    SDL_UnlockMutex(q->mutex);
    return ret;
}
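/* Illustrative consumer loop (editorial sketch, mirroring how the decoder
   threads below use the queue):
 *
 *     AVPacket pkt;
 *     for (;;) {
 *         if (packet_queue_get(&is->videoq, &pkt, 1) < 0)
 *             break;                  // queue was aborted, stop the thread
 *         // ... decode pkt ...
 *         av_free_packet(&pkt);
 *     }
 */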

static inline void fill_rectangle(SDL_Surface *screen,
                                  int x, int y, int w, int h, int color)
{
    SDL_Rect rect;
    rect.x = x;
    rect.y = y;
    rect.w = w;
    rect.h = h;
    SDL_FillRect(screen, &rect, color);
}

340
#if 0
341
/* draw only the border of a rectangle */
342
void fill_border(VideoState *s, int x, int y, int w, int h, int color)
343
{
344
    int w1, w2, h1, h2;
345

346
    /* fill the background */
347
    w1 = x;
348
    if (w1 < 0)
349
        w1 = 0;
350
    w2 = s->width - (x + w);
351
    if (w2 < 0)
352
        w2 = 0;
353
    h1 = y;
354
    if (h1 < 0)
355
        h1 = 0;
356
    h2 = s->height - (y + h);
357
    if (h2 < 0)
358
        h2 = 0;
359
    fill_rectangle(screen,
360
                   s->xleft, s->ytop,
361
                   w1, s->height,
362
                   color);
363
    fill_rectangle(screen,
364
                   s->xleft + s->width - w2, s->ytop,
365
                   w2, s->height,
366
                   color);
367
    fill_rectangle(screen,
368
                   s->xleft + w1, s->ytop,
369
                   s->width - w1 - w2, h1,
370
                   color);
371
    fill_rectangle(screen,
372
                   s->xleft + w1, s->ytop + s->height - h2,
373
                   s->width - w1 - w2, h2,
374
                   color);
375
}
376
#endif
377

    
378

    
379

    
380
#define SCALEBITS 10
381
#define ONE_HALF  (1 << (SCALEBITS - 1))
382
#define FIX(x)    ((int) ((x) * (1<<SCALEBITS) + 0.5))
383

    
384
#define RGB_TO_Y_CCIR(r, g, b) \
385
((FIX(0.29900*219.0/255.0) * (r) + FIX(0.58700*219.0/255.0) * (g) + \
386
  FIX(0.11400*219.0/255.0) * (b) + (ONE_HALF + (16 << SCALEBITS))) >> SCALEBITS)
387

    
388
#define RGB_TO_U_CCIR(r1, g1, b1, shift)\
389
(((- FIX(0.16874*224.0/255.0) * r1 - FIX(0.33126*224.0/255.0) * g1 +         \
390
     FIX(0.50000*224.0/255.0) * b1 + (ONE_HALF << shift) - 1) >> (SCALEBITS + shift)) + 128)
391

    
392
#define RGB_TO_V_CCIR(r1, g1, b1, shift)\
393
(((FIX(0.50000*224.0/255.0) * r1 - FIX(0.41869*224.0/255.0) * g1 -           \
394
   FIX(0.08131*224.0/255.0) * b1 + (ONE_HALF << shift) - 1) >> (SCALEBITS + shift)) + 128)
395

    
396
#define ALPHA_BLEND(a, oldp, newp, s)\
397
((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))
398

    
399
#define RGBA_IN(r, g, b, a, s)\
400
{\
401
    unsigned int v = ((const uint32_t *)(s))[0];\
402
    a = (v >> 24) & 0xff;\
403
    r = (v >> 16) & 0xff;\
404
    g = (v >> 8) & 0xff;\
405
    b = v & 0xff;\
406
}
407

    
408
#define YUVA_IN(y, u, v, a, s, pal)\
409
{\
410
    unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)s];\
411
    a = (val >> 24) & 0xff;\
412
    y = (val >> 16) & 0xff;\
413
    u = (val >> 8) & 0xff;\
414
    v = val & 0xff;\
415
}
416

    
417
#define YUVA_OUT(d, y, u, v, a)\
418
{\
419
    ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
420
}
421

    
422

    
423
#define BPP 1
424

    
425
static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh,
426
                          float scalex, float scaley)
427
{
428
    int wrap, wrap3, width2, skip2;
429
    int y, u, v, a, u1, v1, a1, w, h;
430
    uint8_t *lum, *cb, *cr;
431
    const uint8_t *p;
432
    const uint32_t *pal;
433
    int dstx, dsty, dstw, dsth;
434

    
435
    dstx = FFMIN(scalex * FFMAX(rect->x, 0), imgw);
436
    dstw = FFMIN(FFMAX(rect->w, 0), imgw - dstx);
437
    dsty = FFMIN(scaley * FFMAX(rect->y, 0), imgh);
438
    dsth = FFMIN(FFMAX(rect->h, 0), imgh - dsty);
439
    lum = dst->data[0] + dsty * dst->linesize[0];
440
    cb = dst->data[1] + (dsty >> 1) * dst->linesize[1];
441
    cr = dst->data[2] + (dsty >> 1) * dst->linesize[2];
442

    
443
    width2 = (dstw + 1) >> 1;
444
    skip2 = dstx >> 1;
445
    wrap = dst->linesize[0];
446
    wrap3 = rect->linesize;
447
    p = rect->bitmap;
448
    pal = rect->rgba_palette;  /* Now in YCrCb! */
449

    
450
    if (dsty & 1) {
451
        lum += dstx;
452
        cb += skip2;
453
        cr += skip2;
454

    
455
        if (dstx & 1) {
456
            YUVA_IN(y, u, v, a, p, pal);
457
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
458
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
459
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
460
            cb++;
461
            cr++;
462
            lum++;
463
            p += BPP;
464
        }
465
        for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
466
            YUVA_IN(y, u, v, a, p, pal);
467
            u1 = u;
468
            v1 = v;
469
            a1 = a;
470
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
471

    
472
            YUVA_IN(y, u, v, a, p + BPP, pal);
473
            u1 += u;
474
            v1 += v;
475
            a1 += a;
476
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
477
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
478
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
479
            cb++;
480
            cr++;
481
            p += 2 * BPP;
482
            lum += 2;
483
        }
484
        if (w) {
485
            YUVA_IN(y, u, v, a, p, pal);
486
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
487
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
488
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
489
        }
490
        p += wrap3 + (wrap3 - dstw * BPP);
491
        lum += wrap + (wrap - dstw - dstx);
492
        cb += dst->linesize[1] - width2 - skip2;
493
        cr += dst->linesize[2] - width2 - skip2;
494
    }
495
    for(h = dsth - (dsty & 1); h >= 2; h -= 2) {
496
        lum += dstx;
497
        cb += skip2;
498
        cr += skip2;
499

    
500
        if (dstx & 1) {
501
            YUVA_IN(y, u, v, a, p, pal);
502
            u1 = u;
503
            v1 = v;
504
            a1 = a;
505
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
506
            p += wrap3;
507
            lum += wrap;
508
            YUVA_IN(y, u, v, a, p, pal);
509
            u1 += u;
510
            v1 += v;
511
            a1 += a;
512
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
513
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
514
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
515
            cb++;
516
            cr++;
517
            p += -wrap3 + BPP;
518
            lum += -wrap + 1;
519
        }
520
        for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
521
            YUVA_IN(y, u, v, a, p, pal);
522
            u1 = u;
523
            v1 = v;
524
            a1 = a;
525
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
526

    
527
            YUVA_IN(y, u, v, a, p, pal);
528
            u1 += u;
529
            v1 += v;
530
            a1 += a;
531
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
532
            p += wrap3;
533
            lum += wrap;
534

    
535
            YUVA_IN(y, u, v, a, p, pal);
536
            u1 += u;
537
            v1 += v;
538
            a1 += a;
539
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
540

    
541
            YUVA_IN(y, u, v, a, p, pal);
542
            u1 += u;
543
            v1 += v;
544
            a1 += a;
545
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
546

    
547
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
548
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
549

    
550
            cb++;
551
            cr++;
552
            p += -wrap3 + 2 * BPP;
553
            lum += -wrap + 2;
554
        }
555
        if (w) {
556
            YUVA_IN(y, u, v, a, p, pal);
557
            u1 = u;
558
            v1 = v;
559
            a1 = a;
560
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
561
            p += wrap3;
562
            lum += wrap;
563
            YUVA_IN(y, u, v, a, p, pal);
564
            u1 += u;
565
            v1 += v;
566
            a1 += a;
567
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
568
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
569
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
570
            cb++;
571
            cr++;
572
            p += -wrap3 + BPP;
573
            lum += -wrap + 1;
574
        }
575
        p += wrap3 + (wrap3 - dstw * BPP);
576
        lum += wrap + (wrap - dstw - dstx);
577
        cb += dst->linesize[1] - width2 - skip2;
578
        cr += dst->linesize[2] - width2 - skip2;
579
    }
580
    /* handle odd height */
581
    if (h) {
582
        lum += dstx;
583
        cb += skip2;
584
        cr += skip2;
585

    
586
        if (dstx & 1) {
587
            YUVA_IN(y, u, v, a, p, pal);
588
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
589
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
590
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
591
            cb++;
592
            cr++;
593
            lum++;
594
            p += BPP;
595
        }
596
        for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
597
            YUVA_IN(y, u, v, a, p, pal);
598
            u1 = u;
599
            v1 = v;
600
            a1 = a;
601
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
602

    
603
            YUVA_IN(y, u, v, a, p + BPP, pal);
604
            u1 += u;
605
            v1 += v;
606
            a1 += a;
607
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
608
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
610
            cb++;
611
            cr++;
612
            p += 2 * BPP;
613
            lum += 2;
614
        }
615
        if (w) {
616
            YUVA_IN(y, u, v, a, p, pal);
617
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
618
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
619
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
620
        }
621
    }
622
}
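/* Editorial note: blend_subrect() writes into a YUV 4:2:0 destination, so
   luma is alpha-blended per pixel while the u1/v1/a1 accumulators gather 2
   samples (row/column edges) or 4 samples (interior 2x2 blocks) before the
   chroma is blended with ALPHA_BLEND shift 1 or 2, which divides by the
   number of accumulated samples. */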
623

    
624
static void free_subpicture(SubPicture *sp)
625
{
626
    int i;
627

    
628
    for (i = 0; i < sp->sub.num_rects; i++)
629
    {
630
        av_free(sp->sub.rects[i].bitmap);
631
        av_free(sp->sub.rects[i].rgba_palette);
632
    }
633

    
634
    av_free(sp->sub.rects);
635

    
636
    memset(&sp->sub, 0, sizeof(AVSubtitle));
637
}
638

    
639
static void video_image_display(VideoState *is)
640
{
641
    VideoPicture *vp;
642
    SubPicture *sp;
643
    AVPicture pict;
644
    float aspect_ratio;
645
    int width, height, x, y;
646
    SDL_Rect rect;
647
    int i;
648

    
649
    vp = &is->pictq[is->pictq_rindex];
650
    if (vp->bmp) {
651
        /* XXX: use variable in the frame */
652
        if (is->video_st->codec->sample_aspect_ratio.num == 0)
653
            aspect_ratio = 0;
654
        else
655
            aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio)
656
                * is->video_st->codec->width / is->video_st->codec->height;
657
        if (aspect_ratio <= 0.0)
658
            aspect_ratio = (float)is->video_st->codec->width /
659
                (float)is->video_st->codec->height;
660
        /* if an active format is indicated, then it overrides the
661
           mpeg format */
662
#if 0
663
        if (is->video_st->codec->dtg_active_format != is->dtg_active_format) {
664
            is->dtg_active_format = is->video_st->codec->dtg_active_format;
665
            printf("dtg_active_format=%d\n", is->dtg_active_format);
666
        }
667
#endif
668
#if 0
669
        switch(is->video_st->codec->dtg_active_format) {
670
        case FF_DTG_AFD_SAME:
671
        default:
672
            /* nothing to do */
673
            break;
674
        case FF_DTG_AFD_4_3:
675
            aspect_ratio = 4.0 / 3.0;
676
            break;
677
        case FF_DTG_AFD_16_9:
678
            aspect_ratio = 16.0 / 9.0;
679
            break;
680
        case FF_DTG_AFD_14_9:
681
            aspect_ratio = 14.0 / 9.0;
682
            break;
683
        case FF_DTG_AFD_4_3_SP_14_9:
684
            aspect_ratio = 14.0 / 9.0;
685
            break;
686
        case FF_DTG_AFD_16_9_SP_14_9:
687
            aspect_ratio = 14.0 / 9.0;
688
            break;
689
        case FF_DTG_AFD_SP_4_3:
690
            aspect_ratio = 4.0 / 3.0;
691
            break;
692
        }
693
#endif
694

    
695
        if (is->subtitle_st)
696
        {
697
            if (is->subpq_size > 0)
698
            {
699
                sp = &is->subpq[is->subpq_rindex];
700

    
701
                if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000))
702
                {
703
                    float scalex = 1, scaley = 1;
704
                    SDL_LockYUVOverlay (vp->bmp);
705

    
706
                    pict.data[0] = vp->bmp->pixels[0];
707
                    pict.data[1] = vp->bmp->pixels[2];
708
                    pict.data[2] = vp->bmp->pixels[1];
709

    
710
                    pict.linesize[0] = vp->bmp->pitches[0];
711
                    pict.linesize[1] = vp->bmp->pitches[2];
712
                    pict.linesize[2] = vp->bmp->pitches[1];
713

    
714
                    if (is->subtitle_st->codec->width)
715
                        scalex = (float)vp->bmp->w / is->subtitle_st->codec->width;
716
                    if (is->subtitle_st->codec->height)
717
                        scaley = (float)vp->bmp->h / is->subtitle_st->codec->height;
718
                    for (i = 0; i < sp->sub.num_rects; i++)
719
                        blend_subrect(&pict, &sp->sub.rects[i],
720
                                      vp->bmp->w, vp->bmp->h,
721
                                      scalex, scaley);
722

    
723
                    SDL_UnlockYUVOverlay (vp->bmp);
724
                }
725
            }
726
        }
727

    
728

    
729
        /* XXX: we assume the screen has a 1:1 pixel aspect ratio */
730
        height = is->height;
731
        width = ((int)rint(height * aspect_ratio)) & -3;
732
        if (width > is->width) {
733
            width = is->width;
734
            height = ((int)rint(width / aspect_ratio)) & -3;
735
        }
736
        x = (is->width - width) / 2;
737
        y = (is->height - height) / 2;
738
        if (!is->no_background) {
739
            /* fill the background */
740
            //            fill_border(is, x, y, width, height, QERGB(0x00, 0x00, 0x00));
741
        } else {
742
            is->no_background = 0;
743
        }
744
        rect.x = is->xleft + x;
745
        rect.y = is->ytop  + y;
746
        rect.w = width;
747
        rect.h = height;
748
        SDL_DisplayYUVOverlay(vp->bmp, &rect);
749
    } else {
750
#if 0
751
        fill_rectangle(screen,
752
                       is->xleft, is->ytop, is->width, is->height,
753
                       QERGB(0x00, 0x00, 0x00));
754
#endif
755
    }
756
}
757

    
758
static inline int compute_mod(int a, int b)
759
{
760
    a = a % b;
761
    if (a >= 0)
762
        return a;
763
    else
764
        return a + b;
765
}
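/* Editorial note: unlike the C '%' operator, compute_mod() always returns a
   value in [0, b) for negative a, e.g. compute_mod(-3, SAMPLE_ARRAY_SIZE)
   == SAMPLE_ARRAY_SIZE - 3, which keeps the waveform indices below valid. */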
766

    
767
static void video_audio_display(VideoState *s)
768
{
769
    int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
770
    int ch, channels, h, h2, bgcolor, fgcolor;
771
    int16_t time_diff;
772

    
773
    /* compute display index : center on currently output samples */
774
    channels = s->audio_st->codec->channels;
775
    nb_display_channels = channels;
776
    if (!s->paused) {
777
        n = 2 * channels;
778
        delay = audio_write_get_buf_size(s);
779
        delay /= n;
780

    
781
        /* to be more precise, we take into account the time spent since
782
           the last buffer computation */
783
        if (audio_callback_time) {
784
            time_diff = av_gettime() - audio_callback_time;
785
            delay += (time_diff * s->audio_st->codec->sample_rate) / 1000000;
786
        }
787

    
788
        delay -= s->width / 2;
789
        if (delay < s->width)
790
            delay = s->width;
791

    
792
        i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
793

    
794
        h= INT_MIN;
795
        for(i=0; i<1000; i+=channels){
796
            int idx= (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
797
            int a= s->sample_array[idx];
798
            int b= s->sample_array[(idx + 4*channels)%SAMPLE_ARRAY_SIZE];
799
            int c= s->sample_array[(idx + 5*channels)%SAMPLE_ARRAY_SIZE];
800
            int d= s->sample_array[(idx + 9*channels)%SAMPLE_ARRAY_SIZE];
801
            int score= a-d;
802
            if(h<score && (b^c)<0){
803
                h= score;
804
                i_start= idx;
805
            }
806
        }
807

    
808
        s->last_i_start = i_start;
809
    } else {
810
        i_start = s->last_i_start;
811
    }
812

    
813
    bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
814
    fill_rectangle(screen,
815
                   s->xleft, s->ytop, s->width, s->height,
816
                   bgcolor);
817

    
818
    fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);
819

    
820
    /* total height for one channel */
821
    h = s->height / nb_display_channels;
822
    /* graph height / 2 */
823
    h2 = (h * 9) / 20;
824
    for(ch = 0;ch < nb_display_channels; ch++) {
825
        i = i_start + ch;
826
        y1 = s->ytop + ch * h + (h / 2); /* position of center line */
827
        for(x = 0; x < s->width; x++) {
828
            y = (s->sample_array[i] * h2) >> 15;
829
            if (y < 0) {
830
                y = -y;
831
                ys = y1 - y;
832
            } else {
833
                ys = y1;
834
            }
835
            fill_rectangle(screen,
836
                           s->xleft + x, ys, 1, y,
837
                           fgcolor);
838
            i += channels;
839
            if (i >= SAMPLE_ARRAY_SIZE)
840
                i -= SAMPLE_ARRAY_SIZE;
841
        }
842
    }
843

    
844
    fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);
845

    
846
    for(ch = 1;ch < nb_display_channels; ch++) {
847
        y = s->ytop + ch * h;
848
        fill_rectangle(screen,
849
                       s->xleft, y, s->width, 1,
850
                       fgcolor);
851
    }
852
    SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
853
}
854

    
855
static int video_open(VideoState *is){
856
    int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
857
    int w,h;
858

    
859
    if(is_full_screen) flags |= SDL_FULLSCREEN;
860
    else               flags |= SDL_RESIZABLE;
861

    
862
    if (is_full_screen && fs_screen_width) {
863
        w = fs_screen_width;
864
        h = fs_screen_height;
865
    } else if(!is_full_screen && screen_width){
866
        w = screen_width;
867
        h = screen_height;
868
    }else if (is->video_st && is->video_st->codec->width){
869
        w = is->video_st->codec->width;
870
        h = is->video_st->codec->height;
871
    } else {
872
        w = 640;
873
        h = 480;
874
    }
875
#ifndef CONFIG_DARWIN
876
    screen = SDL_SetVideoMode(w, h, 0, flags);
877
#else
878
    /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
879
    screen = SDL_SetVideoMode(w, h, 24, flags);
880
#endif
881
    if (!screen) {
882
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
883
        return -1;
884
    }
885
    SDL_WM_SetCaption("FFplay", "FFplay");
886

    
887
    is->width = screen->w;
888
    is->height = screen->h;
889

    
890
    return 0;
891
}
892

    
893
/* display the current picture, if any */
894
static void video_display(VideoState *is)
895
{
896
    if(!screen)
897
        video_open(cur_stream);
898
    if (is->audio_st && is->show_audio)
899
        video_audio_display(is);
900
    else if (is->video_st)
901
        video_image_display(is);
902
}
903

    
904
static Uint32 sdl_refresh_timer_cb(Uint32 interval, void *opaque)
905
{
906
    SDL_Event event;
907
    event.type = FF_REFRESH_EVENT;
908
    event.user.data1 = opaque;
909
    SDL_PushEvent(&event);
910
    return 0; /* 0 means stop timer */
911
}
912

    
913
/* schedule a video refresh in 'delay' ms */
914
static void schedule_refresh(VideoState *is, int delay)
915
{
916
    SDL_AddTimer(delay, sdl_refresh_timer_cb, is);
917
}
918

    
919
/* get the current audio clock value */
920
static double get_audio_clock(VideoState *is)
921
{
922
    double pts;
923
    int hw_buf_size, bytes_per_sec;
924
    pts = is->audio_clock;
925
    hw_buf_size = audio_write_get_buf_size(is);
926
    bytes_per_sec = 0;
927
    if (is->audio_st) {
928
        bytes_per_sec = is->audio_st->codec->sample_rate *
929
            2 * is->audio_st->codec->channels;
930
    }
931
    if (bytes_per_sec)
932
        pts -= (double)hw_buf_size / bytes_per_sec;
933
    return pts;
934
}
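/* Editorial note: audio_clock is the PTS at the end of the data handed to
   the audio callback, so the data still sitting in the local buffer is
   subtracted again.  Example (16-bit samples as assumed above): at 48000 Hz
   stereo, bytes_per_sec = 48000 * 2 * 2 = 192000, so 8192 buffered bytes
   pull the clock back by 8192 / 192000 ~= 43 ms. */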
935

    
936
/* get the current video clock value */
937
static double get_video_clock(VideoState *is)
938
{
939
    double delta;
940
    if (is->paused) {
941
        delta = 0;
942
    } else {
943
        delta = (av_gettime() - is->video_current_pts_time) / 1000000.0;
944
    }
945
    return is->video_current_pts + delta;
946
}
947

    
948
/* get the current external clock value */
949
static double get_external_clock(VideoState *is)
950
{
951
    int64_t ti;
952
    ti = av_gettime();
953
    return is->external_clock + ((ti - is->external_clock_time) * 1e-6);
954
}
955

    
956
/* get the current master clock value */
957
static double get_master_clock(VideoState *is)
958
{
959
    double val;
960

    
961
    if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
962
        if (is->video_st)
963
            val = get_video_clock(is);
964
        else
965
            val = get_audio_clock(is);
966
    } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
967
        if (is->audio_st)
968
            val = get_audio_clock(is);
969
        else
970
            val = get_video_clock(is);
971
    } else {
972
        val = get_external_clock(is);
973
    }
974
    return val;
975
}
976

    
977
/* seek in the stream */
978
static void stream_seek(VideoState *is, int64_t pos, int rel)
979
{
980
    if (!is->seek_req) {
981
        is->seek_pos = pos;
982
        is->seek_flags = rel < 0 ? AVSEEK_FLAG_BACKWARD : 0;
983
        if (seek_by_bytes)
984
            is->seek_flags |= AVSEEK_FLAG_BYTE;
985
        is->seek_req = 1;
986
    }
987
}
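/* Illustrative use (editorial sketch; the key handling that issues seeks
   lives later in this file): a 10 second relative seek against the master
   clock would look like
 *
 *     double pos = get_master_clock(cur_stream);
 *     pos += 10.0;
 *     stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), 10);
 *
 * where the sign of the last argument only selects AVSEEK_FLAG_BACKWARD. */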
988

    
989
/* pause or resume the video */
990
static void stream_pause(VideoState *is)
991
{
992
    is->paused = !is->paused;
993
    if (!is->paused) {
994
        is->video_current_pts = get_video_clock(is);
995
        is->frame_timer += (av_gettime() - is->video_current_pts_time) / 1000000.0;
996
    }
997
}
998

    
999
/* called to display each frame */
1000
static void video_refresh_timer(void *opaque)
1001
{
1002
    VideoState *is = opaque;
1003
    VideoPicture *vp;
1004
    double actual_delay, delay, sync_threshold, ref_clock, diff;
1005

    
1006
    SubPicture *sp, *sp2;
1007

    
1008
    if (is->video_st) {
1009
        if (is->pictq_size == 0) {
1010
            /* if no picture, need to wait */
1011
            schedule_refresh(is, 1);
1012
        } else {
1013
            /* dequeue the picture */
1014
            vp = &is->pictq[is->pictq_rindex];
1015

    
1016
            /* update current video pts */
1017
            is->video_current_pts = vp->pts;
1018
            is->video_current_pts_time = av_gettime();
1019

    
1020
            /* compute nominal delay */
1021
            delay = vp->pts - is->frame_last_pts;
1022
            if (delay <= 0 || delay >= 1.0) {
1023
                /* if incorrect delay, use previous one */
1024
                delay = is->frame_last_delay;
1025
            }
1026
            is->frame_last_delay = delay;
1027
            is->frame_last_pts = vp->pts;
1028

    
1029
            /* update delay to follow master synchronisation source */
1030
            if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
1031
                 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1032
                /* if video is slave, we try to correct big delays by
1033
                   duplicating or deleting a frame */
1034
                ref_clock = get_master_clock(is);
1035
                diff = vp->pts - ref_clock;
1036

    
1037
                /* skip or repeat frame. We take into account the
1038
                   delay to compute the threshold. I still don't know
1039
                   if it is the best guess */
1040
                sync_threshold = AV_SYNC_THRESHOLD;
1041
                if (delay > sync_threshold)
1042
                    sync_threshold = delay;
1043
                if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
1044
                    if (diff <= -sync_threshold)
1045
                        delay = 0;
1046
                    else if (diff >= sync_threshold)
1047
                        delay = 2 * delay;
1048
                }
1049
            }
1050

    
1051
            is->frame_timer += delay;
1052
            /* compute the REAL delay (we need to do that to avoid
               long-term errors) */
1054
            actual_delay = is->frame_timer - (av_gettime() / 1000000.0);
1055
            if (actual_delay < 0.010) {
1056
                /* XXX: should skip picture */
1057
                actual_delay = 0.010;
1058
            }
1059
            /* launch timer for next picture */
1060
            schedule_refresh(is, (int)(actual_delay * 1000 + 0.5));
1061

    
1062
#if defined(DEBUG_SYNC)
1063
            printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
1064
                   delay, actual_delay, vp->pts, -diff);
1065
#endif
1066

    
1067
            if(is->subtitle_st) {
1068
                if (is->subtitle_stream_changed) {
1069
                    SDL_LockMutex(is->subpq_mutex);
1070

    
1071
                    while (is->subpq_size) {
1072
                        free_subpicture(&is->subpq[is->subpq_rindex]);
1073

    
1074
                        /* update queue size and signal for next picture */
1075
                        if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1076
                            is->subpq_rindex = 0;
1077

    
1078
                        is->subpq_size--;
1079
                    }
1080
                    is->subtitle_stream_changed = 0;
1081

    
1082
                    SDL_CondSignal(is->subpq_cond);
1083
                    SDL_UnlockMutex(is->subpq_mutex);
1084
                } else {
1085
                    if (is->subpq_size > 0) {
1086
                        sp = &is->subpq[is->subpq_rindex];
1087

    
1088
                        if (is->subpq_size > 1)
1089
                            sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
1090
                        else
1091
                            sp2 = NULL;
1092

    
1093
                        if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
1094
                                || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
1095
                        {
1096
                            free_subpicture(sp);
1097

    
1098
                            /* update queue size and signal for next picture */
1099
                            if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1100
                                is->subpq_rindex = 0;
1101

    
1102
                            SDL_LockMutex(is->subpq_mutex);
1103
                            is->subpq_size--;
1104
                            SDL_CondSignal(is->subpq_cond);
1105
                            SDL_UnlockMutex(is->subpq_mutex);
1106
                        }
1107
                    }
1108
                }
1109
            }
1110

    
1111
            /* display picture */
1112
            video_display(is);
1113

    
1114
            /* update queue size and signal for next picture */
1115
            if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1116
                is->pictq_rindex = 0;
1117

    
1118
            SDL_LockMutex(is->pictq_mutex);
1119
            is->pictq_size--;
1120
            SDL_CondSignal(is->pictq_cond);
1121
            SDL_UnlockMutex(is->pictq_mutex);
1122
        }
1123
    } else if (is->audio_st) {
1124
        /* draw the next audio frame */
1125

    
1126
        schedule_refresh(is, 40);
1127

    
1128
        /* if only audio stream, then display the audio bars (better
           than nothing, just to test the implementation) */
1130

    
1131
        /* display picture */
1132
        video_display(is);
1133
    } else {
1134
        schedule_refresh(is, 100);
1135
    }
1136
    if (show_status) {
1137
        static int64_t last_time;
1138
        int64_t cur_time;
1139
        int aqsize, vqsize, sqsize;
1140
        double av_diff;
1141

    
1142
        cur_time = av_gettime();
1143
        if (!last_time || (cur_time - last_time) >= 500 * 1000) {
1144
            aqsize = 0;
1145
            vqsize = 0;
1146
            sqsize = 0;
1147
            if (is->audio_st)
1148
                aqsize = is->audioq.size;
1149
            if (is->video_st)
1150
                vqsize = is->videoq.size;
1151
            if (is->subtitle_st)
1152
                sqsize = is->subtitleq.size;
1153
            av_diff = 0;
1154
            if (is->audio_st && is->video_st)
1155
                av_diff = get_audio_clock(is) - get_video_clock(is);
1156
            printf("%7.2f A-V:%7.3f aq=%5dKB vq=%5dKB sq=%5dB    \r",
1157
                   get_master_clock(is), av_diff, aqsize / 1024, vqsize / 1024, sqsize);
1158
            fflush(stdout);
1159
            last_time = cur_time;
1160
        }
1161
    }
1162
}
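/* Editorial note on the delay computation above: with a 25 fps stream the
   nominal delay is 0.040 s.  If the frame is more than sync_threshold ahead
   of the master clock, the delay is doubled to 0.080 s (the previous frame
   is effectively shown twice); if it is more than sync_threshold behind,
   the delay collapses to 0 and the picture is shown as soon as possible. */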
1163

    
1164
/* allocate a picture (this must be done in the main thread to avoid
   potential locking problems) */
1166
static void alloc_picture(void *opaque)
1167
{
1168
    VideoState *is = opaque;
1169
    VideoPicture *vp;
1170

    
1171
    vp = &is->pictq[is->pictq_windex];
1172

    
1173
    if (vp->bmp)
1174
        SDL_FreeYUVOverlay(vp->bmp);
1175

    
1176
#if 0
1177
    /* XXX: use generic function */
1178
    /* XXX: disable overlay if no hardware acceleration or if RGB format */
1179
    switch(is->video_st->codec->pix_fmt) {
1180
    case PIX_FMT_YUV420P:
1181
    case PIX_FMT_YUV422P:
1182
    case PIX_FMT_YUV444P:
1183
    case PIX_FMT_YUYV422:
1184
    case PIX_FMT_YUV410P:
1185
    case PIX_FMT_YUV411P:
1186
        is_yuv = 1;
1187
        break;
1188
    default:
1189
        is_yuv = 0;
1190
        break;
1191
    }
1192
#endif
1193
    vp->bmp = SDL_CreateYUVOverlay(is->video_st->codec->width,
1194
                                   is->video_st->codec->height,
1195
                                   SDL_YV12_OVERLAY,
1196
                                   screen);
1197
    vp->width = is->video_st->codec->width;
1198
    vp->height = is->video_st->codec->height;
1199

    
1200
    SDL_LockMutex(is->pictq_mutex);
1201
    vp->allocated = 1;
1202
    SDL_CondSignal(is->pictq_cond);
1203
    SDL_UnlockMutex(is->pictq_mutex);
1204
}
1205

    
1206
/**
1207
 *
1208
 * @param pts the dts of the pkt / pts of the frame and guessed if not known
1209
 */
1210
static int queue_picture(VideoState *is, AVFrame *src_frame, double pts)
1211
{
1212
    VideoPicture *vp;
1213
    int dst_pix_fmt;
1214
    AVPicture pict;
1215
    static struct SwsContext *img_convert_ctx;
1216

    
1217
    /* wait until we have space to put a new picture */
1218
    SDL_LockMutex(is->pictq_mutex);
1219
    while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
1220
           !is->videoq.abort_request) {
1221
        SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1222
    }
1223
    SDL_UnlockMutex(is->pictq_mutex);
1224

    
1225
    if (is->videoq.abort_request)
1226
        return -1;
1227

    
1228
    vp = &is->pictq[is->pictq_windex];
1229

    
1230
    /* alloc or resize hardware picture buffer */
1231
    if (!vp->bmp ||
1232
        vp->width != is->video_st->codec->width ||
1233
        vp->height != is->video_st->codec->height) {
1234
        SDL_Event event;
1235

    
1236
        vp->allocated = 0;
1237

    
1238
        /* the allocation must be done in the main thread to avoid
1239
           locking problems */
1240
        event.type = FF_ALLOC_EVENT;
1241
        event.user.data1 = is;
1242
        SDL_PushEvent(&event);
1243

    
1244
        /* wait until the picture is allocated */
1245
        SDL_LockMutex(is->pictq_mutex);
1246
        while (!vp->allocated && !is->videoq.abort_request) {
1247
            SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1248
        }
1249
        SDL_UnlockMutex(is->pictq_mutex);
1250

    
1251
        if (is->videoq.abort_request)
1252
            return -1;
1253
    }
1254

    
1255
    /* if the frame is not skipped, then display it */
1256
    if (vp->bmp) {
1257
        /* get a pointer on the bitmap */
1258
        SDL_LockYUVOverlay (vp->bmp);
1259

    
1260
        dst_pix_fmt = PIX_FMT_YUV420P;
1261
        pict.data[0] = vp->bmp->pixels[0];
1262
        pict.data[1] = vp->bmp->pixels[2];
1263
        pict.data[2] = vp->bmp->pixels[1];
1264

    
1265
        pict.linesize[0] = vp->bmp->pitches[0];
1266
        pict.linesize[1] = vp->bmp->pitches[2];
1267
        pict.linesize[2] = vp->bmp->pitches[1];
1268
        img_convert_ctx = sws_getCachedContext(img_convert_ctx,
1269
            is->video_st->codec->width, is->video_st->codec->height,
1270
            is->video_st->codec->pix_fmt,
1271
            is->video_st->codec->width, is->video_st->codec->height,
1272
            dst_pix_fmt, sws_flags, NULL, NULL, NULL);
1273
        if (img_convert_ctx == NULL) {
1274
            fprintf(stderr, "Cannot initialize the conversion context\n");
1275
            exit(1);
1276
        }
1277
        sws_scale(img_convert_ctx, src_frame->data, src_frame->linesize,
1278
                  0, is->video_st->codec->height, pict.data, pict.linesize);
1279
        /* update the bitmap content */
1280
        SDL_UnlockYUVOverlay(vp->bmp);
1281

    
1282
        vp->pts = pts;
1283

    
1284
        /* now we can update the picture count */
1285
        if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
1286
            is->pictq_windex = 0;
1287
        SDL_LockMutex(is->pictq_mutex);
1288
        is->pictq_size++;
1289
        SDL_UnlockMutex(is->pictq_mutex);
1290
    }
1291
    return 0;
1292
}
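/* Editorial note: the U and V plane pointers/pitches above are deliberately
   swapped (pict.data[1] <- pixels[2], pict.data[2] <- pixels[1]) because an
   SDL_YV12_OVERLAY stores its planes in Y, V, U order while PIX_FMT_YUV420P,
   the sws_scale() destination format, is Y, U, V. */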
1293

    
1294
/**
1295
 * compute the exact PTS for the picture if it is omitted in the stream
1296
 * @param pts1 the dts of the pkt / pts of the frame
1297
 */
1298
static int output_picture2(VideoState *is, AVFrame *src_frame, double pts1)
1299
{
1300
    double frame_delay, pts;
1301

    
1302
    pts = pts1;
1303

    
1304
    if (pts != 0) {
1305
        /* update video clock with pts, if present */
1306
        is->video_clock = pts;
1307
    } else {
1308
        pts = is->video_clock;
1309
    }
1310
    /* update video clock for next frame */
1311
    frame_delay = av_q2d(is->video_st->codec->time_base);
1312
    /* for MPEG2, the frame can be repeated, so we update the
1313
       clock accordingly */
1314
    frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
1315
    is->video_clock += frame_delay;
1316

    
1317
#if defined(DEBUG_SYNC) && 0
1318
    {
1319
        int ftype;
1320
        if (src_frame->pict_type == FF_B_TYPE)
1321
            ftype = 'B';
1322
        else if (src_frame->pict_type == FF_I_TYPE)
1323
            ftype = 'I';
1324
        else
1325
            ftype = 'P';
1326
        printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
1327
               ftype, pts, pts1);
1328
    }
1329
#endif
1330
    return queue_picture(is, src_frame, pts);
1331
}
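/* Editorial example: with a 1/25 time_base, frame_delay starts at 0.040 s;
   a frame with repeat_pict == 1 (e.g. soft telecine) advances video_clock by
   0.040 + 1 * 0.020 = 0.060 s, i.e. the frame is held for 1.5 frame periods. */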
1332

    
1333
static uint64_t global_video_pkt_pts= AV_NOPTS_VALUE;
1334

    
1335
static int my_get_buffer(struct AVCodecContext *c, AVFrame *pic){
1336
    int ret= avcodec_default_get_buffer(c, pic);
1337
    uint64_t *pts= av_malloc(sizeof(uint64_t));
1338
    *pts= global_video_pkt_pts;
1339
    pic->opaque= pts;
1340
    return ret;
1341
}
1342

    
1343
static void my_release_buffer(struct AVCodecContext *c, AVFrame *pic){
1344
    if(pic) av_freep(&pic->opaque);
1345
    avcodec_default_release_buffer(c, pic);
1346
}
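/* Editorial note: the decoder may reorder frames, so the PTS of the packet
   being decoded is not necessarily the PTS of the frame that comes out.
   global_video_pkt_pts is therefore captured in my_get_buffer() at the moment
   the decoder allocates the frame, parked in frame->opaque, and read back in
   video_thread() once the frame is actually returned. */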
1347

    
1348
static int video_thread(void *arg)
1349
{
1350
    VideoState *is = arg;
1351
    AVPacket pkt1, *pkt = &pkt1;
1352
    int len1, got_picture;
1353
    AVFrame *frame= avcodec_alloc_frame();
1354
    double pts;
1355

    
1356
    for(;;) {
1357
        while (is->paused && !is->videoq.abort_request) {
1358
            SDL_Delay(10);
1359
        }
1360
        if (packet_queue_get(&is->videoq, pkt, 1) < 0)
1361
            break;
1362

    
1363
        if(pkt->data == flush_pkt.data){
1364
            avcodec_flush_buffers(is->video_st->codec);
1365
            continue;
1366
        }
1367

    
1368
        /* NOTE: ipts is the PTS of the _first_ picture beginning in
1369
           this packet, if any */
1370
        global_video_pkt_pts= pkt->pts;
1371
        len1 = avcodec_decode_video(is->video_st->codec,
1372
                                    frame, &got_picture,
1373
                                    pkt->data, pkt->size);
1374

    
1375
        if(   (decoder_reorder_pts || pkt->dts == AV_NOPTS_VALUE)
1376
           && frame->opaque && *(uint64_t*)frame->opaque != AV_NOPTS_VALUE)
1377
            pts= *(uint64_t*)frame->opaque;
1378
        else if(pkt->dts != AV_NOPTS_VALUE)
1379
            pts= pkt->dts;
1380
        else
1381
            pts= 0;
1382
        pts *= av_q2d(is->video_st->time_base);
1383

    
1384
//            if (len1 < 0)
1385
//                break;
1386
        if (got_picture) {
1387
            if (output_picture2(is, frame, pts) < 0)
1388
                goto the_end;
1389
        }
1390
        av_free_packet(pkt);
1391
        if (step)
1392
            if (cur_stream)
1393
                stream_pause(cur_stream);
1394
    }
1395
 the_end:
1396
    av_free(frame);
1397
    return 0;
1398
}
1399

    
1400
static int subtitle_thread(void *arg)
1401
{
1402
    VideoState *is = arg;
1403
    SubPicture *sp;
1404
    AVPacket pkt1, *pkt = &pkt1;
1405
    int len1, got_subtitle;
1406
    double pts;
1407
    int i, j;
1408
    int r, g, b, y, u, v, a;
1409

    
1410
    for(;;) {
1411
        while (is->paused && !is->subtitleq.abort_request) {
1412
            SDL_Delay(10);
1413
        }
1414
        if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
1415
            break;
1416

    
1417
        if(pkt->data == flush_pkt.data){
1418
            avcodec_flush_buffers(is->subtitle_st->codec);
1419
            continue;
1420
        }
1421
        SDL_LockMutex(is->subpq_mutex);
1422
        while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
1423
               !is->subtitleq.abort_request) {
1424
            SDL_CondWait(is->subpq_cond, is->subpq_mutex);
1425
        }
1426
        SDL_UnlockMutex(is->subpq_mutex);
1427

    
1428
        if (is->subtitleq.abort_request)
1429
            goto the_end;
1430

    
1431
        sp = &is->subpq[is->subpq_windex];
1432

    
1433
       /* NOTE: ipts is the PTS of the _first_ picture beginning in
1434
           this packet, if any */
1435
        pts = 0;
1436
        if (pkt->pts != AV_NOPTS_VALUE)
1437
            pts = av_q2d(is->subtitle_st->time_base)*pkt->pts;
1438

    
1439
        len1 = avcodec_decode_subtitle(is->subtitle_st->codec,
1440
                                    &sp->sub, &got_subtitle,
1441
                                    pkt->data, pkt->size);
1442
//            if (len1 < 0)
1443
//                break;
1444
        if (got_subtitle && sp->sub.format == 0) {
1445
            sp->pts = pts;
1446

    
1447
            for (i = 0; i < sp->sub.num_rects; i++)
1448
            {
1449
                for (j = 0; j < sp->sub.rects[i].nb_colors; j++)
1450
                {
1451
                    RGBA_IN(r, g, b, a, sp->sub.rects[i].rgba_palette + j);
1452
                    y = RGB_TO_Y_CCIR(r, g, b);
1453
                    u = RGB_TO_U_CCIR(r, g, b, 0);
1454
                    v = RGB_TO_V_CCIR(r, g, b, 0);
1455
                    YUVA_OUT(sp->sub.rects[i].rgba_palette + j, y, u, v, a);
1456
                }
1457
            }
1458

    
1459
            /* now we can update the picture count */
1460
            if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
1461
                is->subpq_windex = 0;
1462
            SDL_LockMutex(is->subpq_mutex);
1463
            is->subpq_size++;
1464
            SDL_UnlockMutex(is->subpq_mutex);
1465
        }
1466
        av_free_packet(pkt);
1467
//        if (step)
1468
//            if (cur_stream)
1469
//                stream_pause(cur_stream);
1470
    }
1471
 the_end:
1472
    return 0;
1473
}
1474

    
1475
/* copy samples for viewing in editor window */
1476
static void update_sample_display(VideoState *is, short *samples, int samples_size)
1477
{
1478
    int size, len, channels;
1479

    
1480
    channels = is->audio_st->codec->channels;
1481

    
1482
    size = samples_size / sizeof(short);
1483
    while (size > 0) {
1484
        len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
1485
        if (len > size)
1486
            len = size;
1487
        memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
1488
        samples += len;
1489
        is->sample_array_index += len;
1490
        if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
1491
            is->sample_array_index = 0;
1492
        size -= len;
1493
    }
1494
}
1495

    
1496
/* return the new audio buffer size (samples can be added or deleted
1497
   to get better sync if video or external master clock) */
1498
static int synchronize_audio(VideoState *is, short *samples,
1499
                             int samples_size1, double pts)
1500
{
1501
    int n, samples_size;
1502
    double ref_clock;
1503

    
1504
    n = 2 * is->audio_st->codec->channels;
1505
    samples_size = samples_size1;
1506

    
1507
    /* if not master, then we try to remove or add samples to correct the clock */
1508
    if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
1509
         is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1510
        double diff, avg_diff;
1511
        int wanted_size, min_size, max_size, nb_samples;
1512

    
1513
        ref_clock = get_master_clock(is);
1514
        diff = get_audio_clock(is) - ref_clock;
1515

    
1516
        if (diff < AV_NOSYNC_THRESHOLD) {
1517
            is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
1518
            if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
1519
                /* not enough measures to have a correct estimate */
1520
                is->audio_diff_avg_count++;
1521
            } else {
1522
                /* estimate the A-V difference */
1523
                avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
1524

    
1525
                if (fabs(avg_diff) >= is->audio_diff_threshold) {
1526
                    wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n);
1527
                    nb_samples = samples_size / n;
1528

    
1529
                    min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1530
                    max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1531
                    if (wanted_size < min_size)
1532
                        wanted_size = min_size;
1533
                    else if (wanted_size > max_size)
1534
                        wanted_size = max_size;
1535

    
1536
                    /* add or remove samples to correct the sync */
1537
                    if (wanted_size < samples_size) {
1538
                        /* remove samples */
1539
                        samples_size = wanted_size;
1540
                    } else if (wanted_size > samples_size) {
1541
                        uint8_t *samples_end, *q;
1542
                        int nb;
1543

    
1544
                        /* add samples */
1545
                        nb = (samples_size - wanted_size);
1546
                        samples_end = (uint8_t *)samples + samples_size - n;
1547
                        q = samples_end + n;
1548
                        while (nb > 0) {
1549
                            memcpy(q, samples_end, n);
1550
                            q += n;
1551
                            nb -= n;
1552
                        }
1553
                        samples_size = wanted_size;
1554
                    }
1555
                }
1556
#if 0
1557
                printf("diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
1558
                       diff, avg_diff, samples_size - samples_size1,
1559
                       is->audio_clock, is->video_clock, is->audio_diff_threshold);
1560
#endif
1561
            }
1562
        } else {
1563
            /* too big difference : may be initial PTS errors, so
1564
               reset A-V filter */
1565
            is->audio_diff_avg_count = 0;
1566
            is->audio_diff_cum = 0;
1567
        }
1568
    }
1569

    
1570
    return samples_size;
1571
}
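/* Editorial example of the correction above: if the audio clock is 50 ms
   ahead of the master clock at 44100 Hz stereo 16-bit (n = 4), wanted_size
   grows by about 0.05 * 44100 * 4 ~= 8820 bytes, but the change is clamped
   to SAMPLE_CORRECTION_PERCENT_MAX (10%) of the current buffer so the pitch
   distortion stays modest. */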
1572

    
1573
/* decode one audio frame and return its uncompressed size */
1574
static int audio_decode_frame(VideoState *is, uint8_t *audio_buf, int buf_size, double *pts_ptr)
1575
{
1576
    AVPacket *pkt = &is->audio_pkt;
1577
    int n, len1, data_size;
1578
    double pts;
1579

    
1580
    for(;;) {
1581
        /* NOTE: the audio packet can contain several frames */
1582
        while (is->audio_pkt_size > 0) {
1583
            data_size = buf_size;
1584
            len1 = avcodec_decode_audio2(is->audio_st->codec,
1585
                                        (int16_t *)audio_buf, &data_size,
1586
                                        is->audio_pkt_data, is->audio_pkt_size);
1587
            if (len1 < 0) {
1588
                /* if error, we skip the frame */
1589
                is->audio_pkt_size = 0;
1590
                break;
1591
            }
1592

    
1593
            is->audio_pkt_data += len1;
1594
            is->audio_pkt_size -= len1;
1595
            if (data_size <= 0)
1596
                continue;
1597
            /* if no pts, then compute it */
1598
            pts = is->audio_clock;
1599
            *pts_ptr = pts;
1600
            n = 2 * is->audio_st->codec->channels;
1601
            is->audio_clock += (double)data_size /
1602
                (double)(n * is->audio_st->codec->sample_rate);
1603
#if defined(DEBUG_SYNC)
1604
            {
1605
                static double last_clock;
1606
                printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
1607
                       is->audio_clock - last_clock,
1608
                       is->audio_clock, pts);
1609
                last_clock = is->audio_clock;
1610
            }
1611
#endif
1612
            return data_size;
1613
        }
1614

    
1615
        /* free the current packet */
1616
        if (pkt->data)
1617
            av_free_packet(pkt);
1618

    
1619
        if (is->paused || is->audioq.abort_request) {
1620
            return -1;
1621
        }
1622

    
1623
        /* read next packet */
1624
        if (packet_queue_get(&is->audioq, pkt, 1) < 0)
1625
            return -1;
1626
        if(pkt->data == flush_pkt.data){
1627
            avcodec_flush_buffers(is->audio_st->codec);
1628
            continue;
1629
        }
1630

    
1631
        is->audio_pkt_data = pkt->data;
1632
        is->audio_pkt_size = pkt->size;
1633

    
1634
        /* if update the audio clock with the pts */
1635
        if (pkt->pts != AV_NOPTS_VALUE) {
1636
            is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
1637
        }
1638
    }
1639
}
1640

    
1641
/* get the amount of decoded audio data, in bytes, still waiting to be
   played. With SDL, we cannot have precise information about the hardware
   buffer fullness */
static int audio_write_get_buf_size(VideoState *is)
{
    return is->audio_buf_size - is->audio_buf_index;
}

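/* The callback below runs in the thread created by SDL_OpenAudio(): SDL
   requests exactly 'len' bytes and they must be filled before returning.
   The internal buffer is refilled from audio_decode_frame() whenever it is
   exhausted; on a decode error 1024 bytes of silence are output instead so
   that playback never stalls. */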
/* prepare a new audio buffer */
void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
{
    VideoState *is = opaque;
    int audio_size, len1;
    double pts;

    audio_callback_time = av_gettime();

    while (len > 0) {
        if (is->audio_buf_index >= is->audio_buf_size) {
            audio_size = audio_decode_frame(is, is->audio_buf, sizeof(is->audio_buf), &pts);
            if (audio_size < 0) {
                /* if error, just output silence */
                is->audio_buf_size = 1024;
                memset(is->audio_buf, 0, is->audio_buf_size);
            } else {
                if (is->show_audio)
                    update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
                audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, audio_size,
                                               pts);
                is->audio_buf_size = audio_size;
            }
            is->audio_buf_index = 0;
        }
        len1 = is->audio_buf_size - is->audio_buf_index;
        if (len1 > len)
            len1 = len;
        memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
        len -= len1;
        stream += len1;
        is->audio_buf_index += len1;
    }
}

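/* Stream setup: find and open the decoder for the chosen stream, apply the
   command line codec options, and start whatever consumes the stream (the
   SDL audio device for audio, a dedicated SDL thread for video and
   subtitles). */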
/* open a given stream. Return 0 if OK */
static int stream_component_open(VideoState *is, int stream_index)
{
    AVFormatContext *ic = is->ic;
    AVCodecContext *enc;
    AVCodec *codec;
    SDL_AudioSpec wanted_spec, spec;

    if (stream_index < 0 || stream_index >= ic->nb_streams)
        return -1;
    enc = ic->streams[stream_index]->codec;

    /* prepare audio output */
    if (enc->codec_type == CODEC_TYPE_AUDIO) {
        wanted_spec.freq = enc->sample_rate;
        wanted_spec.format = AUDIO_S16SYS;
        /* hack for AC3: limit output to 2 channels. XXX: remove that */
        if (enc->channels > 2)
            enc->channels = 2;
        wanted_spec.channels = enc->channels;
        wanted_spec.silence = 0;
        wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
        wanted_spec.callback = sdl_audio_callback;
        wanted_spec.userdata = is;
        if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
            fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
            return -1;
        }
        is->audio_hw_buf_size = spec.size;
    }

    codec = avcodec_find_decoder(enc->codec_id);
    enc->debug_mv = debug_mv;
    enc->debug = debug;
    enc->workaround_bugs = workaround_bugs;
    enc->lowres = lowres;
    if(lowres) enc->flags |= CODEC_FLAG_EMU_EDGE;
    enc->idct_algo= idct;
    if(fast) enc->flags2 |= CODEC_FLAG2_FAST;
    enc->skip_frame= skip_frame;
    enc->skip_idct= skip_idct;
    enc->skip_loop_filter= skip_loop_filter;
    enc->error_resilience= error_resilience;
    enc->error_concealment= error_concealment;
    if (!codec ||
        avcodec_open(enc, codec) < 0)
        return -1;
    if(thread_count>1)
        avcodec_thread_init(enc, thread_count);
    enc->thread_count= thread_count;
    switch(enc->codec_type) {
    case CODEC_TYPE_AUDIO:
        is->audio_stream = stream_index;
        is->audio_st = ic->streams[stream_index];
        is->audio_buf_size = 0;
        is->audio_buf_index = 0;

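        /* the A-V difference is smoothed with an exponential average whose
           coefficient is chosen so that the last AUDIO_DIFF_AVG_NB samples
           carry 99% of the weight (coef ^ AUDIO_DIFF_AVG_NB == 0.01) */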
        /* init averaging filter */
        is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
        is->audio_diff_avg_count = 0;
        /* since we do not have a precise enough measure of the audio FIFO
           fullness, we only correct audio sync if the error is larger than
           this threshold */
        is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / enc->sample_rate;

        memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
        packet_queue_init(&is->audioq);
        SDL_PauseAudio(0);
        break;
    case CODEC_TYPE_VIDEO:
        is->video_stream = stream_index;
        is->video_st = ic->streams[stream_index];

        is->frame_last_delay = 40e-3;
        is->frame_timer = (double)av_gettime() / 1000000.0;
        is->video_current_pts_time = av_gettime();

        packet_queue_init(&is->videoq);
        is->video_tid = SDL_CreateThread(video_thread, is);

        enc->get_buffer = my_get_buffer;
        enc->release_buffer = my_release_buffer;
        break;
    case CODEC_TYPE_SUBTITLE:
        is->subtitle_stream = stream_index;
        is->subtitle_st = ic->streams[stream_index];
        packet_queue_init(&is->subtitleq);

        is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
        break;
    default:
        break;
    }
    return 0;
}

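/* Close one stream component: abort its packet queue so the consumer wakes
   up, join the thread (or close the SDL audio device), then release the
   codec and the queue. */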
static void stream_component_close(VideoState *is, int stream_index)
{
    AVFormatContext *ic = is->ic;
    AVCodecContext *enc;

    if (stream_index < 0 || stream_index >= ic->nb_streams)
        return;
    enc = ic->streams[stream_index]->codec;

    switch(enc->codec_type) {
    case CODEC_TYPE_AUDIO:
        packet_queue_abort(&is->audioq);

        SDL_CloseAudio();

        packet_queue_end(&is->audioq);
        break;
    case CODEC_TYPE_VIDEO:
        packet_queue_abort(&is->videoq);

        /* note: we also signal this mutex to make sure we unblock the
           video thread in all cases */
        SDL_LockMutex(is->pictq_mutex);
        SDL_CondSignal(is->pictq_cond);
        SDL_UnlockMutex(is->pictq_mutex);

        SDL_WaitThread(is->video_tid, NULL);

        packet_queue_end(&is->videoq);
        break;
    case CODEC_TYPE_SUBTITLE:
        packet_queue_abort(&is->subtitleq);

        /* note: we also signal this mutex to make sure we unblock the
           subtitle thread in all cases */
        SDL_LockMutex(is->subpq_mutex);
        is->subtitle_stream_changed = 1;

        SDL_CondSignal(is->subpq_cond);
        SDL_UnlockMutex(is->subpq_mutex);

        SDL_WaitThread(is->subtitle_tid, NULL);

        packet_queue_end(&is->subtitleq);
        break;
    default:
        break;
    }

    avcodec_close(enc);
    switch(enc->codec_type) {
    case CODEC_TYPE_AUDIO:
        is->audio_st = NULL;
        is->audio_stream = -1;
        break;
    case CODEC_TYPE_VIDEO:
        is->video_st = NULL;
        is->video_stream = -1;
        break;
    case CODEC_TYPE_SUBTITLE:
        is->subtitle_st = NULL;
        is->subtitle_stream = -1;
        break;
    default:
        break;
    }
}

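/* print the container-level metadata (track, title, author, ...) to stderr */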
static void dump_stream_info(const AVFormatContext *s)
{
    if (s->track != 0)
        fprintf(stderr, "Track: %d\n", s->track);
    if (s->title[0] != '\0')
        fprintf(stderr, "Title: %s\n", s->title);
    if (s->author[0] != '\0')
        fprintf(stderr, "Author: %s\n", s->author);
    if (s->copyright[0] != '\0')
        fprintf(stderr, "Copyright: %s\n", s->copyright);
    if (s->comment[0] != '\0')
        fprintf(stderr, "Comment: %s\n", s->comment);
    if (s->album[0] != '\0')
        fprintf(stderr, "Album: %s\n", s->album);
    if (s->year != 0)
        fprintf(stderr, "Year: %d\n", s->year);
    if (s->genre[0] != '\0')
        fprintf(stderr, "Genre: %s\n", s->genre);
}

/* since we have only one decoding thread, we can use a global
   variable instead of a thread local variable */
static VideoState *global_video_state;

static int decode_interrupt_cb(void)
{
    return (global_video_state && global_video_state->abort_request);
}

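/* The parse thread below owns the AVFormatContext: it opens the input,
   selects the streams, then loops reading packets and dispatching them to
   the audio, video and subtitle packet queues.  Seek requests posted by the
   event loop are also serviced here. */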
/* this thread gets the stream from the disk or the network */
static int decode_thread(void *arg)
{
    VideoState *is = arg;
    AVFormatContext *ic;
    int err, i, ret, video_index, audio_index, use_play;
    AVPacket pkt1, *pkt = &pkt1;
    AVFormatParameters params, *ap = &params;

    video_index = -1;
    audio_index = -1;
    is->video_stream = -1;
    is->audio_stream = -1;
    is->subtitle_stream = -1;

    global_video_state = is;
    url_set_interrupt_cb(decode_interrupt_cb);

    memset(ap, 0, sizeof(*ap));
    ap->initial_pause = 1; /* we force a pause when starting an RTSP
                              stream */

    ap->width = frame_width;
    ap->height= frame_height;
    ap->time_base= (AVRational){1, 25};
    ap->pix_fmt = frame_pix_fmt;

    err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
    if (err < 0) {
        print_error(is->filename, err);
        ret = -1;
        goto fail;
    }
    is->ic = ic;
#ifdef CONFIG_RTSP_DEMUXER
    use_play = (ic->iformat == &rtsp_demuxer);
#else
    use_play = 0;
#endif

    if(genpts)
        ic->flags |= AVFMT_FLAG_GENPTS;

    if (!use_play) {
        err = av_find_stream_info(ic);
        if (err < 0) {
            fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
            ret = -1;
            goto fail;
        }
        ic->pb.eof_reached= 0; //FIXME hack, ffplay maybe should not use url_feof() to test for the end
    }

    /* if seeking requested, we execute it */
    if (start_time != AV_NOPTS_VALUE) {
        int64_t timestamp;

        timestamp = start_time;
        /* add the stream start time */
        if (ic->start_time != AV_NOPTS_VALUE)
            timestamp += ic->start_time;
        ret = av_seek_frame(ic, -1, timestamp, AVSEEK_FLAG_BACKWARD);
        if (ret < 0) {
            fprintf(stderr, "%s: could not seek to position %0.3f\n",
                    is->filename, (double)timestamp / AV_TIME_BASE);
        }
    }

    /* now we can begin to play (RTSP stream only) */
    av_read_play(ic);

    if (use_play) {
        err = av_find_stream_info(ic);
        if (err < 0) {
            fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
            ret = -1;
            goto fail;
        }
    }

    for(i = 0; i < ic->nb_streams; i++) {
        AVCodecContext *enc = ic->streams[i]->codec;
        switch(enc->codec_type) {
        case CODEC_TYPE_AUDIO:
            if ((audio_index < 0 || wanted_audio_stream-- > 0) && !audio_disable)
                audio_index = i;
            break;
        case CODEC_TYPE_VIDEO:
            if ((video_index < 0 || wanted_video_stream-- > 0) && !video_disable)
                video_index = i;
            break;
        default:
            break;
        }
    }
    if (show_status) {
        dump_format(ic, 0, is->filename, 0);
        dump_stream_info(ic);
    }

    /* open the streams */
    if (audio_index >= 0) {
        stream_component_open(is, audio_index);
    }

    if (video_index >= 0) {
        stream_component_open(is, video_index);
    } else {
        if (!display_disable)
            is->show_audio = 1;
    }

    if (is->video_stream < 0 && is->audio_stream < 0) {
        fprintf(stderr, "%s: could not open codecs\n", is->filename);
        ret = -1;
        goto fail;
    }

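    /* main demux loop: keep the packet queues filled until an abort request
       or an unrecoverable read error; reading pauses while the queues are
       over their MAX_*Q_SIZE limits or the end of file has been reached */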
    for(;;) {
        if (is->abort_request)
            break;
        if (is->paused != is->last_paused) {
            is->last_paused = is->paused;
            if (is->paused)
                av_read_pause(ic);
            else
                av_read_play(ic);
        }
#ifdef CONFIG_RTSP_DEMUXER
        if (is->paused && ic->iformat == &rtsp_demuxer) {
            /* wait 10 ms to avoid trying to get another packet */
            /* XXX: horrible */
            SDL_Delay(10);
            continue;
        }
#endif
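        /* seek requests: is->seek_pos is in AV_TIME_BASE units and is
           rescaled to the time base of the stream used for seeking; after a
           successful seek each packet queue is flushed and a flush_pkt is
           queued so that the decoders drop their buffered state */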
        if (is->seek_req) {
            int stream_index= -1;
            int64_t seek_target= is->seek_pos;

            if     (is->   video_stream >= 0) stream_index= is->   video_stream;
            else if(is->   audio_stream >= 0) stream_index= is->   audio_stream;
            else if(is->subtitle_stream >= 0) stream_index= is->subtitle_stream;

            if(stream_index>=0){
                seek_target= av_rescale_q(seek_target, AV_TIME_BASE_Q, ic->streams[stream_index]->time_base);
            }

            ret = av_seek_frame(is->ic, stream_index, seek_target, is->seek_flags);
            if (ret < 0) {
                fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
            }else{
                if (is->audio_stream >= 0) {
                    packet_queue_flush(&is->audioq);
                    packet_queue_put(&is->audioq, &flush_pkt);
                }
                if (is->subtitle_stream >= 0) {
                    packet_queue_flush(&is->subtitleq);
                    packet_queue_put(&is->subtitleq, &flush_pkt);
                }
                if (is->video_stream >= 0) {
                    packet_queue_flush(&is->videoq);
                    packet_queue_put(&is->videoq, &flush_pkt);
                }
            }
            is->seek_req = 0;
        }

        /* if the queues are full, no need to read more */
        if (is->audioq.size > MAX_AUDIOQ_SIZE ||
            is->videoq.size > MAX_VIDEOQ_SIZE ||
            is->subtitleq.size > MAX_SUBTITLEQ_SIZE ||
            url_feof(&ic->pb)) {
            /* wait 10 ms */
            SDL_Delay(10);
            continue;
        }
        ret = av_read_frame(ic, pkt);
        if (ret < 0) {
            if (url_ferror(&ic->pb) == 0) {
                SDL_Delay(100); /* wait for user event */
                continue;
            } else
                break;
        }
        if (pkt->stream_index == is->audio_stream) {
            packet_queue_put(&is->audioq, pkt);
        } else if (pkt->stream_index == is->video_stream) {
            packet_queue_put(&is->videoq, pkt);
        } else if (pkt->stream_index == is->subtitle_stream) {
            packet_queue_put(&is->subtitleq, pkt);
        } else {
            av_free_packet(pkt);
        }
    }
    /* wait until the end */
    while (!is->abort_request) {
        SDL_Delay(100);
    }

    ret = 0;
 fail:
    /* disable interrupting */
    global_video_state = NULL;

    /* close each stream */
    if (is->audio_stream >= 0)
        stream_component_close(is, is->audio_stream);
    if (is->video_stream >= 0)
        stream_component_close(is, is->video_stream);
    if (is->subtitle_stream >= 0)
        stream_component_close(is, is->subtitle_stream);
    if (is->ic) {
        av_close_input_file(is->ic);
        is->ic = NULL; /* safety */
    }
    url_set_interrupt_cb(NULL);

    if (ret != 0) {
        SDL_Event event;

        event.type = FF_QUIT_EVENT;
        event.user.data1 = is;
        SDL_PushEvent(&event);
    }
    return 0;
}

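/* Allocate a VideoState, schedule the first display refresh and spawn the
   parse thread; returns NULL if the allocation or the thread creation
   fails. */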
static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
{
    VideoState *is;

    is = av_mallocz(sizeof(VideoState));
    if (!is)
        return NULL;
    av_strlcpy(is->filename, filename, sizeof(is->filename));
    is->iformat = iformat;
    is->ytop = 0;
    is->xleft = 0;

    /* start video display */
    is->pictq_mutex = SDL_CreateMutex();
    is->pictq_cond = SDL_CreateCond();

    is->subpq_mutex = SDL_CreateMutex();
    is->subpq_cond = SDL_CreateCond();

    /* add the refresh timer to draw the picture */
    schedule_refresh(is, 40);

    is->av_sync_type = av_sync_type;
    is->parse_tid = SDL_CreateThread(decode_thread, is);
    if (!is->parse_tid) {
        av_free(is);
        return NULL;
    }
    return is;
}

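/* Stop the parse thread and free the picture queue overlays and the
   synchronization primitives owned by the VideoState. */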
static void stream_close(VideoState *is)
{
    VideoPicture *vp;
    int i;
    /* XXX: use a special url_shutdown call to abort parse cleanly */
    is->abort_request = 1;
    SDL_WaitThread(is->parse_tid, NULL);

    /* free all pictures */
    for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE; i++) {
        vp = &is->pictq[i];
        if (vp->bmp) {
            SDL_FreeYUVOverlay(vp->bmp);
            vp->bmp = NULL;
        }
    }
    SDL_DestroyMutex(is->pictq_mutex);
    SDL_DestroyCond(is->pictq_cond);
    SDL_DestroyMutex(is->subpq_mutex);
    SDL_DestroyCond(is->subpq_cond);
}

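/* Cycle to the next stream of the given type (bound to the 'a', 'v' and 't'
   keys): scan forward from the current stream, wrapping around, and reopen
   the component on the first usable candidate; for subtitles the cycle may
   legitimately end on "no stream". */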
static void stream_cycle_channel(VideoState *is, int codec_type)
{
    AVFormatContext *ic = is->ic;
    int start_index, stream_index;
    AVStream *st;

    if (codec_type == CODEC_TYPE_VIDEO)
        start_index = is->video_stream;
    else if (codec_type == CODEC_TYPE_AUDIO)
        start_index = is->audio_stream;
    else
        start_index = is->subtitle_stream;
    if (start_index < (codec_type == CODEC_TYPE_SUBTITLE ? -1 : 0))
        return;
    stream_index = start_index;
    for(;;) {
        if (++stream_index >= is->ic->nb_streams)
        {
            if (codec_type == CODEC_TYPE_SUBTITLE)
            {
                stream_index = -1;
                goto the_end;
            } else
                stream_index = 0;
        }
        if (stream_index == start_index)
            return;
        st = ic->streams[stream_index];
        if (st->codec->codec_type == codec_type) {
            /* check that parameters are OK */
            switch(codec_type) {
            case CODEC_TYPE_AUDIO:
                if (st->codec->sample_rate != 0 &&
                    st->codec->channels != 0)
                    goto the_end;
                break;
            case CODEC_TYPE_VIDEO:
            case CODEC_TYPE_SUBTITLE:
                goto the_end;
            default:
                break;
            }
        }
    }
 the_end:
    stream_component_close(is, start_index);
    stream_component_open(is, stream_index);
}

static void toggle_full_screen(void)
{
    is_full_screen = !is_full_screen;
    if (!fs_screen_width) {
        /* use default SDL method */
//        SDL_WM_ToggleFullScreen(screen);
    }
    video_open(cur_stream);
}

static void toggle_pause(void)
{
    if (cur_stream)
        stream_pause(cur_stream);
    step = 0;
}

static void step_to_next_frame(void)
{
    if (cur_stream) {
        if (cur_stream->paused)
            cur_stream->paused=0;
        cur_stream->video_current_pts = get_video_clock(cur_stream);
    }
    step = 1;
}

static void do_exit(void)
{
    if (cur_stream) {
        stream_close(cur_stream);
        cur_stream = NULL;
    }
    if (show_status)
        printf("\n");
    SDL_Quit();
    exit(0);
}

static void toggle_audio_display(void)
{
    if (cur_stream) {
        cur_stream->show_audio = !cur_stream->show_audio;
    }
}

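/* The SDL event loop: keyboard shortcuts, mouse seeking, window resizing and
   the custom FF_ALLOC/FF_REFRESH/FF_QUIT events posted by the other threads
   are all handled here. */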
/* handle an event sent by the GUI */
static void event_loop(void)
{
    SDL_Event event;
    double incr, pos, frac;

    for(;;) {
        SDL_WaitEvent(&event);
        switch(event.type) {
        case SDL_KEYDOWN:
            switch(event.key.keysym.sym) {
            case SDLK_ESCAPE:
            case SDLK_q:
                do_exit();
                break;
            case SDLK_f:
                toggle_full_screen();
                break;
            case SDLK_p:
            case SDLK_SPACE:
                toggle_pause();
                break;
            case SDLK_s: //S: Step to next frame
                step_to_next_frame();
                break;
            case SDLK_a:
                if (cur_stream)
                    stream_cycle_channel(cur_stream, CODEC_TYPE_AUDIO);
                break;
            case SDLK_v:
                if (cur_stream)
                    stream_cycle_channel(cur_stream, CODEC_TYPE_VIDEO);
                break;
            case SDLK_t:
                if (cur_stream)
                    stream_cycle_channel(cur_stream, CODEC_TYPE_SUBTITLE);
                break;
            case SDLK_w:
                toggle_audio_display();
                break;
            case SDLK_LEFT:
                incr = -10.0;
                goto do_seek;
            case SDLK_RIGHT:
                incr = 10.0;
                goto do_seek;
            case SDLK_UP:
                incr = 60.0;
                goto do_seek;
            case SDLK_DOWN:
                incr = -60.0;
            do_seek:
                if (cur_stream) {
                    if (seek_by_bytes) {
                        pos = url_ftell(&cur_stream->ic->pb);
                        if (cur_stream->ic->bit_rate)
                            incr *= cur_stream->ic->bit_rate / 60.0;
                        else
                            incr *= 180000.0;
                        pos += incr;
                        stream_seek(cur_stream, pos, incr);
                    } else {
                        pos = get_master_clock(cur_stream);
                        pos += incr;
                        stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), incr);
                    }
                }
                break;
            default:
                break;
            }
            break;
        case SDL_MOUSEBUTTONDOWN:
            if (cur_stream) {
                int ns, hh, mm, ss;
                int tns, thh, tmm, tss;
                tns = cur_stream->ic->duration/1000000LL;
                thh = tns/3600;
                tmm = (tns%3600)/60;
                tss = (tns%60);
                frac = (double)event.button.x/(double)cur_stream->width;
                ns = frac*tns;
                hh = ns/3600;
                mm = (ns%3600)/60;
                ss = (ns%60);
                fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d)       \n", frac*100,
                        hh, mm, ss, thh, tmm, tss);
                stream_seek(cur_stream, (int64_t)(cur_stream->ic->start_time+frac*cur_stream->ic->duration), 0);
            }
            break;
        case SDL_VIDEORESIZE:
            if (cur_stream) {
                screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
                                          SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
                screen_width = cur_stream->width = event.resize.w;
                screen_height= cur_stream->height= event.resize.h;
            }
            break;
        case SDL_QUIT:
        case FF_QUIT_EVENT:
            do_exit();
            break;
        case FF_ALLOC_EVENT:
            video_open(event.user.data1);
            alloc_picture(event.user.data1);
            break;
        case FF_REFRESH_EVENT:
            video_refresh_timer(event.user.data1);
            break;
        default:
            break;
        }
    }
}

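/* command line option handlers, referenced from the options[] table below */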
static void opt_frame_size(const char *arg)
{
    if (av_parse_video_frame_size(&frame_width, &frame_height, arg) < 0) {
        fprintf(stderr, "Incorrect frame size\n");
        exit(1);
    }
    if ((frame_width % 2) != 0 || (frame_height % 2) != 0) {
        fprintf(stderr, "Frame size must be a multiple of 2\n");
        exit(1);
    }
}

static void opt_width(const char *arg)
{
    screen_width = atoi(arg);
    if(screen_width<=0){
        fprintf(stderr, "invalid width\n");
        exit(1);
    }
}

static void opt_height(const char *arg)
{
    screen_height = atoi(arg);
    if(screen_height<=0){
        fprintf(stderr, "invalid height\n");
        exit(1);
    }
}

static void opt_format(const char *arg)
{
    file_iformat = av_find_input_format(arg);
    if (!file_iformat) {
        fprintf(stderr, "Unknown input format: %s\n", arg);
        exit(1);
    }
}

static void opt_frame_pix_fmt(const char *arg)
{
    frame_pix_fmt = avcodec_get_pix_fmt(arg);
}

#ifdef CONFIG_RTSP_DEMUXER
static void opt_rtp_tcp(void)
{
    /* only tcp protocol */
    rtsp_default_protocols = (1 << RTSP_PROTOCOL_RTP_TCP);
}
#endif

static void opt_sync(const char *arg)
{
    if (!strcmp(arg, "audio"))
        av_sync_type = AV_SYNC_AUDIO_MASTER;
    else if (!strcmp(arg, "video"))
        av_sync_type = AV_SYNC_VIDEO_MASTER;
    else if (!strcmp(arg, "ext"))
        av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
    else {
        show_help();
        exit(1);
    }
}

static void opt_seek(const char *arg)
{
    start_time = parse_date(arg, 1);
}

static void opt_debug(const char *arg)
{
    av_log_level = 99;
    debug = atoi(arg);
}

static void opt_vismv(const char *arg)
{
    debug_mv = atoi(arg);
}

static void opt_thread_count(const char *arg)
{
    thread_count= atoi(arg);
#if !defined(HAVE_THREADS)
    fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
#endif
}

static void opt_show_help(void)
{
    show_help();
    exit(0);
}

const OptionDef options[] = {
    { "h", 0, {(void*)opt_show_help}, "show help" },
    { "x", HAS_ARG, {(void*)opt_width}, "force displayed width", "width" },
    { "y", HAS_ARG, {(void*)opt_height}, "force displayed height", "height" },
    { "s", HAS_ARG | OPT_VIDEO, {(void*)opt_frame_size}, "set frame size (WxH or abbreviation)", "size" },
    { "fs", OPT_BOOL, {(void*)&is_full_screen}, "force full screen" },
    { "an", OPT_BOOL, {(void*)&audio_disable}, "disable audio" },
    { "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
    { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_audio_stream}, "", "" },
    { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_video_stream}, "", "" },
    { "ss", HAS_ARG, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
    { "bytes", OPT_BOOL, {(void*)&seek_by_bytes}, "seek by bytes" },
    { "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
    { "f", HAS_ARG, {(void*)opt_format}, "force format", "fmt" },
    { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, {(void*)opt_frame_pix_fmt}, "set pixel format", "format" },
    { "stats", OPT_BOOL | OPT_EXPERT, {(void*)&show_status}, "show status", "" },
    { "debug", HAS_ARG | OPT_EXPERT, {(void*)opt_debug}, "print specific debug info", "" },
    { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&workaround_bugs}, "workaround bugs", "" },
    { "vismv", HAS_ARG | OPT_EXPERT, {(void*)opt_vismv}, "visualize motion vectors", "" },
    { "fast", OPT_BOOL | OPT_EXPERT, {(void*)&fast}, "non spec compliant optimizations", "" },
    { "genpts", OPT_BOOL | OPT_EXPERT, {(void*)&genpts}, "generate pts", "" },
    { "drp", OPT_BOOL |OPT_EXPERT, {(void*)&decoder_reorder_pts}, "let decoder reorder pts", ""},
    { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&lowres}, "", "" },
    { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_loop_filter}, "", "" },
    { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_frame}, "", "" },
    { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_idct}, "", "" },
    { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&idct}, "set idct algo",  "algo" },
    { "er", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_resilience}, "set error detection threshold (0-4)",  "threshold" },
    { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_concealment}, "set error concealment options",  "bit_mask" },
#ifdef CONFIG_RTSP_DEMUXER
    { "rtp_tcp", OPT_EXPERT, {(void*)&opt_rtp_tcp}, "force RTP/TCP protocol usage", "" },
#endif
    { "sync", HAS_ARG | OPT_EXPERT, {(void*)opt_sync}, "set audio-video sync. type (type=audio/video/ext)", "type" },
    { "threads", HAS_ARG | OPT_EXPERT, {(void*)opt_thread_count}, "thread count", "count" },
    { NULL, },
};

void show_help(void)
{
    printf("ffplay version " FFMPEG_VERSION ", Copyright (c) 2003-2007 Fabrice Bellard, et al.\n"
           "usage: ffplay [options] input_file\n"
           "Simple media player\n");
    printf("\n");
    show_help_options(options, "Main options:\n",
                      OPT_EXPERT, 0);
    show_help_options(options, "\nAdvanced options:\n",
                      OPT_EXPERT, OPT_EXPERT);
    printf("\nWhile playing:\n"
           "q, ESC              quit\n"
           "f                   toggle full screen\n"
           "p, SPC              pause\n"
           "a                   cycle audio channel\n"
           "v                   cycle video channel\n"
           "t                   cycle subtitle channel\n"
           "w                   show audio waves\n"
           "left/right          seek backward/forward 10 seconds\n"
           "down/up             seek backward/forward 1 minute\n"
           "mouse click         seek to percentage in file corresponding to fraction of width\n"
           );
}

void parse_arg_file(const char *filename)
{
    if (!strcmp(filename, "-"))
        filename = "pipe:";
    input_filename = filename;
}

/* program entry point */
int main(int argc, char **argv)
{
    int flags;

    /* register all codecs, demuxers and protocols */
    av_register_all();

    parse_options(argc, argv, options);

    if (!input_filename) {
        show_help();
        exit(1);
    }

    if (display_disable) {
        video_disable = 1;
    }
    flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
#if !defined(__MINGW32__) && !defined(CONFIG_DARWIN)
    flags |= SDL_INIT_EVENTTHREAD; /* Not supported on win32 or darwin */
#endif
    if (SDL_Init (flags)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

    if (!display_disable) {
#ifdef HAVE_SDL_VIDEO_SIZE
        const SDL_VideoInfo *vi = SDL_GetVideoInfo();
        fs_screen_width = vi->current_w;
        fs_screen_height = vi->current_h;
#endif
    }

    SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
    SDL_EventState(SDL_MOUSEMOTION, SDL_IGNORE);
    SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
    SDL_EventState(SDL_USEREVENT, SDL_IGNORE);

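    /* flush_pkt is a sentinel: the decoders recognize it by comparing the
       data pointer against flush_pkt.data and respond by flushing their
       codec buffers after a seek */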
    av_init_packet(&flush_pkt);
    flush_pkt.data= "FLUSH";

    cur_stream = stream_open(input_filename, file_iformat);

    event_loop();

    /* never returns */

    return 0;
}