ffmpeg / ffplay.c @ 39c6a118


1
/*
2
 * FFplay : Simple Media Player based on the ffmpeg libraries
3
 * Copyright (c) 2003 Fabrice Bellard
4
 *
5
 * This file is part of FFmpeg.
6
 *
7
 * FFmpeg is free software; you can redistribute it and/or
8
 * modify it under the terms of the GNU Lesser General Public
9
 * License as published by the Free Software Foundation; either
10
 * version 2.1 of the License, or (at your option) any later version.
11
 *
12
 * FFmpeg is distributed in the hope that it will be useful,
13
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
 * Lesser General Public License for more details.
16
 *
17
 * You should have received a copy of the GNU Lesser General Public
18
 * License along with FFmpeg; if not, write to the Free Software
19
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20
 */
21
#define HAVE_AV_CONFIG_H
22
#include "avformat.h"
23
#include "swscale.h"
24

    
25
#include "version.h"
26
#include "cmdutils.h"
27

    
28
#include <SDL.h>
29
#include <SDL_thread.h>
30

    
31
#ifdef __MINGW32__
32
#undef main /* We don't want SDL to override our main() */
33
#endif
34

    
35
#ifdef CONFIG_OS2
36
#define INCL_DOS
37
 #include <os2.h>
38
 #include <stdio.h>
39

    
40
 void MorphToPM()
41
 {
42
   PPIB pib;
43
   PTIB tib;
44

    
45
   DosGetInfoBlocks(&tib, &pib);
46

    
47
   // Change flag from VIO to PM:
48
   if (pib->pib_ultype==2) pib->pib_ultype = 3;
49
 }
50
#endif
51

    
52
//#define DEBUG_SYNC
53

    
54
#define MAX_VIDEOQ_SIZE (5 * 256 * 1024)
55
#define MAX_AUDIOQ_SIZE (5 * 16 * 1024)
56
#define MAX_SUBTITLEQ_SIZE (5 * 16 * 1024)
57

    
58
/* SDL audio buffer size, in samples. Should be small to have precise
59
   A/V sync as SDL does not have hardware buffer fullness info. */
60
#define SDL_AUDIO_BUFFER_SIZE 1024
61

    
62
/* no AV sync correction is done if below the AV sync threshold */
63
#define AV_SYNC_THRESHOLD 0.01
64
/* no AV correction is done if too big error */
65
#define AV_NOSYNC_THRESHOLD 10.0
66

    
67
/* maximum audio speed change to get correct sync */
68
#define SAMPLE_CORRECTION_PERCENT_MAX 10
69

    
70
/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
71
#define AUDIO_DIFF_AVG_NB   20
72

    
73
/* NOTE: the size must be big enough to compensate for the hardware audio buffer size */
74
#define SAMPLE_ARRAY_SIZE (2*65536)
75

    
76
static int sws_flags = SWS_BICUBIC;
77

    
78
typedef struct PacketQueue {
79
    AVPacketList *first_pkt, *last_pkt;
80
    int nb_packets;
81
    int size;
82
    int abort_request;
83
    SDL_mutex *mutex;
84
    SDL_cond *cond;
85
} PacketQueue;
86

    
87
#define VIDEO_PICTURE_QUEUE_SIZE 1
88
#define SUBPICTURE_QUEUE_SIZE 4
89

    
90
typedef struct VideoPicture {
91
    double pts;                                  ///<presentation time stamp for this picture
92
    SDL_Overlay *bmp;
93
    int width, height; /* source height & width */
94
    int allocated;
95
} VideoPicture;
96

    
97
typedef struct SubPicture {
98
    double pts; /* presentation time stamp for this picture */
99
    AVSubtitle sub;
100
} SubPicture;
101

    
102
enum {
103
    AV_SYNC_AUDIO_MASTER, /* default choice */
104
    AV_SYNC_VIDEO_MASTER,
105
    AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
106
};
107

    
108
typedef struct VideoState {
109
    SDL_Thread *parse_tid;
110
    SDL_Thread *video_tid;
111
    AVInputFormat *iformat;
112
    int no_background;
113
    int abort_request;
114
    int paused;
115
    int last_paused;
116
    int seek_req;
117
    int seek_flags;
118
    int64_t seek_pos;
119
    AVFormatContext *ic;
120
    int dtg_active_format;
121

    
122
    int audio_stream;
123

    
124
    int av_sync_type;
125
    double external_clock; /* external clock base */
126
    int64_t external_clock_time;
127

    
128
    double audio_clock;
129
    double audio_diff_cum; /* used for AV difference average computation */
130
    double audio_diff_avg_coef;
131
    double audio_diff_threshold;
132
    int audio_diff_avg_count;
133
    AVStream *audio_st;
134
    PacketQueue audioq;
135
    int audio_hw_buf_size;
136
    /* samples output by the codec. we reserve more space for avsync
137
       compensation */
138
    DECLARE_ALIGNED(16,uint8_t,audio_buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2]);
139
    unsigned int audio_buf_size; /* in bytes */
140
    int audio_buf_index; /* in bytes */
141
    AVPacket audio_pkt;
142
    uint8_t *audio_pkt_data;
143
    int audio_pkt_size;
144

    
145
    int show_audio; /* if true, display audio samples */
146
    int16_t sample_array[SAMPLE_ARRAY_SIZE];
147
    int sample_array_index;
148
    int last_i_start;
149

    
150
    SDL_Thread *subtitle_tid;
151
    int subtitle_stream;
152
    int subtitle_stream_changed;
153
    AVStream *subtitle_st;
154
    PacketQueue subtitleq;
155
    SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
156
    int subpq_size, subpq_rindex, subpq_windex;
157
    SDL_mutex *subpq_mutex;
158
    SDL_cond *subpq_cond;
159

    
160
    double frame_timer;
161
    double frame_last_pts;
162
    double frame_last_delay;
163
    double video_clock;                          ///<pts of last decoded frame / predicted pts of next decoded frame
164
    int video_stream;
165
    AVStream *video_st;
166
    PacketQueue videoq;
167
    double video_current_pts;                    ///<current displayed pts (different from video_clock if frame fifos are used)
168
    int64_t video_current_pts_time;              ///<time (av_gettime) at which we updated video_current_pts - used to have running video pts
169
    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
170
    int pictq_size, pictq_rindex, pictq_windex;
171
    SDL_mutex *pictq_mutex;
172
    SDL_cond *pictq_cond;
173

    
174
    //    QETimer *video_timer;
175
    char filename[1024];
176
    int width, height, xleft, ytop;
177
} VideoState;
178

    
179
void show_help(void);
180
static int audio_write_get_buf_size(VideoState *is);
181

    
182
/* options specified by the user */
183
static AVInputFormat *file_iformat;
184
static const char *input_filename;
185
static int fs_screen_width;
186
static int fs_screen_height;
187
static int screen_width = 640;
188
static int screen_height = 480;
189
static int audio_disable;
190
static int video_disable;
191
static int seek_by_bytes;
192
static int display_disable;
193
static int show_status;
194
static int av_sync_type = AV_SYNC_AUDIO_MASTER;
195
static int64_t start_time = AV_NOPTS_VALUE;
196
static int debug = 0;
197
static int debug_mv = 0;
198
static int step = 0;
199
static int thread_count = 1;
200
static int workaround_bugs = 1;
201
static int fast = 0;
202
static int genpts = 0;
203
static int lowres = 0;
204
static int idct = FF_IDCT_AUTO;
205
static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
206
static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
207
static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
208
static int error_resilience = FF_ER_CAREFUL;
209
static int error_concealment = 3;
210

    
211
/* current context */
212
static int is_full_screen;
213
static VideoState *cur_stream;
214
static int64_t audio_callback_time;
215

    
216
AVPacket flush_pkt;
217

    
218
#define FF_ALLOC_EVENT   (SDL_USEREVENT)
219
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
220
#define FF_QUIT_EVENT    (SDL_USEREVENT + 2)
221

    
222
SDL_Surface *screen;
223

    
224
/* packet queue handling */
225
static void packet_queue_init(PacketQueue *q)
226
{
227
    memset(q, 0, sizeof(PacketQueue));
228
    q->mutex = SDL_CreateMutex();
229
    q->cond = SDL_CreateCond();
230
}
231

    
232
static void packet_queue_flush(PacketQueue *q)
233
{
234
    AVPacketList *pkt, *pkt1;
235

    
236
    SDL_LockMutex(q->mutex);
237
    for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
238
        pkt1 = pkt->next;
239
        av_free_packet(&pkt->pkt);
240
        av_freep(&pkt);
241
    }
242
    q->last_pkt = NULL;
243
    q->first_pkt = NULL;
244
    q->nb_packets = 0;
245
    q->size = 0;
246
    SDL_UnlockMutex(q->mutex);
247
}
248

    
249
static void packet_queue_end(PacketQueue *q)
250
{
251
    packet_queue_flush(q);
252
    SDL_DestroyMutex(q->mutex);
253
    SDL_DestroyCond(q->cond);
254
}
255

    
256
static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
257
{
258
    AVPacketList *pkt1;
259

    
260
    /* duplicate the packet */
261
    if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
262
        return -1;
263

    
264
    pkt1 = av_malloc(sizeof(AVPacketList));
265
    if (!pkt1)
266
        return -1;
267
    pkt1->pkt = *pkt;
268
    pkt1->next = NULL;
269

    
270

    
271
    SDL_LockMutex(q->mutex);
272

    
273
    if (!q->last_pkt)
274

    
275
        q->first_pkt = pkt1;
276
    else
277
        q->last_pkt->next = pkt1;
278
    q->last_pkt = pkt1;
279
    q->nb_packets++;
280
    q->size += pkt1->pkt.size;
281
    /* XXX: should duplicate packet data in DV case */
282
    SDL_CondSignal(q->cond);
283

    
284
    SDL_UnlockMutex(q->mutex);
285
    return 0;
286
}
287

    
288
static void packet_queue_abort(PacketQueue *q)
289
{
290
    SDL_LockMutex(q->mutex);
291

    
292
    q->abort_request = 1;
293

    
294
    SDL_CondSignal(q->cond);
295

    
296
    SDL_UnlockMutex(q->mutex);
297
}
298

    
299
/* return < 0 if aborted, 0 if no packet and > 0 if packet.  */
300
static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
301
{
302
    AVPacketList *pkt1;
303
    int ret;
304

    
305
    SDL_LockMutex(q->mutex);
306

    
307
    for(;;) {
308
        if (q->abort_request) {
309
            ret = -1;
310
            break;
311
        }
312

    
313
        pkt1 = q->first_pkt;
314
        if (pkt1) {
315
            q->first_pkt = pkt1->next;
316
            if (!q->first_pkt)
317
                q->last_pkt = NULL;
318
            q->nb_packets--;
319
            q->size -= pkt1->pkt.size;
320
            *pkt = pkt1->pkt;
321
            av_free(pkt1);
322
            ret = 1;
323
            break;
324
        } else if (!block) {
325
            ret = 0;
326
            break;
327
        } else {
328
            SDL_CondWait(q->cond, q->mutex);
329
        }
330
    }
331
    SDL_UnlockMutex(q->mutex);
332
    return ret;
333
}
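
/* Illustrative sketch (not part of ffplay itself) of how the PacketQueue API
   above is meant to be used: the demux thread is the producer, the decoder
   threads are the consumers. Function names here are hypothetical. */
#if 0
static int example_producer(PacketQueue *q, AVFormatContext *ic)
{
    AVPacket pkt;

    /* read one packet and hand it over; packet_queue_put() duplicates
       non-flush packets so the data stays valid after this call */
    if (av_read_frame(ic, &pkt) < 0)
        return -1;
    return packet_queue_put(q, &pkt);
}

static int example_consumer(PacketQueue *q)
{
    AVPacket pkt;

    /* block until a packet arrives; returns < 0 once packet_queue_abort()
       has been called, which is how the decoder threads are shut down */
    if (packet_queue_get(q, &pkt, 1) < 0)
        return -1;
    /* ... decode pkt.data / pkt.size here ... */
    av_free_packet(&pkt);
    return 0;
}
#endif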
334

    
335
static inline void fill_rectangle(SDL_Surface *screen,
336
                                  int x, int y, int w, int h, int color)
337
{
338
    SDL_Rect rect;
339
    rect.x = x;
340
    rect.y = y;
341
    rect.w = w;
342
    rect.h = h;
343
    SDL_FillRect(screen, &rect, color);
344
}
345

    
346
#if 0
347
/* draw only the border of a rectangle */
348
void fill_border(VideoState *s, int x, int y, int w, int h, int color)
349
{
350
    int w1, w2, h1, h2;
351

352
    /* fill the background */
353
    w1 = x;
354
    if (w1 < 0)
355
        w1 = 0;
356
    w2 = s->width - (x + w);
357
    if (w2 < 0)
358
        w2 = 0;
359
    h1 = y;
360
    if (h1 < 0)
361
        h1 = 0;
362
    h2 = s->height - (y + h);
363
    if (h2 < 0)
364
        h2 = 0;
365
    fill_rectangle(screen,
366
                   s->xleft, s->ytop,
367
                   w1, s->height,
368
                   color);
369
    fill_rectangle(screen,
370
                   s->xleft + s->width - w2, s->ytop,
371
                   w2, s->height,
372
                   color);
373
    fill_rectangle(screen,
374
                   s->xleft + w1, s->ytop,
375
                   s->width - w1 - w2, h1,
376
                   color);
377
    fill_rectangle(screen,
378
                   s->xleft + w1, s->ytop + s->height - h2,
379
                   s->width - w1 - w2, h2,
380
                   color);
381
}
382
#endif
383

    
384

    
385

    
386
#define SCALEBITS 10
387
#define ONE_HALF  (1 << (SCALEBITS - 1))
388
#define FIX(x)    ((int) ((x) * (1<<SCALEBITS) + 0.5))
389

    
390
#define RGB_TO_Y_CCIR(r, g, b) \
391
((FIX(0.29900*219.0/255.0) * (r) + FIX(0.58700*219.0/255.0) * (g) + \
392
  FIX(0.11400*219.0/255.0) * (b) + (ONE_HALF + (16 << SCALEBITS))) >> SCALEBITS)
393

    
394
#define RGB_TO_U_CCIR(r1, g1, b1, shift)\
395
(((- FIX(0.16874*224.0/255.0) * r1 - FIX(0.33126*224.0/255.0) * g1 +         \
396
     FIX(0.50000*224.0/255.0) * b1 + (ONE_HALF << shift) - 1) >> (SCALEBITS + shift)) + 128)
397

    
398
#define RGB_TO_V_CCIR(r1, g1, b1, shift)\
399
(((FIX(0.50000*224.0/255.0) * r1 - FIX(0.41869*224.0/255.0) * g1 -           \
400
   FIX(0.08131*224.0/255.0) * b1 + (ONE_HALF << shift) - 1) >> (SCALEBITS + shift)) + 128)
401

    
402
#define ALPHA_BLEND(a, oldp, newp, s)\
403
((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))
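
/* Worked example of the fixed-point conversion above (SCALEBITS = 10):
   for white, r = g = b = 255, RGB_TO_Y_CCIR gives 219 + 16 = 235 and
   RGB_TO_U_CCIR / RGB_TO_V_CCIR give 128, i.e. limited-range (CCIR 601)
   YUV as expected by the YUV overlay. ALPHA_BLEND is a plain linear blend,
   (oldp * (255 - a) + newp * a) / 255, where 'newp' is pre-scaled by 2^s
   when it is the sum of 2 or 4 chroma samples. */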
404

    
405
#define RGBA_IN(r, g, b, a, s)\
406
{\
407
    unsigned int v = ((const uint32_t *)(s))[0];\
408
    a = (v >> 24) & 0xff;\
409
    r = (v >> 16) & 0xff;\
410
    g = (v >> 8) & 0xff;\
411
    b = v & 0xff;\
412
}
413

    
414
#define YUVA_IN(y, u, v, a, s, pal)\
415
{\
416
    unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)s];\
417
    a = (val >> 24) & 0xff;\
418
    y = (val >> 16) & 0xff;\
419
    u = (val >> 8) & 0xff;\
420
    v = val & 0xff;\
421
}
422

    
423
#define YUVA_OUT(d, y, u, v, a)\
424
{\
425
    ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
426
}
427

    
428

    
429
#define BPP 1
430

    
431
static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect)
432
{
433
    int wrap, wrap3, width2, skip2;
434
    int y, u, v, a, u1, v1, a1, w, h;
435
    uint8_t *lum, *cb, *cr;
436
    const uint8_t *p;
437
    const uint32_t *pal;
438

    
439
    lum = dst->data[0] + rect->y * dst->linesize[0];
440
    cb = dst->data[1] + (rect->y >> 1) * dst->linesize[1];
441
    cr = dst->data[2] + (rect->y >> 1) * dst->linesize[2];
442

    
443
    width2 = (rect->w + 1) >> 1;
444
    skip2 = rect->x >> 1;
445
    wrap = dst->linesize[0];
446
    wrap3 = rect->linesize;
447
    p = rect->bitmap;
448
    pal = rect->rgba_palette;  /* Now in YCrCb! */
449

    
450
    if (rect->y & 1) {
451
        lum += rect->x;
452
        cb += skip2;
453
        cr += skip2;
454

    
455
        if (rect->x & 1) {
456
            YUVA_IN(y, u, v, a, p, pal);
457
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
458
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
459
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
460
            cb++;
461
            cr++;
462
            lum++;
463
            p += BPP;
464
        }
465
        for(w = rect->w - (rect->x & 1); w >= 2; w -= 2) {
466
            YUVA_IN(y, u, v, a, p, pal);
467
            u1 = u;
468
            v1 = v;
469
            a1 = a;
470
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
471

    
472
            YUVA_IN(y, u, v, a, p + BPP, pal);
473
            u1 += u;
474
            v1 += v;
475
            a1 += a;
476
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
477
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
478
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
479
            cb++;
480
            cr++;
481
            p += 2 * BPP;
482
            lum += 2;
483
        }
484
        if (w) {
485
            YUVA_IN(y, u, v, a, p, pal);
486
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
487
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
488
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
489
        }
490
        p += wrap3 + (wrap3 - rect->w * BPP);
491
        lum += wrap + (wrap - rect->w - rect->x);
492
        cb += dst->linesize[1] - width2 - skip2;
493
        cr += dst->linesize[2] - width2 - skip2;
494
    }
495
    for(h = rect->h - (rect->y & 1); h >= 2; h -= 2) {
496
        lum += rect->x;
497
        cb += skip2;
498
        cr += skip2;
499

    
500
        if (rect->x & 1) {
501
            YUVA_IN(y, u, v, a, p, pal);
502
            u1 = u;
503
            v1 = v;
504
            a1 = a;
505
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
506
            p += wrap3;
507
            lum += wrap;
508
            YUVA_IN(y, u, v, a, p, pal);
509
            u1 += u;
510
            v1 += v;
511
            a1 += a;
512
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
513
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
514
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
515
            cb++;
516
            cr++;
517
            p += -wrap3 + BPP;
518
            lum += -wrap + 1;
519
        }
520
        for(w = rect->w - (rect->x & 1); w >= 2; w -= 2) {
521
            YUVA_IN(y, u, v, a, p, pal);
522
            u1 = u;
523
            v1 = v;
524
            a1 = a;
525
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
526

    
527
            YUVA_IN(y, u, v, a, p + BPP, pal);
528
            u1 += u;
529
            v1 += v;
530
            a1 += a;
531
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
532
            p += wrap3;
533
            lum += wrap;
534

    
535
            YUVA_IN(y, u, v, a, p, pal);
536
            u1 += u;
537
            v1 += v;
538
            a1 += a;
539
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
540

    
541
            YUVA_IN(y, u, v, a, p + BPP, pal);
542
            u1 += u;
543
            v1 += v;
544
            a1 += a;
545
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
546

    
547
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
548
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
549

    
550
            cb++;
551
            cr++;
552
            p += -wrap3 + 2 * BPP;
553
            lum += -wrap + 2;
554
        }
555
        if (w) {
556
            YUVA_IN(y, u, v, a, p, pal);
557
            u1 = u;
558
            v1 = v;
559
            a1 = a;
560
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
561
            p += wrap3;
562
            lum += wrap;
563
            YUVA_IN(y, u, v, a, p, pal);
564
            u1 += u;
565
            v1 += v;
566
            a1 += a;
567
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
568
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
569
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
570
            cb++;
571
            cr++;
572
            p += -wrap3 + BPP;
573
            lum += -wrap + 1;
574
        }
575
        p += wrap3 + (wrap3 - rect->w * BPP);
576
        lum += wrap + (wrap - rect->w - rect->x);
577
        cb += dst->linesize[1] - width2 - skip2;
578
        cr += dst->linesize[2] - width2 - skip2;
579
    }
580
    /* handle odd height */
581
    if (h) {
582
        lum += rect->x;
583
        cb += skip2;
584
        cr += skip2;
585

    
586
        if (rect->x & 1) {
587
            YUVA_IN(y, u, v, a, p, pal);
588
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
589
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
590
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
591
            cb++;
592
            cr++;
593
            lum++;
594
            p += BPP;
595
        }
596
        for(w = rect->w - (rect->x & 1); w >= 2; w -= 2) {
597
            YUVA_IN(y, u, v, a, p, pal);
598
            u1 = u;
599
            v1 = v;
600
            a1 = a;
601
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
602

    
603
            YUVA_IN(y, u, v, a, p + BPP, pal);
604
            u1 += u;
605
            v1 += v;
606
            a1 += a;
607
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
608
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
610
            cb++;
611
            cr++;
612
            p += 2 * BPP;
613
            lum += 2;
614
        }
615
        if (w) {
616
            YUVA_IN(y, u, v, a, p, pal);
617
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
618
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
619
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
620
        }
621
    }
622
}
623

    
624
static void free_subpicture(SubPicture *sp)
625
{
626
    int i;
627

    
628
    for (i = 0; i < sp->sub.num_rects; i++)
629
    {
630
        av_free(sp->sub.rects[i].bitmap);
631
        av_free(sp->sub.rects[i].rgba_palette);
632
    }
633

    
634
    av_free(sp->sub.rects);
635

    
636
    memset(&sp->sub, 0, sizeof(AVSubtitle));
637
}
638

    
639
static void video_image_display(VideoState *is)
640
{
641
    VideoPicture *vp;
642
    SubPicture *sp;
643
    AVPicture pict;
644
    float aspect_ratio;
645
    int width, height, x, y;
646
    SDL_Rect rect;
647
    int i;
648

    
649
    vp = &is->pictq[is->pictq_rindex];
650
    if (vp->bmp) {
651
        /* XXX: use variable in the frame */
652
        if (is->video_st->codec->sample_aspect_ratio.num == 0)
653
            aspect_ratio = 0;
654
        else
655
            aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio)
656
                * is->video_st->codec->width / is->video_st->codec->height;
657
        if (aspect_ratio <= 0.0)
658
            aspect_ratio = (float)is->video_st->codec->width /
659
                (float)is->video_st->codec->height;
660
        /* if an active format is indicated, then it overrides the
661
           mpeg format */
662
#if 0
663
        if (is->video_st->codec->dtg_active_format != is->dtg_active_format) {
664
            is->dtg_active_format = is->video_st->codec->dtg_active_format;
665
            printf("dtg_active_format=%d\n", is->dtg_active_format);
666
        }
667
#endif
668
#if 0
669
        switch(is->video_st->codec->dtg_active_format) {
670
        case FF_DTG_AFD_SAME:
671
        default:
672
            /* nothing to do */
673
            break;
674
        case FF_DTG_AFD_4_3:
675
            aspect_ratio = 4.0 / 3.0;
676
            break;
677
        case FF_DTG_AFD_16_9:
678
            aspect_ratio = 16.0 / 9.0;
679
            break;
680
        case FF_DTG_AFD_14_9:
681
            aspect_ratio = 14.0 / 9.0;
682
            break;
683
        case FF_DTG_AFD_4_3_SP_14_9:
684
            aspect_ratio = 14.0 / 9.0;
685
            break;
686
        case FF_DTG_AFD_16_9_SP_14_9:
687
            aspect_ratio = 14.0 / 9.0;
688
            break;
689
        case FF_DTG_AFD_SP_4_3:
690
            aspect_ratio = 4.0 / 3.0;
691
            break;
692
        }
693
#endif
694

    
695
        if (is->subtitle_st)
696
        {
697
            if (is->subpq_size > 0)
698
            {
699
                sp = &is->subpq[is->subpq_rindex];
700

    
701
                if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000))
702
                {
703
                    SDL_LockYUVOverlay (vp->bmp);
704

    
705
                    pict.data[0] = vp->bmp->pixels[0];
706
                    pict.data[1] = vp->bmp->pixels[2];
707
                    pict.data[2] = vp->bmp->pixels[1];
708

    
709
                    pict.linesize[0] = vp->bmp->pitches[0];
710
                    pict.linesize[1] = vp->bmp->pitches[2];
711
                    pict.linesize[2] = vp->bmp->pitches[1];
712

    
713
                    for (i = 0; i < sp->sub.num_rects; i++)
714
                        blend_subrect(&pict, &sp->sub.rects[i]);
715

    
716
                    SDL_UnlockYUVOverlay (vp->bmp);
717
                }
718
            }
719
        }
720

    
721

    
722
        /* XXX: we suppose the screen has a 1.0 pixel ratio */
723
        height = is->height;
724
        width = ((int)rint(height * aspect_ratio)) & -3;
725
        if (width > is->width) {
726
            width = is->width;
727
            height = ((int)rint(width / aspect_ratio)) & -3;
728
        }
729
        x = (is->width - width) / 2;
730
        y = (is->height - height) / 2;
731
        if (!is->no_background) {
732
            /* fill the background */
733
            //            fill_border(is, x, y, width, height, QERGB(0x00, 0x00, 0x00));
734
        } else {
735
            is->no_background = 0;
736
        }
737
        rect.x = is->xleft + x;
738
        rect.y = is->ytop + y;
739
        rect.w = width;
740
        rect.h = height;
741
        SDL_DisplayYUVOverlay(vp->bmp, &rect);
742
    } else {
743
#if 0
744
        fill_rectangle(screen,
745
                       is->xleft, is->ytop, is->width, is->height,
746
                       QERGB(0x00, 0x00, 0x00));
747
#endif
748
    }
749
}
750

    
751
static inline int compute_mod(int a, int b)
{
    a = a % b;
    if (a >= 0)
        return a;
    else
        return a + b;
}
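
/* compute_mod() is a floored modulo: compute_mod(-3, 10) is 7 where the C
   '%' operator would give -3. video_audio_display() below relies on this to
   step backwards through the sample_array ring buffer without going
   negative. */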
759

    
760
static void video_audio_display(VideoState *s)
761
{
762
    int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
763
    int ch, channels, h, h2, bgcolor, fgcolor;
764
    int64_t time_diff;
765

    
766
    /* compute display index : center on currently output samples */
767
    channels = s->audio_st->codec->channels;
768
    nb_display_channels = channels;
769
    if (!s->paused) {
770
        n = 2 * channels;
771
        delay = audio_write_get_buf_size(s);
772
        delay /= n;
773

    
774
        /* to be more precise, we take into account the time spent since
775
           the last buffer computation */
776
        if (audio_callback_time) {
777
            time_diff = av_gettime() - audio_callback_time;
778
            delay += (time_diff * s->audio_st->codec->sample_rate) / 1000000;
779
        }
780

    
781
        delay -= s->width / 2;
782
        if (delay < s->width)
783
            delay = s->width;
784
        i_start = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
785
        s->last_i_start = i_start;
786
    } else {
787
        i_start = s->last_i_start;
788
    }
789

    
790
    bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
791
    fill_rectangle(screen,
792
                   s->xleft, s->ytop, s->width, s->height,
793
                   bgcolor);
794

    
795
    fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);
796

    
797
    /* total height for one channel */
798
    h = s->height / nb_display_channels;
799
    /* graph height / 2 */
800
    h2 = (h * 9) / 20;
801
    for(ch = 0;ch < nb_display_channels; ch++) {
802
        i = i_start + ch;
803
        y1 = s->ytop + ch * h + (h / 2); /* position of center line */
804
        for(x = 0; x < s->width; x++) {
805
            y = (s->sample_array[i] * h2) >> 15;
806
            if (y < 0) {
807
                y = -y;
808
                ys = y1 - y;
809
            } else {
810
                ys = y1;
811
            }
812
            fill_rectangle(screen,
813
                           s->xleft + x, ys, 1, y,
814
                           fgcolor);
815
            i += channels;
816
            if (i >= SAMPLE_ARRAY_SIZE)
817
                i -= SAMPLE_ARRAY_SIZE;
818
        }
819
    }
820

    
821
    fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);
822

    
823
    for(ch = 1;ch < nb_display_channels; ch++) {
824
        y = s->ytop + ch * h;
825
        fill_rectangle(screen,
826
                       s->xleft, y, s->width, 1,
827
                       fgcolor);
828
    }
829
    SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
830
}
831

    
832
/* display the current picture, if any */
833
static void video_display(VideoState *is)
834
{
835
    if (is->audio_st && is->show_audio)
836
        video_audio_display(is);
837
    else if (is->video_st)
838
        video_image_display(is);
839
}
840

    
841
static Uint32 sdl_refresh_timer_cb(Uint32 interval, void *opaque)
842
{
843
    SDL_Event event;
844
    event.type = FF_REFRESH_EVENT;
845
    event.user.data1 = opaque;
846
    SDL_PushEvent(&event);
847
    return 0; /* 0 means stop timer */
848
}
849

    
850
/* schedule a video refresh in 'delay' ms */
851
static void schedule_refresh(VideoState *is, int delay)
852
{
853
    SDL_AddTimer(delay, sdl_refresh_timer_cb, is);
854
}
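
/* The timer callback above only pushes FF_REFRESH_EVENT; the actual repaint
   happens on the main thread, in the SDL event loop defined later in this
   file (outside this excerpt), roughly as:

       case FF_REFRESH_EVENT:
           video_refresh_timer(event.user.data1);
           break;

   so all SDL rendering stays on the thread that owns the screen surface. */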
855

    
856
/* get the current audio clock value */
857
static double get_audio_clock(VideoState *is)
858
{
859
    double pts;
860
    int hw_buf_size, bytes_per_sec;
861
    pts = is->audio_clock;
862
    hw_buf_size = audio_write_get_buf_size(is);
863
    bytes_per_sec = 0;
864
    if (is->audio_st) {
865
        bytes_per_sec = is->audio_st->codec->sample_rate *
866
            2 * is->audio_st->codec->channels;
867
    }
868
    if (bytes_per_sec)
869
        pts -= (double)hw_buf_size / bytes_per_sec;
870
    return pts;
871
}
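
/* Worked example for get_audio_clock(): with 44100 Hz 16-bit stereo,
   bytes_per_sec = 44100 * 2 * 2 = 176400. If is->audio_clock is 10.000 s
   (the end of the last decoded data) and 8820 bytes are still queued for
   output, the returned clock is 10.000 - 8820 / 176400 = 9.950 s. */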
872

    
873
/* get the current video clock value */
874
static double get_video_clock(VideoState *is)
875
{
876
    double delta;
877
    if (is->paused) {
878
        delta = 0;
879
    } else {
880
        delta = (av_gettime() - is->video_current_pts_time) / 1000000.0;
881
    }
882
    return is->video_current_pts + delta;
883
}
884

    
885
/* get the current external clock value */
886
static double get_external_clock(VideoState *is)
887
{
888
    int64_t ti;
889
    ti = av_gettime();
890
    return is->external_clock + ((ti - is->external_clock_time) * 1e-6);
891
}
892

    
893
/* get the current master clock value */
894
static double get_master_clock(VideoState *is)
895
{
896
    double val;
897

    
898
    if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
899
        if (is->video_st)
900
            val = get_video_clock(is);
901
        else
902
            val = get_audio_clock(is);
903
    } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
904
        if (is->audio_st)
905
            val = get_audio_clock(is);
906
        else
907
            val = get_video_clock(is);
908
    } else {
909
        val = get_external_clock(is);
910
    }
911
    return val;
912
}
913

    
914
/* seek in the stream */
915
static void stream_seek(VideoState *is, int64_t pos, int rel)
916
{
917
    if (!is->seek_req) {
918
        is->seek_pos = pos;
919
        is->seek_flags = rel < 0 ? AVSEEK_FLAG_BACKWARD : 0;
920
        if (seek_by_bytes)
921
            is->seek_flags |= AVSEEK_FLAG_BYTE;
922
        is->seek_req = 1;
923
    }
924
}
925

    
926
/* pause or resume the video */
927
static void stream_pause(VideoState *is)
928
{
929
    is->paused = !is->paused;
930
    if (is->paused) {
931
        is->video_current_pts = get_video_clock(is);
932
    }
933
}
934

    
935
/* called to display each frame */
936
static void video_refresh_timer(void *opaque)
937
{
938
    VideoState *is = opaque;
939
    VideoPicture *vp;
940
    double actual_delay, delay, sync_threshold, ref_clock, diff;
941

    
942
    SubPicture *sp, *sp2;
943

    
944
    if (is->video_st) {
945
        if (is->pictq_size == 0) {
946
            /* if no picture, need to wait */
947
            schedule_refresh(is, 1);
948
        } else {
949
            /* dequeue the picture */
950
            vp = &is->pictq[is->pictq_rindex];
951

    
952
            /* update current video pts */
953
            is->video_current_pts = vp->pts;
954
            is->video_current_pts_time = av_gettime();
955

    
956
            /* compute nominal delay */
957
            delay = vp->pts - is->frame_last_pts;
958
            if (delay <= 0 || delay >= 1.0) {
959
                /* if incorrect delay, use previous one */
960
                delay = is->frame_last_delay;
961
            }
962
            is->frame_last_delay = delay;
963
            is->frame_last_pts = vp->pts;
964

    
965
            /* update delay to follow master synchronisation source */
966
            if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
967
                 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
968
                /* if video is slave, we try to correct big delays by
969
                   duplicating or deleting a frame */
970
                ref_clock = get_master_clock(is);
971
                diff = vp->pts - ref_clock;
972

    
973
                /* skip or repeat frame. We take into account the
974
                   delay to compute the threshold. I still don't know
975
                   if it is the best guess */
976
                sync_threshold = AV_SYNC_THRESHOLD;
977
                if (delay > sync_threshold)
978
                    sync_threshold = delay;
979
                if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
980
                    if (diff <= -sync_threshold)
981
                        delay = 0;
982
                    else if (diff >= sync_threshold)
983
                        delay = 2 * delay;
984
                }
985
            }
986

    
987
            is->frame_timer += delay;
988
            /* compute the REAL delay (we need to do that to avoid
989
               long term errors */
990
            actual_delay = is->frame_timer - (av_gettime() / 1000000.0);
991
            if (actual_delay < 0.010) {
992
                /* XXX: should skip picture */
993
                actual_delay = 0.010;
994
            }
995
            /* launch timer for next picture */
996
            schedule_refresh(is, (int)(actual_delay * 1000 + 0.5));
997

    
998
#if defined(DEBUG_SYNC)
999
            printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
1000
                   delay, actual_delay, vp->pts, -diff);
1001
#endif
1002

    
1003
            if(is->subtitle_st) {
1004
                if (is->subtitle_stream_changed) {
1005
                    SDL_LockMutex(is->subpq_mutex);
1006

    
1007
                    while (is->subpq_size) {
1008
                        free_subpicture(&is->subpq[is->subpq_rindex]);
1009

    
1010
                        /* update queue size and signal for next picture */
1011
                        if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1012
                            is->subpq_rindex = 0;
1013

    
1014
                        is->subpq_size--;
1015
                    }
1016
                    is->subtitle_stream_changed = 0;
1017

    
1018
                    SDL_CondSignal(is->subpq_cond);
1019
                    SDL_UnlockMutex(is->subpq_mutex);
1020
                } else {
1021
                    if (is->subpq_size > 0) {
1022
                        sp = &is->subpq[is->subpq_rindex];
1023

    
1024
                        if (is->subpq_size > 1)
1025
                            sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
1026
                        else
1027
                            sp2 = NULL;
1028

    
1029
                        if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
1030
                                || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
1031
                        {
1032
                            free_subpicture(sp);
1033

    
1034
                            /* update queue size and signal for next picture */
1035
                            if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1036
                                is->subpq_rindex = 0;
1037

    
1038
                            SDL_LockMutex(is->subpq_mutex);
1039
                            is->subpq_size--;
1040
                            SDL_CondSignal(is->subpq_cond);
1041
                            SDL_UnlockMutex(is->subpq_mutex);
1042
                        }
1043
                    }
1044
                }
1045
            }
1046

    
1047
            /* display picture */
1048
            video_display(is);
1049

    
1050
            /* update queue size and signal for next picture */
1051
            if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1052
                is->pictq_rindex = 0;
1053

    
1054
            SDL_LockMutex(is->pictq_mutex);
1055
            is->pictq_size--;
1056
            SDL_CondSignal(is->pictq_cond);
1057
            SDL_UnlockMutex(is->pictq_mutex);
1058
        }
1059
    } else if (is->audio_st) {
1060
        /* draw the next audio frame */
1061

    
1062
        schedule_refresh(is, 40);
1063

    
1064
        /* if only audio stream, then display the audio bars (better
           than nothing, just to test the implementation) */
1066

    
1067
        /* display picture */
1068
        video_display(is);
1069
    } else {
1070
        schedule_refresh(is, 100);
1071
    }
1072
    if (show_status) {
1073
        static int64_t last_time;
1074
        int64_t cur_time;
1075
        int aqsize, vqsize, sqsize;
1076
        double av_diff;
1077

    
1078
        cur_time = av_gettime();
1079
        if (!last_time || (cur_time - last_time) >= 500 * 1000) {
1080
            aqsize = 0;
1081
            vqsize = 0;
1082
            sqsize = 0;
1083
            if (is->audio_st)
1084
                aqsize = is->audioq.size;
1085
            if (is->video_st)
1086
                vqsize = is->videoq.size;
1087
            if (is->subtitle_st)
1088
                sqsize = is->subtitleq.size;
1089
            av_diff = 0;
1090
            if (is->audio_st && is->video_st)
1091
                av_diff = get_audio_clock(is) - get_video_clock(is);
1092
            printf("%7.2f A-V:%7.3f aq=%5dKB vq=%5dKB sq=%5dB    \r",
1093
                   get_master_clock(is), av_diff, aqsize / 1024, vqsize / 1024, sqsize);
1094
            fflush(stdout);
1095
            last_time = cur_time;
1096
        }
1097
    }
1098
}
1099

    
1100
/* allocate a picture (this must be done in the main thread to avoid
   potential locking problems) */
1102
static void alloc_picture(void *opaque)
1103
{
1104
    VideoState *is = opaque;
1105
    VideoPicture *vp;
1106

    
1107
    vp = &is->pictq[is->pictq_windex];
1108

    
1109
    if (vp->bmp)
1110
        SDL_FreeYUVOverlay(vp->bmp);
1111

    
1112
#if 0
1113
    /* XXX: use generic function */
1114
    /* XXX: disable overlay if no hardware acceleration or if RGB format */
1115
    switch(is->video_st->codec->pix_fmt) {
1116
    case PIX_FMT_YUV420P:
1117
    case PIX_FMT_YUV422P:
1118
    case PIX_FMT_YUV444P:
1119
    case PIX_FMT_YUV422:
1120
    case PIX_FMT_YUV410P:
1121
    case PIX_FMT_YUV411P:
1122
        is_yuv = 1;
1123
        break;
1124
    default:
1125
        is_yuv = 0;
1126
        break;
1127
    }
1128
#endif
1129
    vp->bmp = SDL_CreateYUVOverlay(is->video_st->codec->width,
1130
                                   is->video_st->codec->height,
1131
                                   SDL_YV12_OVERLAY,
1132
                                   screen);
1133
    vp->width = is->video_st->codec->width;
1134
    vp->height = is->video_st->codec->height;
1135

    
1136
    SDL_LockMutex(is->pictq_mutex);
1137
    vp->allocated = 1;
1138
    SDL_CondSignal(is->pictq_cond);
1139
    SDL_UnlockMutex(is->pictq_mutex);
1140
}
1141

    
1142
/**
1143
 *
1144
 * @param pts the dts of the pkt / pts of the frame and guessed if not known
1145
 */
1146
static int queue_picture(VideoState *is, AVFrame *src_frame, double pts)
1147
{
1148
    VideoPicture *vp;
1149
    int dst_pix_fmt;
1150
    AVPicture pict;
1151
    static struct SwsContext *img_convert_ctx;
1152

    
1153
    /* wait until we have space to put a new picture */
1154
    SDL_LockMutex(is->pictq_mutex);
1155
    while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
1156
           !is->videoq.abort_request) {
1157
        SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1158
    }
1159
    SDL_UnlockMutex(is->pictq_mutex);
1160

    
1161
    if (is->videoq.abort_request)
1162
        return -1;
1163

    
1164
    vp = &is->pictq[is->pictq_windex];
1165

    
1166
    /* alloc or resize hardware picture buffer */
1167
    if (!vp->bmp ||
1168
        vp->width != is->video_st->codec->width ||
1169
        vp->height != is->video_st->codec->height) {
1170
        SDL_Event event;
1171

    
1172
        vp->allocated = 0;
1173

    
1174
        /* the allocation must be done in the main thread to avoid
1175
           locking problems */
1176
        event.type = FF_ALLOC_EVENT;
1177
        event.user.data1 = is;
1178
        SDL_PushEvent(&event);
1179

    
1180
        /* wait until the picture is allocated */
1181
        SDL_LockMutex(is->pictq_mutex);
1182
        while (!vp->allocated && !is->videoq.abort_request) {
1183
            SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1184
        }
1185
        SDL_UnlockMutex(is->pictq_mutex);
1186

    
1187
        if (is->videoq.abort_request)
1188
            return -1;
1189
    }
1190

    
1191
    /* if the frame is not skipped, then display it */
1192
    if (vp->bmp) {
1193
        /* get a pointer on the bitmap */
1194
        SDL_LockYUVOverlay (vp->bmp);
1195

    
1196
        dst_pix_fmt = PIX_FMT_YUV420P;
1197
        pict.data[0] = vp->bmp->pixels[0];
1198
        pict.data[1] = vp->bmp->pixels[2];
1199
        pict.data[2] = vp->bmp->pixels[1];
1200

    
1201
        pict.linesize[0] = vp->bmp->pitches[0];
1202
        pict.linesize[1] = vp->bmp->pitches[2];
1203
        pict.linesize[2] = vp->bmp->pitches[1];
1204
        if (img_convert_ctx == NULL) {
1205
            img_convert_ctx = sws_getContext(is->video_st->codec->width,
1206
                    is->video_st->codec->height, is->video_st->codec->pix_fmt,
1207
                    is->video_st->codec->width, is->video_st->codec->height,
1208
                    dst_pix_fmt, sws_flags, NULL, NULL, NULL);
1209
            if (img_convert_ctx == NULL) {
1210
                fprintf(stderr, "Cannot initialize the conversion context\n");
1211
                exit(1);
1212
            }
1213
        }
1214
        sws_scale(img_convert_ctx, src_frame->data, src_frame->linesize,
1215
                  0, is->video_st->codec->height, pict.data, pict.linesize);
1216
        /* update the bitmap content */
1217
        SDL_UnlockYUVOverlay(vp->bmp);
1218

    
1219
        vp->pts = pts;
1220

    
1221
        /* now we can update the picture count */
1222
        if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
1223
            is->pictq_windex = 0;
1224
        SDL_LockMutex(is->pictq_mutex);
1225
        is->pictq_size++;
1226
        SDL_UnlockMutex(is->pictq_mutex);
1227
    }
1228
    return 0;
1229
}
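
/* Note on the plane order above (and in video_image_display()): data[1] and
   data[2] are deliberately crossed with pixels[2] and pixels[1], because
   libavcodec's PIX_FMT_YUV420P stores planes as Y,U,V while SDL's YV12
   overlay stores them as Y,V,U; sws_scale() then fills each plane in the
   right place. */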
1230

    
1231
/**
1232
 * compute the exact PTS for the picture if it is omitted in the stream
1233
 * @param pts1 the dts of the pkt / pts of the frame
1234
 */
1235
static int output_picture2(VideoState *is, AVFrame *src_frame, double pts1)
1236
{
1237
    double frame_delay, pts;
1238

    
1239
    pts = pts1;
1240

    
1241
    if (pts != 0) {
1242
        /* update video clock with pts, if present */
1243
        is->video_clock = pts;
1244
    } else {
1245
        pts = is->video_clock;
1246
    }
1247
    /* update video clock for next frame */
1248
    frame_delay = av_q2d(is->video_st->codec->time_base);
1249
    /* for MPEG2, the frame can be repeated, so we update the
1250
       clock accordingly */
1251
    frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
1252
    is->video_clock += frame_delay;
1253

    
1254
#if defined(DEBUG_SYNC) && 0
1255
    {
1256
        int ftype;
1257
        if (src_frame->pict_type == FF_B_TYPE)
1258
            ftype = 'B';
1259
        else if (src_frame->pict_type == FF_I_TYPE)
1260
            ftype = 'I';
1261
        else
1262
            ftype = 'P';
1263
        printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
1264
               ftype, pts, pts1);
1265
    }
1266
#endif
1267
    return queue_picture(is, src_frame, pts);
1268
}
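
/* Example of the clock update above: for 25 fps material, frame_delay =
   av_q2d(time_base) = 0.040 s; a frame with repeat_pict = 1 advances
   video_clock by 0.040 * (1 + 0.5) = 0.060 s instead of 0.040 s, matching
   the extra field the decoder asks us to display. */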
1269

    
1270
static int video_thread(void *arg)
1271
{
1272
    VideoState *is = arg;
1273
    AVPacket pkt1, *pkt = &pkt1;
1274
    int len1, got_picture;
1275
    AVFrame *frame= avcodec_alloc_frame();
1276
    double pts;
1277

    
1278
    for(;;) {
1279
        while (is->paused && !is->videoq.abort_request) {
1280
            SDL_Delay(10);
1281
        }
1282
        if (packet_queue_get(&is->videoq, pkt, 1) < 0)
1283
            break;
1284

    
1285
        if(pkt->data == flush_pkt.data){
1286
            avcodec_flush_buffers(is->video_st->codec);
1287
            continue;
1288
        }
1289

    
1290
        /* NOTE: ipts is the PTS of the _first_ picture beginning in
1291
           this packet, if any */
1292
        pts = 0;
1293
        if (pkt->dts != AV_NOPTS_VALUE)
1294
            pts = av_q2d(is->video_st->time_base)*pkt->dts;
1295

    
1296
            len1 = avcodec_decode_video(is->video_st->codec,
1297
                                        frame, &got_picture,
1298
                                        pkt->data, pkt->size);
1299
//            if (len1 < 0)
1300
//                break;
1301
            if (got_picture) {
1302
                if (output_picture2(is, frame, pts) < 0)
1303
                    goto the_end;
1304
            }
1305
        av_free_packet(pkt);
1306
        if (step)
1307
            if (cur_stream)
1308
                stream_pause(cur_stream);
1309
    }
1310
 the_end:
1311
    av_free(frame);
1312
    return 0;
1313
}
1314

    
1315
static int subtitle_thread(void *arg)
1316
{
1317
    VideoState *is = arg;
1318
    SubPicture *sp;
1319
    AVPacket pkt1, *pkt = &pkt1;
1320
    int len1, got_subtitle;
1321
    double pts;
1322
    int i, j;
1323
    int r, g, b, y, u, v, a;
1324

    
1325
    for(;;) {
1326
        while (is->paused && !is->subtitleq.abort_request) {
1327
            SDL_Delay(10);
1328
        }
1329
        if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
1330
            break;
1331

    
1332
        if(pkt->data == flush_pkt.data){
1333
            avcodec_flush_buffers(is->subtitle_st->codec);
1334
            continue;
1335
        }
1336
        SDL_LockMutex(is->subpq_mutex);
1337
        while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
1338
               !is->subtitleq.abort_request) {
1339
            SDL_CondWait(is->subpq_cond, is->subpq_mutex);
1340
        }
1341
        SDL_UnlockMutex(is->subpq_mutex);
1342

    
1343
        if (is->subtitleq.abort_request)
1344
            goto the_end;
1345

    
1346
        sp = &is->subpq[is->subpq_windex];
1347

    
1348
       /* NOTE: ipts is the PTS of the _first_ picture beginning in
1349
           this packet, if any */
1350
        pts = 0;
1351
        if (pkt->pts != AV_NOPTS_VALUE)
1352
            pts = av_q2d(is->subtitle_st->time_base)*pkt->pts;
1353

    
1354
        len1 = avcodec_decode_subtitle(is->subtitle_st->codec,
1355
                                    &sp->sub, &got_subtitle,
1356
                                    pkt->data, pkt->size);
1357
//            if (len1 < 0)
1358
//                break;
1359
        if (got_subtitle && sp->sub.format == 0) {
1360
            sp->pts = pts;
1361

    
1362
            for (i = 0; i < sp->sub.num_rects; i++)
1363
            {
1364
                for (j = 0; j < sp->sub.rects[i].nb_colors; j++)
1365
                {
1366
                    RGBA_IN(r, g, b, a, sp->sub.rects[i].rgba_palette + j);
1367
                    y = RGB_TO_Y_CCIR(r, g, b);
1368
                    u = RGB_TO_U_CCIR(r, g, b, 0);
1369
                    v = RGB_TO_V_CCIR(r, g, b, 0);
1370
                    YUVA_OUT(sp->sub.rects[i].rgba_palette + j, y, u, v, a);
1371
                }
1372
            }
1373

    
1374
            /* now we can update the picture count */
1375
            if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
1376
                is->subpq_windex = 0;
1377
            SDL_LockMutex(is->subpq_mutex);
1378
            is->subpq_size++;
1379
            SDL_UnlockMutex(is->subpq_mutex);
1380
        }
1381
        av_free_packet(pkt);
1382
//        if (step)
1383
//            if (cur_stream)
1384
//                stream_pause(cur_stream);
1385
    }
1386
 the_end:
1387
    return 0;
1388
}
1389

    
1390
/* copy samples for the audio waveform display */
1391
static void update_sample_display(VideoState *is, short *samples, int samples_size)
1392
{
1393
    int size, len, channels;
1394

    
1395
    channels = is->audio_st->codec->channels;
1396

    
1397
    size = samples_size / sizeof(short);
1398
    while (size > 0) {
1399
        len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
1400
        if (len > size)
1401
            len = size;
1402
        memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
1403
        samples += len;
1404
        is->sample_array_index += len;
1405
        if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
1406
            is->sample_array_index = 0;
1407
        size -= len;
1408
    }
1409
}
1410

    
1411
/* return the new audio buffer size (samples can be added or deleted
   to get better sync if video or the external clock is the master) */
1413
static int synchronize_audio(VideoState *is, short *samples,
1414
                             int samples_size1, double pts)
1415
{
1416
    int n, samples_size;
1417
    double ref_clock;
1418

    
1419
    n = 2 * is->audio_st->codec->channels;
1420
    samples_size = samples_size1;
1421

    
1422
    /* if not master, then we try to remove or add samples to correct the clock */
1423
    if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
1424
         is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1425
        double diff, avg_diff;
1426
        int wanted_size, min_size, max_size, nb_samples;
1427

    
1428
        ref_clock = get_master_clock(is);
1429
        diff = get_audio_clock(is) - ref_clock;
1430

    
1431
        if (diff < AV_NOSYNC_THRESHOLD) {
1432
            is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
1433
            if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
1434
                /* not enough measures to have a correct estimate */
1435
                is->audio_diff_avg_count++;
1436
            } else {
1437
                /* estimate the A-V difference */
1438
                avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
1439

    
1440
                if (fabs(avg_diff) >= is->audio_diff_threshold) {
1441
                    wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n);
1442
                    nb_samples = samples_size / n;
1443

    
1444
                    min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1445
                    max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1446
                    if (wanted_size < min_size)
1447
                        wanted_size = min_size;
1448
                    else if (wanted_size > max_size)
1449
                        wanted_size = max_size;
1450

    
1451
                    /* add or remove samples to correction the synchro */
1452
                    if (wanted_size < samples_size) {
1453
                        /* remove samples */
1454
                        samples_size = wanted_size;
1455
                    } else if (wanted_size > samples_size) {
1456
                        uint8_t *samples_end, *q;
1457
                        int nb;
1458

    
1459
                        /* add samples */
1460
                        nb = (wanted_size - samples_size);
1461
                        samples_end = (uint8_t *)samples + samples_size - n;
1462
                        q = samples_end + n;
1463
                        while (nb > 0) {
1464
                            memcpy(q, samples_end, n);
1465
                            q += n;
1466
                            nb -= n;
1467
                        }
1468
                        samples_size = wanted_size;
1469
                    }
1470
                }
1471
#if 0
1472
                printf("diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
1473
                       diff, avg_diff, samples_size - samples_size1,
1474
                       is->audio_clock, is->video_clock, is->audio_diff_threshold);
1475
#endif
1476
            }
1477
        } else {
1478
            /* the difference is too big: it may be due to initial PTS errors,
               so reset the A-V filter */
1480
            is->audio_diff_avg_count = 0;
1481
            is->audio_diff_cum = 0;
1482
        }
1483
    }
1484

    
1485
    return samples_size;
1486
}
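
/* Worked example for synchronize_audio(): at 44100 Hz 16-bit stereo (n = 4)
   a 1024-sample buffer is samples_size = 4096 bytes. A correction with
   diff = +0.005 s asks for (int)(0.005 * 44100) * 4 = 880 extra bytes, but
   the SAMPLE_CORRECTION_PERCENT_MAX clamp (10%) limits wanted_size to the
   3684..4504 byte range, so at most roughly 100 samples are added or dropped
   per call. */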
1487

    
1488
/* decode one audio frame and returns its uncompressed size */
1489
static int audio_decode_frame(VideoState *is, uint8_t *audio_buf, double *pts_ptr)
1490
{
1491
    AVPacket *pkt = &is->audio_pkt;
1492
    int n, len1, data_size;
1493
    double pts;
1494

    
1495
    for(;;) {
1496
        /* NOTE: the audio packet can contain several frames */
1497
        while (is->audio_pkt_size > 0) {
1498
            len1 = avcodec_decode_audio(is->audio_st->codec,
1499
                                        (int16_t *)audio_buf, &data_size,
1500
                                        is->audio_pkt_data, is->audio_pkt_size);
1501
            if (len1 < 0) {
1502
                /* if error, we skip the frame */
1503
                is->audio_pkt_size = 0;
1504
                break;
1505
            }
1506

    
1507
            is->audio_pkt_data += len1;
1508
            is->audio_pkt_size -= len1;
1509
            if (data_size <= 0)
1510
                continue;
1511
            /* if no pts, then compute it */
1512
            pts = is->audio_clock;
1513
            *pts_ptr = pts;
1514
            n = 2 * is->audio_st->codec->channels;
1515
            is->audio_clock += (double)data_size /
1516
                (double)(n * is->audio_st->codec->sample_rate);
1517
#if defined(DEBUG_SYNC)
1518
            {
1519
                static double last_clock;
1520
                printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
1521
                       is->audio_clock - last_clock,
1522
                       is->audio_clock, pts);
1523
                last_clock = is->audio_clock;
1524
            }
1525
#endif
1526
            return data_size;
1527
        }
1528

    
1529
        /* free the current packet */
1530
        if (pkt->data)
1531
            av_free_packet(pkt);
1532

    
1533
        if (is->paused || is->audioq.abort_request) {
1534
            return -1;
1535
        }
1536

    
1537
        /* read next packet */
1538
        if (packet_queue_get(&is->audioq, pkt, 1) < 0)
1539
            return -1;
1540
        if(pkt->data == flush_pkt.data){
1541
            avcodec_flush_buffers(is->audio_st->codec);
1542
            continue;
1543
        }
1544

    
1545
        is->audio_pkt_data = pkt->data;
1546
        is->audio_pkt_size = pkt->size;
1547

    
1548
        /* if update the audio clock with the pts */
1549
        if (pkt->pts != AV_NOPTS_VALUE) {
1550
            is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
1551
        }
1552
    }
1553
}
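
/* Audio clock bookkeeping above: each packet with a valid pts resets
   is->audio_clock to that pts, and every decoded chunk then advances it by
   data_size / (2 * channels * sample_rate) seconds - e.g. 4096 bytes of
   44100 Hz stereo advance it by 4096 / 176400 ~= 0.023 s - so the clock
   always points at the end of the data returned to the caller. */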
1554

    
1555
/* get the current audio output buffer size, in bytes. With SDL, we
   cannot have precise information */
1557
static int audio_write_get_buf_size(VideoState *is)
1558
{
1559
    return is->audio_buf_size - is->audio_buf_index;
1560
}
1561

    
1562

    
1563
/* prepare a new audio buffer */
1564
void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
1565
{
1566
    VideoState *is = opaque;
1567
    int audio_size, len1;
1568
    double pts;
1569

    
1570
    audio_callback_time = av_gettime();
1571

    
1572
    while (len > 0) {
1573
        if (is->audio_buf_index >= is->audio_buf_size) {
1574
           audio_size = audio_decode_frame(is, is->audio_buf, &pts);
1575
           if (audio_size < 0) {
1576
                /* if error, just output silence */
1577
               is->audio_buf_size = 1024;
1578
               memset(is->audio_buf, 0, is->audio_buf_size);
1579
           } else {
1580
               if (is->show_audio)
1581
                   update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
1582
               audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, audio_size,
1583
                                              pts);
1584
               is->audio_buf_size = audio_size;
1585
           }
1586
           is->audio_buf_index = 0;
1587
        }
1588
        len1 = is->audio_buf_size - is->audio_buf_index;
1589
        if (len1 > len)
1590
            len1 = len;
1591
        memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
1592
        len -= len1;
1593
        stream += len1;
1594
        is->audio_buf_index += len1;
1595
    }
1596
}
1597

    
1598

    
1599
/* open a given stream. Return 0 if OK */
static int stream_component_open(VideoState *is, int stream_index)
{
    AVFormatContext *ic = is->ic;
    AVCodecContext *enc;
    AVCodec *codec;
    SDL_AudioSpec wanted_spec, spec;

    if (stream_index < 0 || stream_index >= ic->nb_streams)
        return -1;
    enc = ic->streams[stream_index]->codec;

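    /* note: SDL_OpenAudio() negotiates the format; 'wanted_spec' is what we
       ask for (native-endian signed 16 bit, SDL_AUDIO_BUFFER_SIZE samples per
       callback) and 'spec' is what we actually got.  spec.size is the
       callback buffer size in bytes and is saved as audio_hw_buf_size, used
       later when estimating how much audio is buffered but not yet played. */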
    /* prepare audio output */
    if (enc->codec_type == CODEC_TYPE_AUDIO) {
        wanted_spec.freq = enc->sample_rate;
        wanted_spec.format = AUDIO_S16SYS;
        /* hack for AC3. XXX: suppress that */
        if (enc->channels > 2)
            enc->channels = 2;
        wanted_spec.channels = enc->channels;
        wanted_spec.silence = 0;
        wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
        wanted_spec.callback = sdl_audio_callback;
        wanted_spec.userdata = is;
        if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
            fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
            return -1;
        }
        is->audio_hw_buf_size = spec.size;
    }

    codec = avcodec_find_decoder(enc->codec_id);
    enc->debug_mv = debug_mv;
    enc->debug = debug;
    enc->workaround_bugs = workaround_bugs;
    enc->lowres = lowres;
    if(lowres) enc->flags |= CODEC_FLAG_EMU_EDGE;
    enc->idct_algo= idct;
    if(fast) enc->flags2 |= CODEC_FLAG2_FAST;
    enc->skip_frame= skip_frame;
    enc->skip_idct= skip_idct;
    enc->skip_loop_filter= skip_loop_filter;
    enc->error_resilience= error_resilience;
    enc->error_concealment= error_concealment;
    if (!codec ||
        avcodec_open(enc, codec) < 0)
        return -1;
#if defined(HAVE_THREADS)
    if(thread_count>1)
        avcodec_thread_init(enc, thread_count);
#endif
    enc->thread_count= thread_count;
    switch(enc->codec_type) {
    case CODEC_TYPE_AUDIO:
        is->audio_stream = stream_index;
        is->audio_st = ic->streams[stream_index];
        is->audio_buf_size = 0;
        is->audio_buf_index = 0;

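        /* the A-V difference is smoothed with an exponential moving average:
           exp(log(0.01) / AUDIO_DIFF_AVG_NB) is the per-sample decay factor,
           chosen so that a measurement's weight falls to about 1% after
           AUDIO_DIFF_AVG_NB samples. */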
        /* init averaging filter */
        is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
        is->audio_diff_avg_count = 0;
        /* since we do not have a precise enough audio fifo fullness,
           we correct audio sync only if larger than this threshold */
        is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / enc->sample_rate;

        memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
        packet_queue_init(&is->audioq);
        SDL_PauseAudio(0);
        break;
    case CODEC_TYPE_VIDEO:
        is->video_stream = stream_index;
        is->video_st = ic->streams[stream_index];

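        /* frame_last_delay starts at 40 ms (presumably a 25 fps default)
           until a real PTS difference is available; frame_timer is the
           wall-clock reference in seconds (av_gettime() returns microseconds)
           used by the refresh code to schedule when pictures are shown. */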
        is->frame_last_delay = 40e-3;
        is->frame_timer = (double)av_gettime() / 1000000.0;
        is->video_current_pts_time = av_gettime();

        packet_queue_init(&is->videoq);
        is->video_tid = SDL_CreateThread(video_thread, is);
        break;
    case CODEC_TYPE_SUBTITLE:
        is->subtitle_stream = stream_index;
        is->subtitle_st = ic->streams[stream_index];
        packet_queue_init(&is->subtitleq);

        is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
        break;
    default:
        break;
    }
    return 0;
}

static void stream_component_close(VideoState *is, int stream_index)
{
    AVFormatContext *ic = is->ic;
    AVCodecContext *enc;

    if (stream_index < 0 || stream_index >= ic->nb_streams)
        return;
    enc = ic->streams[stream_index]->codec;

    switch(enc->codec_type) {
    case CODEC_TYPE_AUDIO:
        packet_queue_abort(&is->audioq);

        SDL_CloseAudio();

        packet_queue_end(&is->audioq);
        break;
    case CODEC_TYPE_VIDEO:
        packet_queue_abort(&is->videoq);

        /* note: we also signal this mutex to make sure we unblock the
           video thread in all cases */
        SDL_LockMutex(is->pictq_mutex);
        SDL_CondSignal(is->pictq_cond);
        SDL_UnlockMutex(is->pictq_mutex);

        SDL_WaitThread(is->video_tid, NULL);

        packet_queue_end(&is->videoq);
        break;
    case CODEC_TYPE_SUBTITLE:
        packet_queue_abort(&is->subtitleq);

        /* note: we also signal this mutex to make sure we unblock the
           subtitle thread in all cases */
        SDL_LockMutex(is->subpq_mutex);
        is->subtitle_stream_changed = 1;

        SDL_CondSignal(is->subpq_cond);
        SDL_UnlockMutex(is->subpq_mutex);

        SDL_WaitThread(is->subtitle_tid, NULL);

        packet_queue_end(&is->subtitleq);
        break;
    default:
        break;
    }

    avcodec_close(enc);
    switch(enc->codec_type) {
    case CODEC_TYPE_AUDIO:
        is->audio_st = NULL;
        is->audio_stream = -1;
        break;
    case CODEC_TYPE_VIDEO:
        is->video_st = NULL;
        is->video_stream = -1;
        break;
    case CODEC_TYPE_SUBTITLE:
        is->subtitle_st = NULL;
        is->subtitle_stream = -1;
        break;
    default:
        break;
    }
}

static void dump_stream_info(const AVFormatContext *s)
{
    if (s->track != 0)
        fprintf(stderr, "Track: %d\n", s->track);
    if (s->title[0] != '\0')
        fprintf(stderr, "Title: %s\n", s->title);
    if (s->author[0] != '\0')
        fprintf(stderr, "Author: %s\n", s->author);
    if (s->copyright[0] != '\0')
        fprintf(stderr, "Copyright: %s\n", s->copyright);
    if (s->comment[0] != '\0')
        fprintf(stderr, "Comment: %s\n", s->comment);
    if (s->album[0] != '\0')
        fprintf(stderr, "Album: %s\n", s->album);
    if (s->year != 0)
        fprintf(stderr, "Year: %d\n", s->year);
    if (s->genre[0] != '\0')
        fprintf(stderr, "Genre: %s\n", s->genre);
}

/* since we have only one decoding thread, we can use a global
   variable instead of a thread local variable */
static VideoState *global_video_state;

static int decode_interrupt_cb(void)
{
    return (global_video_state && global_video_state->abort_request);
}

/* this thread gets the stream from the disk or the network */
static int decode_thread(void *arg)
{
    VideoState *is = arg;
    AVFormatContext *ic;
    int err, i, ret, video_index, audio_index, use_play;
    AVPacket pkt1, *pkt = &pkt1;
    AVFormatParameters params, *ap = &params;

    video_index = -1;
    audio_index = -1;
    is->video_stream = -1;
    is->audio_stream = -1;
    is->subtitle_stream = -1;

    global_video_state = is;
    url_set_interrupt_cb(decode_interrupt_cb);

    memset(ap, 0, sizeof(*ap));
    ap->initial_pause = 1; /* we force a pause when starting an RTSP
                              stream */

    err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
    if (err < 0) {
        print_error(is->filename, err);
        ret = -1;
        goto fail;
    }
    is->ic = ic;
#ifdef CONFIG_NETWORK
    use_play = (ic->iformat == &rtsp_demuxer);
#else
    use_play = 0;
#endif

    if(genpts)
        ic->flags |= AVFMT_FLAG_GENPTS;

    if (!use_play) {
        err = av_find_stream_info(ic);
        if (err < 0) {
            fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
            ret = -1;
            goto fail;
        }
        ic->pb.eof_reached= 0; //FIXME hack, ffplay maybe shouldn't use url_feof() to test for the end
    }

    /* if seeking was requested, execute it */
    if (start_time != AV_NOPTS_VALUE) {
        int64_t timestamp;

        timestamp = start_time;
        /* add the stream start time */
        if (ic->start_time != AV_NOPTS_VALUE)
            timestamp += ic->start_time;
        ret = av_seek_frame(ic, -1, timestamp, AVSEEK_FLAG_BACKWARD);
        if (ret < 0) {
            fprintf(stderr, "%s: could not seek to position %0.3f\n",
                    is->filename, (double)timestamp / AV_TIME_BASE);
        }
    }

    /* now we can begin to play (RTSP stream only) */
    av_read_play(ic);

    if (use_play) {
        err = av_find_stream_info(ic);
        if (err < 0) {
            fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
            ret = -1;
            goto fail;
        }
    }

    for(i = 0; i < ic->nb_streams; i++) {
        AVCodecContext *enc = ic->streams[i]->codec;
        switch(enc->codec_type) {
        case CODEC_TYPE_AUDIO:
            if (audio_index < 0 && !audio_disable)
                audio_index = i;
            break;
        case CODEC_TYPE_VIDEO:
            if (video_index < 0 && !video_disable)
                video_index = i;
            break;
        default:
            break;
        }
    }
    if (show_status) {
        dump_format(ic, 0, is->filename, 0);
        dump_stream_info(ic);
    }

    /* open the streams */
    if (audio_index >= 0) {
        stream_component_open(is, audio_index);
    }

    if (video_index >= 0) {
        stream_component_open(is, video_index);
    } else {
        if (!display_disable)
            is->show_audio = 1;
    }

    if (is->video_stream < 0 && is->audio_stream < 0) {
        fprintf(stderr, "%s: could not open codecs\n", is->filename);
        ret = -1;
        goto fail;
    }

    for(;;) {
        if (is->abort_request)
            break;
#ifdef CONFIG_NETWORK
        if (is->paused != is->last_paused) {
            is->last_paused = is->paused;
            if (is->paused)
                av_read_pause(ic);
            else
                av_read_play(ic);
        }
        if (is->paused && ic->iformat == &rtsp_demuxer) {
            /* wait 10 ms to avoid trying to get another packet */
            /* XXX: horrible */
            SDL_Delay(10);
            continue;
        }
#endif
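        /* seek handling: after av_seek_frame() succeeds, every packet queue
           is flushed and a flush_pkt sentinel is queued so the decoding
           paths know to call avcodec_flush_buffers() (the audio path above
           does exactly that) before decoding from the new position. */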
        if (is->seek_req) {
            ret = av_seek_frame(is->ic, -1, is->seek_pos, is->seek_flags);
            if (ret < 0) {
                fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
            }else{
                if (is->audio_stream >= 0) {
                    packet_queue_flush(&is->audioq);
                    packet_queue_put(&is->audioq, &flush_pkt);
                }
                if (is->subtitle_stream >= 0) {
                    packet_queue_flush(&is->subtitleq);
                    packet_queue_put(&is->subtitleq, &flush_pkt);
                }
                if (is->video_stream >= 0) {
                    packet_queue_flush(&is->videoq);
                    packet_queue_put(&is->videoq, &flush_pkt);
                }
            }
            is->seek_req = 0;
        }

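        /* MAX_AUDIOQ_SIZE / MAX_VIDEOQ_SIZE / MAX_SUBTITLEQ_SIZE bound the
           amount of demuxed but not yet decoded data, so memory use stays
           limited; the 10 ms sleep simply throttles this thread until the
           decoders drain the queues. */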
        /* if the queues are full, no need to read more */
        if (is->audioq.size > MAX_AUDIOQ_SIZE ||
            is->videoq.size > MAX_VIDEOQ_SIZE ||
            is->subtitleq.size > MAX_SUBTITLEQ_SIZE ||
            url_feof(&ic->pb)) {
            /* wait 10 ms */
            SDL_Delay(10);
            continue;
        }
        ret = av_read_frame(ic, pkt);
        if (ret < 0) {
            if (url_ferror(&ic->pb) == 0) {
                SDL_Delay(100); /* wait for user event */
                continue;
            } else
                break;
        }
        if (pkt->stream_index == is->audio_stream) {
            packet_queue_put(&is->audioq, pkt);
        } else if (pkt->stream_index == is->video_stream) {
            packet_queue_put(&is->videoq, pkt);
        } else if (pkt->stream_index == is->subtitle_stream) {
            packet_queue_put(&is->subtitleq, pkt);
        } else {
            av_free_packet(pkt);
        }
    }
    /* wait until the end */
    while (!is->abort_request) {
        SDL_Delay(100);
    }

    ret = 0;
 fail:
    /* disable interrupting */
    global_video_state = NULL;

    /* close each stream */
    if (is->audio_stream >= 0)
        stream_component_close(is, is->audio_stream);
    if (is->video_stream >= 0)
        stream_component_close(is, is->video_stream);
    if (is->subtitle_stream >= 0)
        stream_component_close(is, is->subtitle_stream);
    if (is->ic) {
        av_close_input_file(is->ic);
        is->ic = NULL; /* safety */
    }
    url_set_interrupt_cb(NULL);

    if (ret != 0) {
        SDL_Event event;

        event.type = FF_QUIT_EVENT;
        event.user.data1 = is;
        SDL_PushEvent(&event);
    }
    return 0;
}

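/* create a VideoState and start the demuxer ("parse") thread; the individual
   decoders are started later from stream_component_open() once decode_thread
   has identified the streams. */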
static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
{
    VideoState *is;

    is = av_mallocz(sizeof(VideoState));
    if (!is)
        return NULL;
    pstrcpy(is->filename, sizeof(is->filename), filename);
    is->iformat = iformat;
    if (screen) {
        is->width = screen->w;
        is->height = screen->h;
    }
    is->ytop = 0;
    is->xleft = 0;

    /* start video display */
    is->pictq_mutex = SDL_CreateMutex();
    is->pictq_cond = SDL_CreateCond();

    is->subpq_mutex = SDL_CreateMutex();
    is->subpq_cond = SDL_CreateCond();

    /* add the refresh timer to draw the picture */
    schedule_refresh(is, 40);

    is->av_sync_type = av_sync_type;
    is->parse_tid = SDL_CreateThread(decode_thread, is);
    if (!is->parse_tid) {
        av_free(is);
        return NULL;
    }
    return is;
}

static void stream_close(VideoState *is)
{
    VideoPicture *vp;
    int i;
    /* XXX: use a special url_shutdown call to abort parse cleanly */
    is->abort_request = 1;
    SDL_WaitThread(is->parse_tid, NULL);

    /* free all pictures */
    for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE; i++) {
        vp = &is->pictq[i];
        if (vp->bmp) {
            SDL_FreeYUVOverlay(vp->bmp);
            vp->bmp = NULL;
        }
    }
    SDL_DestroyMutex(is->pictq_mutex);
    SDL_DestroyCond(is->pictq_cond);
    SDL_DestroyMutex(is->subpq_mutex);
    SDL_DestroyCond(is->subpq_cond);
}

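/* switch to the next stream of the given type: scan forward from the current
   index with wrap-around, keeping the first usable stream found.  For
   subtitles, going past the last stream selects -1, i.e. "no subtitle". */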
static void stream_cycle_channel(VideoState *is, int codec_type)
{
    AVFormatContext *ic = is->ic;
    int start_index, stream_index;
    AVStream *st;

    if (codec_type == CODEC_TYPE_VIDEO)
        start_index = is->video_stream;
    else if (codec_type == CODEC_TYPE_AUDIO)
        start_index = is->audio_stream;
    else
        start_index = is->subtitle_stream;
    if (start_index < (codec_type == CODEC_TYPE_SUBTITLE ? -1 : 0))
        return;
    stream_index = start_index;
    for(;;) {
        if (++stream_index >= is->ic->nb_streams)
        {
            if (codec_type == CODEC_TYPE_SUBTITLE)
            {
                stream_index = -1;
                goto the_end;
            } else
                stream_index = 0;
        }
        if (stream_index == start_index)
            return;
        st = ic->streams[stream_index];
        if (st->codec->codec_type == codec_type) {
            /* check that parameters are OK */
            switch(codec_type) {
            case CODEC_TYPE_AUDIO:
                if (st->codec->sample_rate != 0 &&
                    st->codec->channels != 0)
                    goto the_end;
                break;
            case CODEC_TYPE_VIDEO:
            case CODEC_TYPE_SUBTITLE:
                goto the_end;
            default:
                break;
            }
        }
    }
 the_end:
    stream_component_close(is, start_index);
    stream_component_open(is, stream_index);
}


static void toggle_full_screen(void)
{
    int w, h, flags;
    is_full_screen = !is_full_screen;
    if (!fs_screen_width) {
        /* use default SDL method */
        SDL_WM_ToggleFullScreen(screen);
    } else {
        /* use the recorded resolution */
        flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
        if (is_full_screen) {
            w = fs_screen_width;
            h = fs_screen_height;
            flags |= SDL_FULLSCREEN;
        } else {
            w = screen_width;
            h = screen_height;
            flags |= SDL_RESIZABLE;
        }
        screen = SDL_SetVideoMode(w, h, 0, flags);
        cur_stream->width = w;
        cur_stream->height = h;
    }
}

static void toggle_pause(void)
{
    if (cur_stream)
        stream_pause(cur_stream);
    step = 0;
}

static void step_to_next_frame(void)
{
    if (cur_stream) {
        if (cur_stream->paused)
            cur_stream->paused=0;
        cur_stream->video_current_pts = get_video_clock(cur_stream);
    }
    step = 1;
}

static void do_exit(void)
{
    if (cur_stream) {
        stream_close(cur_stream);
        cur_stream = NULL;
    }
    if (show_status)
        printf("\n");
    SDL_Quit();
    exit(0);
}

static void toggle_audio_display(void)
{
    if (cur_stream) {
        cur_stream->show_audio = !cur_stream->show_audio;
    }
}

/* handle an event sent by the GUI */
static void event_loop(void)
{
    SDL_Event event;
    double incr, pos, frac;

    for(;;) {
        SDL_WaitEvent(&event);
        switch(event.type) {
        case SDL_KEYDOWN:
            switch(event.key.keysym.sym) {
            case SDLK_ESCAPE:
            case SDLK_q:
                do_exit();
                break;
            case SDLK_f:
                toggle_full_screen();
                break;
            case SDLK_p:
            case SDLK_SPACE:
                toggle_pause();
                break;
            case SDLK_s: //S: Step to next frame
                step_to_next_frame();
                break;
            case SDLK_a:
                if (cur_stream)
                    stream_cycle_channel(cur_stream, CODEC_TYPE_AUDIO);
                break;
            case SDLK_v:
                if (cur_stream)
                    stream_cycle_channel(cur_stream, CODEC_TYPE_VIDEO);
                break;
            case SDLK_t:
                if (cur_stream)
                    stream_cycle_channel(cur_stream, CODEC_TYPE_SUBTITLE);
                break;
            case SDLK_w:
                toggle_audio_display();
                break;
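            /* arrow keys seek relative to the current position: +/-10 s for
               left/right, +/-60 s for down/up.  With -bytes the time
               increment is converted below to an approximate byte offset
               using the stream bit rate. */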
            case SDLK_LEFT:
                incr = -10.0;
                goto do_seek;
            case SDLK_RIGHT:
                incr = 10.0;
                goto do_seek;
            case SDLK_UP:
                incr = 60.0;
                goto do_seek;
            case SDLK_DOWN:
                incr = -60.0;
            do_seek:
                if (cur_stream) {
                    if (seek_by_bytes) {
                        pos = url_ftell(&cur_stream->ic->pb);
                        if (cur_stream->ic->bit_rate)
                            incr *= cur_stream->ic->bit_rate / 60.0;
                        else
                            incr *= 180000.0;
                        pos += incr;
                        stream_seek(cur_stream, pos, incr);
                    } else {
                        pos = get_master_clock(cur_stream);
                        pos += incr;
                        stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), incr);
                    }
                }
                break;
            default:
                break;
            }
            break;
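        /* a mouse click seeks to a position proportional to the click's x
           coordinate: frac = x / window_width, target = start_time +
           frac * duration. */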
        case SDL_MOUSEBUTTONDOWN:
            if (cur_stream) {
                int ns, hh, mm, ss;
                int tns, thh, tmm, tss;
                tns = cur_stream->ic->duration/1000000LL;
                thh = tns/3600;
                tmm = (tns%3600)/60;
                tss = (tns%60);
                frac = (double)event.button.x/(double)cur_stream->width;
                ns = frac*tns;
                hh = ns/3600;
                mm = (ns%3600)/60;
                ss = (ns%60);
                fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d)       \n", frac*100,
                        hh, mm, ss, thh, tmm, tss);
                stream_seek(cur_stream, (int64_t)(cur_stream->ic->start_time+frac*cur_stream->ic->duration), 0);
            }
            break;
        case SDL_VIDEORESIZE:
            if (cur_stream) {
                screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
                                          SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
                cur_stream->width = event.resize.w;
                cur_stream->height = event.resize.h;
            }
            break;
        case SDL_QUIT:
        case FF_QUIT_EVENT:
            do_exit();
            break;
        case FF_ALLOC_EVENT:
            alloc_picture(event.user.data1);
            break;
        case FF_REFRESH_EVENT:
            video_refresh_timer(event.user.data1);
            break;
        default:
            break;
        }
    }
}

void opt_width(const char *arg)
{
    screen_width = atoi(arg);
}

void opt_height(const char *arg)
{
    screen_height = atoi(arg);
}

static void opt_format(const char *arg)
{
    file_iformat = av_find_input_format(arg);
    if (!file_iformat) {
        fprintf(stderr, "Unknown input format: %s\n", arg);
        exit(1);
    }
}

#ifdef CONFIG_NETWORK
void opt_rtp_tcp(void)
{
    /* only tcp protocol */
    rtsp_default_protocols = (1 << RTSP_PROTOCOL_RTP_TCP);
}
#endif

void opt_sync(const char *arg)
{
    if (!strcmp(arg, "audio"))
        av_sync_type = AV_SYNC_AUDIO_MASTER;
    else if (!strcmp(arg, "video"))
        av_sync_type = AV_SYNC_VIDEO_MASTER;
    else if (!strcmp(arg, "ext"))
        av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
    else
        show_help();
}

void opt_seek(const char *arg)
{
    start_time = parse_date(arg, 1);
}

static void opt_debug(const char *arg)
{
    av_log_set_level(99);
    debug = atoi(arg);
}

static void opt_vismv(const char *arg)
{
    debug_mv = atoi(arg);
}

static void opt_thread_count(const char *arg)
{
    thread_count= atoi(arg);
#if !defined(HAVE_THREADS)
    fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
#endif
}

const OptionDef options[] = {
    { "h", 0, {(void*)show_help}, "show help" },
    { "x", HAS_ARG, {(void*)opt_width}, "force displayed width", "width" },
    { "y", HAS_ARG, {(void*)opt_height}, "force displayed height", "height" },
    { "fs", OPT_BOOL, {(void*)&is_full_screen}, "force full screen" },
    { "an", OPT_BOOL, {(void*)&audio_disable}, "disable audio" },
    { "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
    { "ss", HAS_ARG, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
    { "bytes", OPT_BOOL, {(void*)&seek_by_bytes}, "seek by bytes" },
    { "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
    { "f", HAS_ARG, {(void*)opt_format}, "force format", "fmt" },
    { "stats", OPT_BOOL | OPT_EXPERT, {(void*)&show_status}, "show status", "" },
    { "debug", HAS_ARG | OPT_EXPERT, {(void*)opt_debug}, "print specific debug info", "" },
    { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&workaround_bugs}, "workaround bugs", "" },
    { "vismv", HAS_ARG | OPT_EXPERT, {(void*)opt_vismv}, "visualize motion vectors", "" },
    { "fast", OPT_BOOL | OPT_EXPERT, {(void*)&fast}, "non spec compliant optimizations", "" },
    { "genpts", OPT_BOOL | OPT_EXPERT, {(void*)&genpts}, "generate pts", "" },
    { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&lowres}, "", "" },
    { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_loop_filter}, "", "" },
    { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_frame}, "", "" },
    { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_idct}, "", "" },
    { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&idct}, "set idct algo",  "algo" },
    { "er", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_resilience}, "set error detection threshold (0-4)",  "threshold" },
    { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_concealment}, "set error concealment options",  "bit_mask" },
#ifdef CONFIG_NETWORK
    { "rtp_tcp", OPT_EXPERT, {(void*)&opt_rtp_tcp}, "force RTP/TCP protocol usage", "" },
#endif
    { "sync", HAS_ARG | OPT_EXPERT, {(void*)opt_sync}, "set audio-video sync. type (type=audio/video/ext)", "type" },
    { "threads", HAS_ARG | OPT_EXPERT, {(void*)opt_thread_count}, "thread count", "count" },
    { NULL, },
};

2379
void show_help(void)
2380
{
2381
    printf("ffplay version " FFMPEG_VERSION ", Copyright (c) 2003-2006 Fabrice Bellard, et al.\n"
2382
           "usage: ffplay [options] input_file\n"
2383
           "Simple media player\n");
2384
    printf("\n");
2385
    show_help_options(options, "Main options:\n",
2386
                      OPT_EXPERT, 0);
2387
    show_help_options(options, "\nAdvanced options:\n",
2388
                      OPT_EXPERT, OPT_EXPERT);
2389
    printf("\nWhile playing:\n"
2390
           "q, ESC              quit\n"
2391
           "f                   toggle full screen\n"
2392
           "p, SPC              pause\n"
2393
           "a                   cycle audio channel\n"
2394
           "v                   cycle video channel\n"
2395
           "t                   cycle subtitle channel\n"
2396
           "w                   show audio waves\n"
2397
           "left/right          seek backward/forward 10 seconds\n"
2398
           "down/up             seek backward/forward 1 minute\n"
2399
           "mouse click         seek to percentage in file corresponding to fraction of width\n"
2400
           );
2401
    exit(1);
2402
}
2403

    
2404
void parse_arg_file(const char *filename)
2405
{
2406
    if (!strcmp(filename, "-"))
2407
                    filename = "pipe:";
2408
    input_filename = filename;
2409
}
2410

    
/* program entry point */
int main(int argc, char **argv)
{
    int flags, w, h;

    /* register all codecs, demuxers and protocols */
    av_register_all();

    #ifdef CONFIG_OS2
      MorphToPM(); // Morph the VIO application to a PM one to be able to use Win* functions

      // Make stdout and stderr unbuffered
      setbuf( stdout, NULL );
      setbuf( stderr, NULL );
    #endif

    parse_options(argc, argv, options);

    if (!input_filename)
        show_help();

    if (display_disable) {
        video_disable = 1;
    }
    flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
#if !defined(__MINGW32__) && !defined(CONFIG_DARWIN)
    flags |= SDL_INIT_EVENTTHREAD; /* Not supported on win32 or darwin */
#endif
    if (SDL_Init (flags)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

    if (!display_disable) {
#ifdef HAVE_SDL_VIDEO_SIZE
        const SDL_VideoInfo *vi = SDL_GetVideoInfo();
        fs_screen_width = vi->current_w;
        fs_screen_height = vi->current_h;
#endif
        flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
        if (is_full_screen && fs_screen_width) {
            w = fs_screen_width;
            h = fs_screen_height;
            flags |= SDL_FULLSCREEN;
        } else {
            w = screen_width;
            h = screen_height;
            flags |= SDL_RESIZABLE;
        }
#ifndef CONFIG_DARWIN
        screen = SDL_SetVideoMode(w, h, 0, flags);
#else
        /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
        screen = SDL_SetVideoMode(w, h, 24, flags);
#endif
        if (!screen) {
            fprintf(stderr, "SDL: could not set video mode - exiting\n");
            exit(1);
        }
        SDL_WM_SetCaption("FFplay", "FFplay");
    }

    SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
    SDL_EventState(SDL_MOUSEMOTION, SDL_IGNORE);
    SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
    SDL_EventState(SDL_USEREVENT, SDL_IGNORE);

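    /* flush_pkt is a sentinel packet: its data pointer (the static "FLUSH"
       string) is what the decode loops compare against to detect that a
       queue was flushed after a seek. */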
    av_init_packet(&flush_pkt);
    flush_pkt.data= "FLUSH";

    cur_stream = stream_open(input_filename, file_iformat);

    event_loop();

    /* never returns */

    return 0;
}