Statistics
| Branch: | Revision:

ffmpeg / libav / grab.c @ cca1b241

History | View | Annotate | Download (9.2 KB)

1
/*
2
 * Linux video grab interface
3
 * Copyright (c) 2000,2001 Fabrice Bellard.
4
 *
5
 * This library is free software; you can redistribute it and/or
6
 * modify it under the terms of the GNU Lesser General Public
7
 * License as published by the Free Software Foundation; either
8
 * version 2 of the License, or (at your option) any later version.
9
 *
10
 * This library is distributed in the hope that it will be useful,
11
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13
 * Lesser General Public License for more details.
14
 *
15
 * You should have received a copy of the GNU Lesser General Public
16
 * License along with this library; if not, write to the Free Software
17
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
18
 */
19
#include "avformat.h"
20
#include <linux/videodev.h>
21
#include <unistd.h>
22
#include <fcntl.h>
23
#include <sys/ioctl.h>
24
#include <sys/mman.h>
25
#include <sys/time.h>
26
#include <time.h>
27

    
28
/* Per-context state for the v4l grab demuxer (stored in
 * AVFormatContext.priv_data). Note that the actual capture buffers and
 * ioctl structures are still file-level globals (see below). */
typedef struct {
    int fd;               /* file descriptor of the opened v4l device */
    int frame_format;     /* see VIDEO_PALETTE_xxx */
    int use_mmap;         /* nonzero: mmap'ed double buffering; zero: read() based */
    int width, height;    /* capture size in pixels */
    int frame_rate;       /* in FRAME_RATE_BASE units (see grab_read_packet) */
    INT64 time_frame;     /* scheduled wall-clock time of the next frame, in us */
    int frame_size;       /* size in bytes of one raw frame for frame_format */
} VideoData;
37

    
38
/* Path of the v4l device to open. */
const char *v4l_device = "/dev/video";

/* XXX: move all that to the context */
/* NOTE(review): this global state means only one grab context can be
 * active per process — confirm before adding a second instance. */

static struct video_capability  video_cap;   /* device capabilities (VIDIOCGCAP) */
static UINT8 *video_buf;                     /* mmap'ed capture buffer area */
static struct video_mbuf gb_buffers;         /* mmap layout: size, frame count, offsets */
static struct video_mmap gb_buf;             /* parameters for VIDIOCMCAPTURE */
static struct video_audio audio, audio_saved; /* current and original audio settings */
static int gb_frame = 0;                     /* index of the buffer currently being grabbed */
48

    
49
/*
 * Open and configure the v4l capture device.
 *
 * Tries mmap-based capture first (VIDIOCGMBUF); if the driver does not
 * support it, falls back to read()-based capture (VIDIOCSWIN/VIDIOCSPICT).
 * In both modes a palette is negotiated in the order: caller preference,
 * then YUV420P, YUV422, RGB24. On success a raw-video stream describing
 * the negotiated format is set up on s1.
 *
 * Returns 0 on success, -1 on bad parameters, -ENOMEM or -EIO on failure.
 */
static int grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    VideoData *s = s1->priv_data;
    AVStream *st;
    int width, height;
    int video_fd, frame_size;
    int ret, frame_rate;
    int desired_palette;

    /* size and rate are mandatory: there are no defaults */
    if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
        return -1;

    width = ap->width;
    height = ap->height;
    frame_rate = ap->frame_rate;

    st = av_new_stream(s1, 0);
    if (!st)
        return -ENOMEM;

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;

    video_fd = open(v4l_device, O_RDWR);
    if (video_fd < 0) {
        perror(v4l_device);
        goto fail;
    }

    if (ioctl(video_fd,VIDIOCGCAP,&video_cap) < 0) {
        perror("VIDIOCGCAP");
        goto fail;
    }

    if (!(video_cap.type & VID_TYPE_CAPTURE)) {
        fprintf(stderr, "Fatal: grab device does not handle capture\n");
        goto fail;
    }

    /* map the caller's preferred pixel format to a v4l palette;
       -1 means "no preference", letting the fallback chain decide */
    desired_palette = -1;
    if (st->codec.pix_fmt == PIX_FMT_YUV420P) {
        desired_palette = VIDEO_PALETTE_YUV420P;
    } else if (st->codec.pix_fmt == PIX_FMT_YUV422) {
        desired_palette = VIDEO_PALETTE_YUV422;
    } else if (st->codec.pix_fmt == PIX_FMT_BGR24) {
        /* v4l's "RGB24" palette is byte order B,G,R (see format switch below) */
        desired_palette = VIDEO_PALETTE_RGB24;
    }

    /* unmute audio; the original settings are saved so grab_read_close()
       can restore them.  NOTE(review): they are NOT restored on the
       failure paths below — verify whether that matters for the driver. */
    ioctl(video_fd, VIDIOCGAUDIO, &audio);
    memcpy(&audio_saved, &audio, sizeof(audio));
    audio.flags &= ~VIDEO_AUDIO_MUTE;
    ioctl(video_fd, VIDIOCSAUDIO, &audio);

    /* probe for mmap-capable capture */
    ret = ioctl(video_fd,VIDIOCGMBUF,&gb_buffers);
    if (ret < 0) {
        /* try to use read based access */
        struct video_window win;
        struct video_picture pict;
        int val;

        win.x = 0;
        win.y = 0;
        win.width = width;
        win.height = height;
        win.chromakey = -1;
        win.flags = 0;

        ioctl(video_fd, VIDIOCSWIN, &win);

        ioctl(video_fd, VIDIOCGPICT, &pict);
#if 0
        printf("v4l: colour=%d hue=%d brightness=%d constrast=%d whiteness=%d\n",
               pict.colour,
               pict.hue,
               pict.brightness,
               pict.contrast,
               pict.whiteness);
#endif
        /* try to choose a suitable video format: preferred palette first,
           then the YUV420P -> YUV422 -> RGB24 fallback chain */
        pict.palette = desired_palette;
        if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCSPICT, &pict)) < 0) {
            pict.palette=VIDEO_PALETTE_YUV420P;
            ret = ioctl(video_fd, VIDIOCSPICT, &pict);
            if (ret < 0) {
                pict.palette=VIDEO_PALETTE_YUV422;
                ret = ioctl(video_fd, VIDIOCSPICT, &pict);
                if (ret < 0) {
                    pict.palette=VIDEO_PALETTE_RGB24;
                    ret = ioctl(video_fd, VIDIOCSPICT, &pict);
                    if (ret < 0)
                        goto fail1;
                }
            }
        }

        s->frame_format = pict.palette;

        /* start continuous capture */
        val = 1;
        ioctl(video_fd, VIDIOCCAPTURE, &val);

        s->time_frame = av_gettime();
        s->use_mmap = 0;
    } else {
        video_buf = mmap(0,gb_buffers.size,PROT_READ|PROT_WRITE,MAP_SHARED,video_fd,0);
        if ((unsigned char*)-1 == video_buf) {
            perror("mmap");
            goto fail;
        }
        gb_frame = 0;
        s->time_frame = av_gettime();

        /* start to grab the first frame */
        gb_buf.frame = gb_frame % gb_buffers.frames;
        gb_buf.height = height;
        gb_buf.width = width;
        gb_buf.format = desired_palette;

        /* same palette fallback chain as the read-based path, but probed
           via VIDIOCMCAPTURE; EAGAIN means "no signal", not "bad format",
           so it is not treated as a format rejection */
        if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf)) < 0) {
            gb_buf.format = VIDEO_PALETTE_YUV420P;

            ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
            if (ret < 0 && errno != EAGAIN) {
                /* try YUV422 */
                gb_buf.format = VIDEO_PALETTE_YUV422;

                ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
                if (ret < 0 && errno != EAGAIN) {
                    /* try RGB24 */
                    gb_buf.format = VIDEO_PALETTE_RGB24;
                    ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
                }
            }
        }
        if (ret < 0) {
            if (errno != EAGAIN) {
            /* fail1 is also the target of the read-based path's palette
               failure above — both paths end with this diagnostic */
            fail1:
                fprintf(stderr, "Fatal: grab device does not support suitable format\n");
            } else {
                fprintf(stderr,"Fatal: grab device does not receive any video signal\n");
            }
            goto fail;
        }
        s->frame_format = gb_buf.format;
        s->use_mmap = 1;
    }

    /* derive the raw frame size and the libav pixel format from the
       negotiated v4l palette */
    switch(s->frame_format) {
    case VIDEO_PALETTE_YUV420P:
        frame_size = (width * height * 3) / 2;
        st->codec.pix_fmt = PIX_FMT_YUV420P;
        break;
    case VIDEO_PALETTE_YUV422:
        frame_size = width * height * 2;
        st->codec.pix_fmt = PIX_FMT_YUV422;
        break;
    case VIDEO_PALETTE_RGB24:
        frame_size = width * height * 3;
        st->codec.pix_fmt = PIX_FMT_BGR24; /* NOTE: v4l uses BGR24, not RGB24 ! */
        break;
    default:
        goto fail;
    }
    s->fd = video_fd;
    s->frame_size = frame_size;

    st->codec.codec_type = CODEC_TYPE_VIDEO;
    st->codec.codec_id = CODEC_ID_RAWVIDEO;
    st->codec.width = width;
    st->codec.height = height;
    st->codec.frame_rate = frame_rate;

    return 0;
 fail:
    if (video_fd >= 0)
        close(video_fd);
    /* NOTE(review): st was registered in s1 by av_new_stream(); freeing it
       here likely leaves a dangling pointer in s1->streams — confirm how
       callers clean up after a failed read_header */
    av_free(st);
    return -EIO;
}
229

    
230
/*
 * Read one frame in mmap mode using double buffering: queue the capture
 * of the next buffer (VIDIOCMCAPTURE), wait for the previously queued
 * buffer to complete (VIDIOCSYNC), then copy it out of the mmap'ed area.
 *
 * buf must have room for s->frame_size bytes.
 * Returns the frame size in bytes, or -EIO on capture failure.
 * Uses the file-level globals gb_buf/gb_frame/gb_buffers/video_buf.
 */
static int v4l_mm_read_picture(VideoData *s, UINT8 *buf)
{
    UINT8 *ptr;

    /* Setup to capture the next frame */
    gb_buf.frame = (gb_frame + 1) % gb_buffers.frames;
    if (ioctl(s->fd, VIDIOCMCAPTURE, &gb_buf) < 0) {
        /* EAGAIN from VIDIOCMCAPTURE means the device sees no video signal */
        if (errno == EAGAIN)
            fprintf(stderr,"Cannot Sync\n");
        else
            perror("VIDIOCMCAPTURE");
        return -EIO;
    }

    /* wait until the frame queued on the previous call is complete,
       retrying on EAGAIN/EINTR */
    while (ioctl(s->fd, VIDIOCSYNC, &gb_frame) < 0 &&
           (errno == EAGAIN || errno == EINTR));

    /* copy the completed frame out of the mmap'ed buffer area */
    ptr = video_buf + gb_buffers.offsets[gb_frame];
    memcpy(buf, ptr, s->frame_size);

    /* This is now the grabbing frame */
    gb_frame = gb_buf.frame;

    return s->frame_size;
}
255

    
256
/*
 * Deliver one raw video frame, pacing to the configured frame rate.
 *
 * Sleeps until the scheduled time of the next frame; if grabbing has
 * fallen more than one frame interval behind, the packet is flagged as
 * a dropped frame and the schedule is resynchronized.  The frame itself
 * is fetched either via the mmap double-buffer path or a plain read().
 *
 * Returns the frame size in bytes, or -EIO on allocation/read failure.
 */
static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    VideoData *s = s1->priv_data;
    INT64 curtime, delay;
    struct timespec ts;
    /* duration of one frame in microseconds; frame_rate is expressed in
       FRAME_RATE_BASE units */
    INT64 per_frame = (INT64_C(1000000) * FRAME_RATE_BASE) / s->frame_rate;
    int dropped = 0;

    /* Calculate the time of the next frame */
    s->time_frame += per_frame;

    /* wait based on the frame rate */
    /* (fixed: removed the unused `first` loop variable) */
    for(;;) {
        curtime = av_gettime();
        delay = s->time_frame - curtime;
        if (delay <= 0) {
            if (delay < -per_frame) {
                /* more than one frame late: mark as dropped and pull the
                   schedule forward so we do not try to catch up forever */
                dropped = 1;
                s->time_frame += per_frame;
            }
            break;
        }
        ts.tv_sec = delay / 1000000;
        ts.tv_nsec = (delay % 1000000) * 1000;
        nanosleep(&ts, NULL);
    }

    if (av_new_packet(pkt, s->frame_size) < 0)
        return -EIO;

    if (dropped)
        pkt->flags |= PKT_FLAG_DROPPED_FRAME;

    /* read one frame */
    if (s->use_mmap) {
        return v4l_mm_read_picture(s, pkt->data);
    } else {
        if (read(s->fd, pkt->data, pkt->size) != pkt->size)
            return -EIO;
        return s->frame_size;
    }
}
300

    
301
/*
 * Tear down the grab device: unmap the capture buffers (mmap mode only),
 * restore the audio settings saved by grab_read_header(), and close the
 * device file descriptor.  Always returns 0.
 */
static int grab_read_close(AVFormatContext *s1)
{
    VideoData *vd = s1->priv_data;

    if (vd->use_mmap) {
        munmap(video_buf, gb_buffers.size);
    }

    /* put the audio state back the way we found it at open time */
    ioctl(vd->fd, VIDIOCSAUDIO, &audio_saved);

    close(vd->fd);
    return 0;
}
314

    
315
/* Demuxer descriptor for the v4l grab device (positional initializer;
   field identities inferred from the callbacks — confirm against the
   AVInputFormat declaration in avformat.h). */
AVInputFormat video_grab_device_format = {
    "video_grab_device",   /* name */
    "video grab",          /* long_name */
    sizeof(VideoData),     /* priv_data_size */
    NULL,                  /* presumably read_probe: unused, format chosen explicitly */
    grab_read_header,
    grab_read_packet,
    grab_read_close,
    flags: AVFMT_NOFILE,   /* no underlying ByteIOContext; we open the device ourselves */
};
325

    
326
/* Register the v4l grab demuxer with libav.  Always returns 0. */
int video_grab_init(void)
{
    av_register_input_format(&video_grab_device_format);
    return 0;
}