/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 the ffmpeg project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file libavcodec/interplayvideo.c
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently only supports a PAL8 output colorspace.
 *
 * An Interplay video frame consists of 2 parts: The decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */

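/* Layout of the buffer handed to this decoder, as it is consumed by
 * ipvideo_decode_frame() and ipvideo_decode_opcodes() below:
 *
 *   +-------------------------------+------------------------------------+
 *   | decoding map                  | video data                         |
 *   | width/8 * height/8 * 4 bits   | first 14 bytes are skipped, then   |
 *   | (one 4-bit opcode per 8x8     | the per-block payload consumed by  |
 *   |  block, in raster order)      | the opcode handlers                |
 *   +-------------------------------+------------------------------------+
 */
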
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "avcodec.h"
#include "bytestream.h"
#include "dsputil.h"
#define ALT_BITSTREAM_READER_LE
#include "get_bits.h"

#define PALETTE_COUNT 256

/* debugging support */
#define DEBUG_INTERPLAY 0
#if DEBUG_INTERPLAY
#define debug_interplay(x,...) av_log(NULL, AV_LOG_DEBUG, x, __VA_ARGS__)
#else
static inline void debug_interplay(const char *format, ...) { }
#endif

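/* The decoder keeps three frames in rotation: current_frame is the frame
 * being decoded, last_frame is the previous output and second_last_frame
 * the one before that; opcodes 0x0-0x5 copy blocks from these references.
 * stream_ptr/stream_end walk the video data portion of the input buffer,
 * pixel_ptr addresses the top-left corner of the 8x8 block currently being
 * decoded, and line_inc (= stride - 8) moves pixel_ptr to the next row of
 * the same block after 8 pixels have been written. */
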
typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame second_last_frame;
    AVFrame last_frame;
    AVFrame current_frame;
    const unsigned char *decoding_map;
    int decoding_map_size;

    const unsigned char *buf;
    int size;

    int is_16bpp;
    const unsigned char *stream_ptr;
    const unsigned char *stream_end;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

} IpvideoContext;

#define CHECK_STREAM_PTR(stream_ptr, stream_end, n) \
    if (stream_end - stream_ptr < n) { \
        av_log(s->avctx, AV_LOG_ERROR, "Interplay video warning: stream_ptr out of bounds (%p >= %p)\n", \
               stream_ptr + n, stream_end); \
        return -1; \
    }

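/* Copy one 8x8 block into the current frame from 'src' (the current, last
 * or second-to-last frame), displaced by the motion vector
 * (delta_x, delta_y).  The resulting offset is validated against the frame
 * bounds before an 8-pixel-wide copy routine from DSPContext performs the
 * actual copy. */
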
static int copy_from(IpvideoContext *s, AVFrame *src, int delta_x, int delta_y)
{
    int current_offset = s->pixel_ptr - s->current_frame.data[0];
    int motion_offset = current_offset + delta_y * s->current_frame.linesize[0] + delta_x;
    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset < 0 (%d)\n", motion_offset);
        return -1;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return -1;
    }
    s->dsp.put_pixels_tab[1][0](s->pixel_ptr, src->data[0] + motion_offset, s->current_frame.linesize[0], 8);
    return 0;
}

static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s)
{
    return copy_from(s, &s->last_frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s)
{
    return copy_from(s, &s->second_last_frame, 0, 0);
}

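/* Opcodes 0x2 and 0x3 pack a motion vector into a single byte B:
 *   B < 56:   x =   8 + (B % 7)          (x in [8, 14]),
 *             y = B / 7                  (y in [0, 7])
 *   B >= 56:  x = -14 + ((B - 56) % 29)  (x in [-14, 14]),
 *             y =   8 + ((B - 56) / 29)  (y in [8, 14])
 * Opcode 0x2 applies (x, y) to the frame from two frames ago; opcode 0x3
 * negates the same vector and copies from already-decoded data in the
 * current frame. */
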
static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
    B = *s->stream_ptr++;

    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->second_last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
    B = *s->stream_ptr++;

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->current_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);

    B = *s->stream_ptr++;
    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    debug_interplay (" motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, " Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}

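/* 2-color encoding: two palette indices P[0] and P[1] select the colors.
 * If P[0] <= P[1], each of the 8 rows gets one flag byte and every bit
 * selects a pixel color (the 0x100 sentinel ORed into the byte terminates
 * the row loop after exactly 8 bits).  Otherwise a single 16-bit flag word
 * paints the block in 2x2 quads, one bit per quad. */
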
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++ | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}

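/* Opcode 0x8 is 2-color encoding with sub-block patterns.  If the first
 * color pair satisfies P[0] <= P[1], each 4x4 quadrant carries its own
 * color pair and 16-bit pattern.  Otherwise the block is split into two
 * halves, each with its own color pair: vertically (left/right, 32-bit
 * patterns) when stream_ptr[4] <= stream_ptr[5], horizontally (top/bottom,
 * one pattern byte per row) otherwise. */
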
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 14);
        s->stream_ptr -= 2;

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                P[0] = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                flags = bytestream_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        /* need 10 more bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 10);

        if (s->stream_ptr[4] <= s->stream_ptr[5]) {

            flags = bytestream_get_le32(&s->stream_ptr);

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0] = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                    flags = bytestream_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = *s->stream_ptr++;
                    P[1] = *s->stream_ptr++;
                }
                flags = *s->stream_ptr++ | 0x100;

                for (; flags != 1; flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

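/* Opcode 0x9 is 4-color encoding with four palette indices P[0..3].
 * The orderings of the two color pairs select the pattern granularity:
 *   P[0] <= P[1] and P[2] <= P[3]: 2 bits per pixel (16 bytes of flags)
 *   P[0] <= P[1] and P[2] >  P[3]: 2 bits per 2x2 quad (32-bit flags)
 *   P[0] >  P[1] and P[2] <= P[3]: 2 bits per 2x1 pair (64-bit flags)
 *   P[0] >  P[1] and P[2] >  P[3]: 2 bits per 1x2 pair (64-bit flags)
 */
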
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    memcpy(P, s->stream_ptr, 4);
    s->stream_ptr += 4;

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

            flags = bytestream_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        flags = bytestream_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

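/* Opcode 0xA extends opcode 0x8 to 4 colors.  If the first color quartet
 * has stream_ptr[0] <= stream_ptr[1], every 4x4 quadrant carries its own
 * 4 colors and a 32-bit pattern (32 bytes total).  Otherwise the block is
 * split into two halves, each with 4 colors and a 64-bit pattern (24 bytes
 * total); stream_ptr[12] <= stream_ptr[13] selects a vertical (left/right)
 * split, anything else a horizontal (top/bottom) split. */
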
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];
    int flags = 0;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);

    if (s->stream_ptr[0] <= s->stream_ptr[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 32);

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert = s->stream_ptr[12] <= s->stream_ptr[13];
        uint64_t flags = 0;

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            // load values for each half
            if (!(y & 7)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le64(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 64);

    for (y = 0; y < 8; y++) {
        memcpy(s->pixel_ptr, s->stream_ptr, 8);
        s->stream_ptr += 8;
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = *s->stream_ptr++;
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s)
{
    int y;
    unsigned char P[2];

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = *s->stream_ptr++;
            P[1] = *s->stream_ptr++;
        }
        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
    pix = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);
    sample[0] = *s->stream_ptr++;
    sample[1] = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}

static int (* const ipvideo_decode_block[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};

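/* Walk the decoding map and the video data in lockstep: the map supplies
 * one 4-bit opcode per 8x8 block in raster order (read with the
 * little-endian bitstream reader), and the selected handler consumes
 * whatever bytes it needs from stream_ptr. */
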
static void ipvideo_decode_opcodes(IpvideoContext *s)
{
    int x, y;
    unsigned char opcode;
    int ret;
    static int frame = 0;
    GetBitContext gb;

    debug_interplay("------------------ frame %d\n", frame);
    frame++;

    /* this is PAL8, so make the palette available */
    memcpy(s->current_frame.data[1], s->avctx->palctrl->palette, PALETTE_COUNT * 4);

    s->stride = s->current_frame.linesize[0];
    s->stream_ptr = s->buf + 14;  /* data starts 14 bytes in */
    s->stream_end = s->buf + s->size;
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * s->stride
                                   + s->avctx->width - 8;

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            opcode = get_bits(&gb, 4);

            debug_interplay(" block @ (%3d, %3d): encoding 0x%X, data ptr @ %p\n",
                            x, y, opcode, s->stream_ptr);

            s->pixel_ptr = s->current_frame.data[0] + x
                           + y * s->current_frame.linesize[0];
            ret = ipvideo_decode_block[opcode](s);
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                       frame, x, y);
                return;
            }
        }
    }
    if (s->stream_end - s->stream_ptr > 1) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode finished with %td bytes left over\n",
               s->stream_end - s->stream_ptr);
    }
}

static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    if (s->avctx->palctrl == NULL) {
        av_log(avctx, AV_LOG_ERROR, " Interplay video: palette expected.\n");
        return -1;
    }

    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? PIX_FMT_RGB555 : PIX_FMT_PAL8;
    if (s->is_16bpp) {
        av_log(avctx, AV_LOG_ERROR, "16-bit Interplay video is not supported yet.\n");
        return -1;
    }
    dsputil_init(&s->dsp, avctx);

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);

    s->current_frame.data[0] = s->last_frame.data[0] =
    s->second_last_frame.data[0] = NULL;

    return 0;
}

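/* One input packet carries the decoding map followed by the video data:
 * the first decoding_map_size bytes are the map and the remainder is
 * handed to the opcode handlers.  After decoding, the reference frames
 * are rotated so the next frame can copy from the two most recent
 * outputs. */
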
static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *data_size,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    AVPaletteControl *palette_control = avctx->palctrl;

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    s->buf = buf + s->decoding_map_size;
    s->size = buf_size - s->decoding_map_size;

    s->current_frame.reference = 3;
    if (avctx->get_buffer(avctx, &s->current_frame)) {
        av_log(avctx, AV_LOG_ERROR, " Interplay Video: get_buffer() failed\n");
        return -1;
    }

    ipvideo_decode_opcodes(s);

    if (palette_control->palette_changed) {
        palette_control->palette_changed = 0;
        s->current_frame.palette_has_changed = 1;
    }

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    /* shuffle frames */
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);
    s->second_last_frame = s->last_frame;
    s->last_frame = s->current_frame;
    s->current_frame.data[0] = NULL;  /* catch any access attempts */

    /* report that the buffer was completely consumed */
    return buf_size;
}

static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    /* release the last frame */
    if (s->last_frame.data[0])
        avctx->release_buffer(avctx, &s->last_frame);
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);

    return 0;
}

AVCodec interplay_video_decoder = {
    "interplayvideo",
    CODEC_TYPE_VIDEO,
    CODEC_ID_INTERPLAY_VIDEO,
    sizeof(IpvideoContext),
    ipvideo_decode_init,
    NULL,
    ipvideo_decode_end,
    ipvideo_decode_frame,
    CODEC_CAP_DR1,
    .long_name = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
};