/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 The FFmpeg project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently supports only the PAL8 and RGB555 output colorspaces.
 *
 * An Interplay video frame consists of 2 parts: the decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */

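/* Packet layout, as parsed by ipvideo_decode_frame() below:
 *   byte  0:   frame format (0x06, 0x10 or 0x11)
 *   byte  1:   send_buffer flag (copied to *got_frame)
 *   bytes 2-3: video data size   (little-endian)
 *   bytes 4-5: decoding map size (little-endian)
 *   bytes 6-7: skip map size     (little-endian)
 * followed by the video data, decoding map and skip map, laid out as the
 * per-format cases in that function expect. */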

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "libavutil/intreadwrite.h"

#define BITSTREAM_READER_LE
#include "avcodec.h"
#include "bytestream.h"
#include "get_bits.h"
#include "hpeldsp.h"
#include "internal.h"

#define PALETTE_COUNT 256

typedef struct IpvideoContext {

    AVCodecContext *avctx;
    HpelDSPContext hdsp;
    AVFrame *second_last_frame;
    AVFrame *last_frame;

    /* For format 0x10 */
    AVFrame *cur_decode_frame;
    AVFrame *prev_decode_frame;

    const unsigned char *decoding_map;
    int decoding_map_size;
    const unsigned char *skip_map;
    int skip_map_size;

    int is_16bpp;
    GetByteContext stream_ptr, mv_ptr;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

    uint32_t pal[256];
} IpvideoContext;

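/* Copy the 8x8 block at the current output position (s->pixel_ptr) from a
 * source frame, displaced by the (delta_x, delta_y) motion vector.  The
 * resulting source offset is validated against the frame bounds before the
 * copy is carried out by hpeldsp. */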
static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
{
    int width = dst->width;
    int current_offset = s->pixel_ptr - dst->data[0];
    int x = (current_offset % dst->linesize[0]) / (1 + s->is_16bpp);
    int y = current_offset / dst->linesize[0];
    int dx = delta_x + x - ((delta_x + x >= width) - (delta_x + x < 0)) * width;
    int dy = delta_y + y + (delta_x + x >= width) - (delta_x + x < 0);
    int motion_offset = dy * src->linesize[0] + dx * (1 + s->is_16bpp);

    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
        return AVERROR_INVALIDDATA;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return AVERROR_INVALIDDATA;
    }
    if (!src->data[0]) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
        return AVERROR(EINVAL);
    }
    s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
                                            dst->linesize[0], 8);
    return 0;
}

static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->last_frame, frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->second_last_frame, frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}

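/* Opcodes 0x7-0xA are pattern fills: a small set of 2 or 4 colours is read
 * from the stream, then flag bits (consumed LSB-first) pick a colour for
 * each pixel, pixel pair or 2x2 cell.  The ordering of the colour values
 * (e.g. P[0] <= P[1]) selects which sub-mode is used. */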
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x7\n");
        return AVERROR_INVALIDDATA;
    }

    /* 2-color encoding */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];
    unsigned int flags = 0;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 12) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x8\n");
        return AVERROR_INVALIDDATA;
    }

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_byte(&s->stream_ptr);
                    P[1] = bytestream2_get_byte(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_byte(&s->stream_ptr);
        P[3] = bytestream2_get_byte(&s->stream_ptr);

        if (P[2] <= P[3]) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0] = P[2];
                    P[1] = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = P[2];
                    P[1] = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 8) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x9\n");
        return AVERROR_INVALIDDATA;
    }

    /* 4-color encoding */
    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[8];
    int flags = 0;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 16) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xA\n");
        return AVERROR_INVALIDDATA;
    }

    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (P[0] <= P[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
        vert = P[4] <= P[5];

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 4);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    for (y = 0; y < 8; y++) {
        bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
{
    int y;
    unsigned char P[2];

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xD\n");
        return AVERROR_INVALIDDATA;
    }

    /* 4-color block encoding: each 4x4 block is a different color */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_byte(&s->stream_ptr);
            P[1] = bytestream2_get_byte(&s->stream_ptr);
        }
        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    pix = bytestream2_get_byte(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    sample[0] = bytestream2_get_byte(&s->stream_ptr);
    sample[1] = bytestream2_get_byte(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}

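/* 16bpp variants of the block decoders.  Colours are read as little-endian
 * 16-bit values; since bit 15 is unused in RGB555, the sub-mode that the
 * 8bpp code selects with "P[0] <= P[1]" is signalled here by that top bit
 * instead (see the !(P[x] & 0x8000) checks). */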
static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the second last frame using an expanded range */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }

    } else {

        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}

static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_le16(&s->stream_ptr);
                    P[1] = bytestream2_get_le16(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_le16(&s->stream_ptr);
        P[3] = bytestream2_get_le16(&s->stream_ptr);

        if (!(P[2] & 0x8000)) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0] = P[2];
                    P[1] = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = P[2];
                    P[1] = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {

            /* 1 of 4 colors for each pixel */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[8];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (!(P[0] & 0x8000)) {

        /* 4-color encoding for each quadrant */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y)
                    for (x = 0; x < 4; x++)
                        P[x] = bytestream2_get_le16(&s->stream_ptr);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        for (x = 4; x < 8; x++)
            P[x] = bytestream2_get_le16(&s->stream_ptr);
        vert = !(P[4] & 0x8000);

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 8);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 64-color encoding (each pixel in block is a different color) */
    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pixel_ptr[x                ] =
            pixel_ptr[x + 1            ] =
            pixel_ptr[x +     s->stride] =
            pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
        }
        pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color block encoding: each 4x4 block is a different color */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_le16(&s->stream_ptr);
            P[1] = bytestream2_get_le16(&s->stream_ptr);
        }
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = P[x >> 2];
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t pix;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 1-color encoding: the whole block is 1 solid color */
    pix = bytestream2_get_le16(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = pix;
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

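/* Dispatch tables indexed by the 4-bit block opcode from the decoding map,
 * one table per output format.  The 16bpp table shares the motion-copy
 * handlers 0x0-0x5 with the 8bpp table and maps opcode 0xF onto the 0x1
 * handler. */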
static int (* const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};

static int (* const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};

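/* Format 0x06 uses one little-endian 16-bit opcode per 8x8 block and is
 * decoded in two passes over the decoding map.  The first pass reads raw
 * pixel data for blocks whose opcode is zero and pre-fills the remaining
 * blocks from the frame before last.  The second pass performs copies:
 * negative opcodes copy from the previous frame and positive ones from the
 * current frame, with the source block offset encoded relative to 0xC000
 * and 0x4000 respectively. */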
static void ipvideo_format_06_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int line;

    if (!opcode) {
        for (line = 0; line < 8; ++line) {
            bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
            s->pixel_ptr += s->stride;
        }
    } else {
        /* Don't try to copy second_last_frame data on the first frames */
        if (s->avctx->frame_number > 2)
            copy_from(s, s->second_last_frame, frame, 0, 0);
    }
}

static void ipvideo_format_06_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int off_x, off_y;

    if (opcode < 0) {
        off_x = ((uint16_t)opcode - 0xC000) % frame->width;
        off_y = ((uint16_t)opcode - 0xC000) / frame->width;
        copy_from(s, s->last_frame, frame, off_x, off_y);
    } else if (opcode > 0) {
        off_x = ((uint16_t)opcode - 0x4000) % frame->width;
        off_y = ((uint16_t)opcode - 0x4000) / frame->width;
        copy_from(s, frame, frame, off_x, off_y);
    }
}

static void (* const ipvideo_format_06_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
    ipvideo_format_06_firstpass, ipvideo_format_06_secondpass,
};

static void ipvideo_decode_format_06_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int pass, x, y;
    int16_t opcode;
    GetByteContext decoding_map_ptr;

    /* this is PAL8, so make the palette available */
    memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
    s->stride = frame->linesize[0];

    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                  + (s->avctx->width - 8) * (1 + s->is_16bpp);

    bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);

    for (pass = 0; pass < 2; ++pass) {
        bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
        for (y = 0; y < s->avctx->height; y += 8) {
            for (x = 0; x < s->avctx->width; x += 8) {
                opcode = bytestream2_get_le16(&decoding_map_ptr);

                ff_tlog(s->avctx,
                        "  block @ (%3d, %3d): opcode 0x%X, data ptr offset %d\n",
                        x, y, opcode, bytestream2_tell(&s->stream_ptr));

                s->pixel_ptr = frame->data[0] + x + y * frame->linesize[0];
                ipvideo_format_06_passes[pass](s, frame, opcode);
            }
        }
    }

    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}

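/* Format 0x10 is decoded like format 0x06, but adds a skip map that marks
 * which 8x8 blocks actually carry new data.  Changed blocks are decoded
 * into a work frame (cur_decode_frame) and copied to the output; unchanged
 * blocks are copied straight from the previous output frame. */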
static void ipvideo_format_10_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int line;

    if (!opcode) {
        for (line = 0; line < 8; ++line) {
            bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
            s->pixel_ptr += s->stride;
        }
    }
}

static void ipvideo_format_10_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int off_x, off_y;

    if (opcode < 0) {
        off_x = ((uint16_t)opcode - 0xC000) % s->cur_decode_frame->width;
        off_y = ((uint16_t)opcode - 0xC000) / s->cur_decode_frame->width;
        copy_from(s, s->prev_decode_frame, s->cur_decode_frame, off_x, off_y);
    } else if (opcode > 0) {
        off_x = ((uint16_t)opcode - 0x4000) % s->cur_decode_frame->width;
        off_y = ((uint16_t)opcode - 0x4000) / s->cur_decode_frame->width;
        copy_from(s, s->cur_decode_frame, s->cur_decode_frame, off_x, off_y);
    }
}

static void (* const ipvideo_format_10_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
    ipvideo_format_10_firstpass, ipvideo_format_10_secondpass,
};

static void ipvideo_decode_format_10_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int pass, x, y, changed_block;
    int16_t opcode, skip;
    GetByteContext decoding_map_ptr;
    GetByteContext skip_map_ptr;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */

    /* this is PAL8, so make the palette available */
    memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
    s->stride = frame->linesize[0];

    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                  + (s->avctx->width - 8) * (1 + s->is_16bpp);

    bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);
    bytestream2_init(&skip_map_ptr, s->skip_map, s->skip_map_size);

    for (pass = 0; pass < 2; ++pass) {
        bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
        bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
        skip = bytestream2_get_le16(&skip_map_ptr);

        for (y = 0; y < s->avctx->height; y += 8) {
            for (x = 0; x < s->avctx->width; x += 8) {
                s->pixel_ptr = s->cur_decode_frame->data[0] + x + y * s->cur_decode_frame->linesize[0];

                while (skip <= 0) {
                    if (skip != -0x8000 && skip) {
                        opcode = bytestream2_get_le16(&decoding_map_ptr);
                        ipvideo_format_10_passes[pass](s, frame, opcode);
                        break;
                    }
                    if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
                        return;
                    skip = bytestream2_get_le16(&skip_map_ptr);
                }
                skip *= 2;
            }
        }
    }

    bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
    skip = bytestream2_get_le16(&skip_map_ptr);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            changed_block = 0;
            s->pixel_ptr = frame->data[0] + x + y*frame->linesize[0];

            while (skip <= 0) {
                if (skip != -0x8000 && skip) {
                    changed_block = 1;
                    break;
                }
                if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
                    return;
                skip = bytestream2_get_le16(&skip_map_ptr);
            }

            if (changed_block) {
                copy_from(s, s->cur_decode_frame, frame, 0, 0);
            } else {
                /* Don't try to copy last_frame data on the first frame */
                if (s->avctx->frame_number)
                    copy_from(s, s->last_frame, frame, 0, 0);
            }
            skip *= 2;
        }
    }

    FFSWAP(AVFrame*, s->prev_decode_frame, s->cur_decode_frame);

    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}

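/* Format 0x11 carries one 4-bit opcode per 8x8 block in the decoding map,
 * dispatched through the tables above.  For 16bpp streams the motion bytes
 * live in a separate part of the video data, reached via a leading
 * little-endian 16-bit offset and read through s->mv_ptr. */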
static void ipvideo_decode_format_11_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char opcode;
    int ret;
    GetBitContext gb;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);

        s->stride = frame->linesize[0];
    } else {
        s->stride = frame->linesize[0] >> 1;
        s->mv_ptr = s->stream_ptr;
        bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
    }
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                  + (s->avctx->width - 8) * (1 + s->is_16bpp);

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            if (get_bits_left(&gb) < 4)
                return;
            opcode = get_bits(&gb, 4);

            ff_tlog(s->avctx,
                    "  block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
                    x, y, opcode, bytestream2_tell(&s->stream_ptr));

            if (!s->is_16bpp) {
                s->pixel_ptr = frame->data[0] + x
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block[opcode](s, frame);
            } else {
                s->pixel_ptr = frame->data[0] + x*2
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block16[opcode](s, frame);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %d, @ block (%d, %d)\n",
                       s->avctx->frame_number, x, y);
                return;
            }
        }
    }
    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}

static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;

    ff_hpeldsp_init(&s->hdsp, avctx->flags);

    s->last_frame        = av_frame_alloc();
    s->second_last_frame = av_frame_alloc();
    s->cur_decode_frame  = av_frame_alloc();
    s->prev_decode_frame = av_frame_alloc();
    if (!s->last_frame || !s->second_last_frame ||
        !s->cur_decode_frame || !s->prev_decode_frame) {
        return AVERROR(ENOMEM);
    }

    s->cur_decode_frame->width   = avctx->width;
    s->prev_decode_frame->width  = avctx->width;
    s->cur_decode_frame->height  = avctx->height;
    s->prev_decode_frame->height = avctx->height;
    s->cur_decode_frame->format  = avctx->pix_fmt;
    s->prev_decode_frame->format = avctx->pix_fmt;

    return 0;
}

static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *got_frame,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    AVFrame *frame = data;
    int ret;
    int send_buffer;
    int frame_format;
    int video_data_size;

    if (av_packet_get_side_data(avpkt, AV_PKT_DATA_PARAM_CHANGE, NULL)) {
        av_frame_unref(s->last_frame);
        av_frame_unref(s->second_last_frame);
        av_frame_unref(s->cur_decode_frame);
        av_frame_unref(s->prev_decode_frame);
    }

    if (!s->cur_decode_frame->data[0]) {
        ret = ff_get_buffer(avctx, s->cur_decode_frame, 0);
        if (ret < 0)
            return ret;

        ret = ff_get_buffer(avctx, s->prev_decode_frame, 0);
        if (ret < 0) {
            av_frame_unref(s->cur_decode_frame);
            return ret;
        }
    }

    if (buf_size < 8)
        return AVERROR_INVALIDDATA;

    frame_format         = AV_RL8(buf);
    send_buffer          = AV_RL8(buf + 1);
    video_data_size      = AV_RL16(buf + 2);
    s->decoding_map_size = AV_RL16(buf + 4);
    s->skip_map_size     = AV_RL16(buf + 6);

    switch (frame_format) {
    case 0x06:
        if (s->decoding_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Decoding map for format 0x06\n");
            return AVERROR_INVALIDDATA;
        }

        if (s->skip_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x06\n");
            return AVERROR_INVALIDDATA;
        }

        if (s->is_16bpp) {
            av_log(avctx, AV_LOG_ERROR, "Video format 0x06 does not support 16bpp movies\n");
            return AVERROR_INVALIDDATA;
        }

        /* Decoding map for 0x06 frame format is at the top of pixeldata */
        s->decoding_map_size = ((s->avctx->width / 8) * (s->avctx->height / 8)) * 2;
        s->decoding_map = buf + 8 + 14; /* 14 bytes of op data */
        video_data_size -= s->decoding_map_size + 14;
        if (video_data_size <= 0 || s->decoding_map_size == 0)
            return AVERROR_INVALIDDATA;

        if (buf_size < 8 + s->decoding_map_size + 14 + video_data_size)
            return AVERROR_INVALIDDATA;

        bytestream2_init(&s->stream_ptr, buf + 8 + s->decoding_map_size + 14, video_data_size);

        break;

    case 0x10:
        if (! s->decoding_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x10\n");
            return AVERROR_INVALIDDATA;
        }

        if (! s->skip_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Empty skip map for format 0x10\n");
            return AVERROR_INVALIDDATA;
        }

        if (s->is_16bpp) {
            av_log(avctx, AV_LOG_ERROR, "Video format 0x10 does not support 16bpp movies\n");
            return AVERROR_INVALIDDATA;
        }

        if (buf_size < 8 + video_data_size + s->decoding_map_size + s->skip_map_size)
            return AVERROR_INVALIDDATA;

        bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
        s->decoding_map = buf + 8 + video_data_size;
        s->skip_map = buf + 8 + video_data_size + s->decoding_map_size;

        break;

    case 0x11:
        if (! s->decoding_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x11\n");
            return AVERROR_INVALIDDATA;
        }

        if (s->skip_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x11\n");
            return AVERROR_INVALIDDATA;
        }

        if (buf_size < 8 + video_data_size + s->decoding_map_size)
            return AVERROR_INVALIDDATA;

        bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
        s->decoding_map = buf + 8 + video_data_size;

        break;

    default:
        av_log(avctx, AV_LOG_ERROR, "Frame type 0x%02X unsupported\n", frame_format);
    }

    /* ensure we can't overread the packet */
    if (buf_size < 8 + s->decoding_map_size + video_data_size + s->skip_map_size) {
        av_log(avctx, AV_LOG_ERROR, "Invalid IP packet size\n");
        return AVERROR_INVALIDDATA;
    }

    if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
        return ret;

    if (!s->is_16bpp) {
        int size;
        const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, &size);
        if (pal && size == AVPALETTE_SIZE) {
            frame->palette_has_changed = 1;
            memcpy(s->pal, pal, AVPALETTE_SIZE);
        } else if (pal) {
            av_log(avctx, AV_LOG_ERROR, "Palette size %d is wrong\n", size);
        }
    }

    switch (frame_format) {
    case 0x06:
        ipvideo_decode_format_06_opcodes(s, frame);
        break;
    case 0x10:
        ipvideo_decode_format_10_opcodes(s, frame);
        break;
    case 0x11:
        ipvideo_decode_format_11_opcodes(s, frame);
        break;
    }

    *got_frame = send_buffer;

    /* shuffle frames */
    av_frame_unref(s->second_last_frame);
    FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
    if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
        return ret;

    /* report that the buffer was completely consumed */
    return buf_size;
}

static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    av_frame_free(&s->last_frame);
    av_frame_free(&s->second_last_frame);
    av_frame_free(&s->cur_decode_frame);
    av_frame_free(&s->prev_decode_frame);

    return 0;
}

AVCodec ff_interplay_video_decoder = {
    .name           = "interplayvideo",
    .long_name      = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_INTERPLAY_VIDEO,
    .priv_data_size = sizeof(IpvideoContext),
    .init           = ipvideo_decode_init,
    .close          = ipvideo_decode_end,
    .decode         = ipvideo_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_PARAM_CHANGE,
    .caps_internal  = FF_CODEC_CAP_INIT_CLEANUP,
};