Line | Branch | Exec | Source |
---|---|---|---|
1 | /* | ||
2 | * This file is part of FFmpeg. | ||
3 | * | ||
4 | * FFmpeg is free software; you can redistribute it and/or | ||
5 | * modify it under the terms of the GNU Lesser General Public | ||
6 | * License as published by the Free Software Foundation; either | ||
7 | * version 2.1 of the License, or (at your option) any later version. | ||
8 | * | ||
9 | * FFmpeg is distributed in the hope that it will be useful, | ||
10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
12 | * Lesser General Public License for more details. | ||
13 | * | ||
14 | * You should have received a copy of the GNU Lesser General Public | ||
15 | * License along with FFmpeg; if not, write to the Free Software | ||
16 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
17 | */ | ||
18 | |||
19 | #include <stdatomic.h> | ||
20 | |||
21 | #include "libavutil/mastering_display_metadata.h" | ||
22 | #include "libavutil/mem_internal.h" | ||
23 | #include "libavutil/pixdesc.h" | ||
24 | #include "libavutil/thread.h" | ||
25 | |||
26 | #include "apv.h" | ||
27 | #include "apv_decode.h" | ||
28 | #include "apv_dsp.h" | ||
29 | #include "avcodec.h" | ||
30 | #include "cbs.h" | ||
31 | #include "cbs_apv.h" | ||
32 | #include "codec_internal.h" | ||
33 | #include "decode.h" | ||
34 | #include "internal.h" | ||
35 | #include "thread.h" | ||
36 | |||
37 | |||
38 | typedef struct APVDerivedTileInfo { | ||
39 | uint8_t tile_cols; | ||
40 | uint8_t tile_rows; | ||
41 | uint16_t num_tiles; | ||
42 | // The spec uses an extra element on the end of these arrays | ||
43 | // not corresponding to any tile. | ||
44 | uint16_t col_starts[APV_MAX_TILE_COLS + 1]; | ||
45 | uint16_t row_starts[APV_MAX_TILE_ROWS + 1]; | ||
46 | } APVDerivedTileInfo; | ||
47 | |||
48 | typedef struct APVDecodeContext { | ||
49 | CodedBitstreamContext *cbc; | ||
50 | APVDSPContext dsp; | ||
51 | |||
52 | CodedBitstreamFragment au; | ||
53 | APVDerivedTileInfo tile_info; | ||
54 | |||
55 | AVPacket *pkt; | ||
56 | AVFrame *output_frame; | ||
57 | atomic_int tile_errors; | ||
58 | |||
59 | int nb_unit; | ||
60 | |||
61 | uint8_t warned_additional_frames; | ||
62 | uint8_t warned_unknown_pbu_types; | ||
63 | } APVDecodeContext; | ||
64 | |||
65 | static const enum AVPixelFormat apv_format_table[5][5] = { | ||
66 | { AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY10, AV_PIX_FMT_GRAY12, AV_PIX_FMT_GRAY14, AV_PIX_FMT_GRAY16 }, | ||
67 | { 0 }, // 4:2:0 is not valid. | ||
68 | { AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV422P16 }, | ||
69 | { AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV444P16 }, | ||
70 | { AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUVA444P10, AV_PIX_FMT_YUVA444P12, 0, AV_PIX_FMT_YUVA444P16 }, | ||
71 | }; | ||
72 | |||
73 | static APVVLCLUT decode_lut; | ||
74 | |||
75 | 12 | static int apv_decode_check_format(AVCodecContext *avctx, | |
76 | const APVRawFrameHeader *header) | ||
77 | { | ||
78 | int err, bit_depth; | ||
79 | |||
80 | 12 | avctx->profile = header->frame_info.profile_idc; | |
81 | 12 | avctx->level = header->frame_info.level_idc; | |
82 | |||
83 | 12 | bit_depth = header->frame_info.bit_depth_minus8 + 8; | |
84 | 3/6 | 12 | if (bit_depth < 8 || bit_depth > 16 || bit_depth % 2) {
85 | ✗ | avpriv_request_sample(avctx, "Bit depth %d", bit_depth); | |
86 | ✗ | return AVERROR_PATCHWELCOME; | |
87 | } | ||
88 | 12 | avctx->pix_fmt = | |
89 | 12 | apv_format_table[header->frame_info.chroma_format_idc][(bit_depth - 8) >> 1]; | |
90 | |||
91 | 1/2 | 12 | if (!avctx->pix_fmt) {
92 | ✗ | avpriv_request_sample(avctx, "YUVA444P14"); | |
93 | ✗ | return AVERROR_PATCHWELCOME; | |
94 | } | ||
95 | |||
96 | 12 | err = ff_set_dimensions(avctx, | |
97 | 12 | FFALIGN(header->frame_info.frame_width, 16), | |
98 | 12 | FFALIGN(header->frame_info.frame_height, 16)); | |
99 | 1/2 | 12 | if (err < 0) {
100 | // Unsupported frame size. | ||
101 | ✗ | return err; | |
102 | } | ||
103 | 12 | avctx->width = header->frame_info.frame_width; | |
104 | 12 | avctx->height = header->frame_info.frame_height; | |
105 | |||
106 | 12 | avctx->sample_aspect_ratio = (AVRational){ 1, 1 }; | |
107 | |||
108 | 12 | avctx->color_primaries = header->color_primaries; | |
109 | 12 | avctx->color_trc = header->transfer_characteristics; | |
110 | 12 | avctx->colorspace = header->matrix_coefficients; | |
111 | 24 | avctx->color_range = header->full_range_flag ? AVCOL_RANGE_JPEG | |
112 | 1/2 | 12 | : AVCOL_RANGE_MPEG;
113 | 12 | avctx->chroma_sample_location = AVCHROMA_LOC_TOPLEFT; | |
114 | |||
115 | 12 | avctx->refs = 0; | |
116 | 12 | avctx->has_b_frames = 0; | |
117 | |||
118 | 12 | return 0; | |
119 | } | ||
120 | |||
121 | static const CodedBitstreamUnitType apv_decompose_unit_types[] = { | ||
122 | APV_PBU_PRIMARY_FRAME, | ||
123 | APV_PBU_METADATA, | ||
124 | }; | ||
125 | |||
126 | static AVOnce apv_entropy_once = AV_ONCE_INIT; | ||
127 | |||
128 | 6 | static av_cold void apv_entropy_build_decode_lut(void) | |
129 | { | ||
130 | 6 | ff_apv_entropy_build_decode_lut(&decode_lut); | |
131 | 6 | } | |
132 | |||
133 | 8 | static av_cold int apv_decode_init(AVCodecContext *avctx) | |
134 | { | ||
135 | 8 | APVDecodeContext *apv = avctx->priv_data; | |
136 | int err; | ||
137 | |||
138 | 8 | ff_thread_once(&apv_entropy_once, apv_entropy_build_decode_lut); | |
139 | |||
140 | 8 | err = ff_cbs_init(&apv->cbc, AV_CODEC_ID_APV, avctx); | |
141 | 1/2 | 8 | if (err < 0)
142 | ✗ | return err; | |
143 | |||
144 | 8 | apv->cbc->decompose_unit_types = | |
145 | apv_decompose_unit_types; | ||
146 | 8 | apv->cbc->nb_decompose_unit_types = | |
147 | FF_ARRAY_ELEMS(apv_decompose_unit_types); | ||
148 | |||
149 | // Extradata could be set here, but is ignored by the decoder. | ||
150 | |||
151 | 8 | apv->pkt = avctx->internal->in_pkt; | |
152 | 8 | ff_apv_dsp_init(&apv->dsp); | |
153 | |||
154 | 8 | atomic_init(&apv->tile_errors, 0); | |
155 | |||
156 | 8 | return 0; | |
157 | } | ||
158 | |||
159 | ✗ | static void apv_decode_flush(AVCodecContext *avctx) | |
160 | { | ||
161 | ✗ | APVDecodeContext *apv = avctx->priv_data; | |
162 | |||
163 | ✗ | apv->nb_unit = 0; | |
164 | ✗ | av_packet_unref(apv->pkt); | |
165 | ✗ | ff_cbs_fragment_reset(&apv->au); | |
166 | ✗ | ff_cbs_flush(apv->cbc); | |
167 | ✗ | } | |
168 | |||
169 | 8 | static av_cold int apv_decode_close(AVCodecContext *avctx) | |
170 | { | ||
171 | 8 | APVDecodeContext *apv = avctx->priv_data; | |
172 | |||
173 | 8 | ff_cbs_fragment_free(&apv->au); | |
174 | 8 | ff_cbs_close(&apv->cbc); | |
175 | |||
176 | 8 | return 0; | |
177 | } | ||
178 | |||
179 | 8640 | static int apv_decode_block(AVCodecContext *avctx, | |
180 | void *output, | ||
181 | ptrdiff_t pitch, | ||
182 | GetBitContext *gbc, | ||
183 | APVEntropyState *entropy_state, | ||
184 | int bit_depth, | ||
185 | int qp_shift, | ||
186 | const uint16_t *qmatrix) | ||
187 | { | ||
188 | 8640 | APVDecodeContext *apv = avctx->priv_data; | |
189 | int err; | ||
190 | |||
191 | 8640 | LOCAL_ALIGNED_32(int16_t, coeff, [64]); | |
192 | 8640 | memset(coeff, 0, 64 * sizeof(int16_t)); | |
193 | |||
194 | 8640 | err = ff_apv_entropy_decode_block(coeff, gbc, entropy_state); | |
195 | 1/2 | 8640 | if (err < 0)
196 | ✗ | return err; | |
197 | |||
198 | 8640 | apv->dsp.decode_transquant(output, pitch, | |
199 | coeff, qmatrix, | ||
200 | bit_depth, qp_shift); | ||
201 | |||
202 | 8640 | return 0; | |
203 | } | ||
204 | |||
205 | 24 | static int apv_decode_tile_component(AVCodecContext *avctx, void *data, | |
206 | int job, int thread) | ||
207 | { | ||
208 | 24 | APVRawFrame *input = data; | |
209 | 24 | APVDecodeContext *apv = avctx->priv_data; | |
210 | 24 | const CodedBitstreamAPVContext *apv_cbc = apv->cbc->priv_data; | |
211 | 24 | const APVDerivedTileInfo *tile_info = &apv->tile_info; | |
212 | |||
213 | 24 | int tile_index = job / apv_cbc->num_comp; | |
214 | 24 | int comp_index = job % apv_cbc->num_comp; | |
215 | |||
216 | const AVPixFmtDescriptor *pix_fmt_desc = | ||
217 | 24 | av_pix_fmt_desc_get(avctx->pix_fmt); | |
218 | |||
219 | 2/2 | 24 | int sub_w_shift = comp_index == 0 ? 0 : pix_fmt_desc->log2_chroma_w;
220 | 2/2 | 24 | int sub_h_shift = comp_index == 0 ? 0 : pix_fmt_desc->log2_chroma_h;
221 | |||
222 | 24 | APVRawTile *tile = &input->tile[tile_index]; | |
223 | |||
224 | 24 | int tile_y = tile_index / tile_info->tile_cols; | |
225 | 24 | int tile_x = tile_index % tile_info->tile_cols; | |
226 | |||
227 | 24 | int tile_start_x = tile_info->col_starts[tile_x]; | |
228 | 24 | int tile_start_y = tile_info->row_starts[tile_y]; | |
229 | |||
230 | 24 | int tile_width = tile_info->col_starts[tile_x + 1] - tile_start_x; | |
231 | 24 | int tile_height = tile_info->row_starts[tile_y + 1] - tile_start_y; | |
232 | |||
233 | 24 | int tile_mb_width = tile_width / APV_MB_WIDTH; | |
234 | 24 | int tile_mb_height = tile_height / APV_MB_HEIGHT; | |
235 | |||
236 | 24 | int blk_mb_width = 2 >> sub_w_shift; | |
237 | 24 | int blk_mb_height = 2 >> sub_h_shift; | |
238 | |||
239 | int bit_depth; | ||
240 | int qp_shift; | ||
241 | 24 | LOCAL_ALIGNED_32(uint16_t, qmatrix_scaled, [64]); | |
242 | |||
243 | GetBitContext gbc; | ||
244 | |||
245 | 24 | APVEntropyState entropy_state = { | |
246 | .log_ctx = avctx, | ||
247 | .decode_lut = &decode_lut, | ||
248 | .prev_dc = 0, | ||
249 | .prev_k_dc = 5, | ||
250 | .prev_k_level = 0, | ||
251 | }; | ||
252 | |||
253 | int err; | ||
254 | |||
255 | 24 | err = init_get_bits8(&gbc, tile->tile_data[comp_index], | |
256 | 24 | tile->tile_header.tile_data_size[comp_index]); | |
257 | 1/2 | 24 | if (err < 0)
258 | ✗ | goto fail; | |
259 | |||
260 | // Combine the bitstream quantisation matrix with the qp scaling | ||
261 | // in advance. (Including qp_shift as well would overflow 16 bits.) | ||
262 | // Fix the row ordering at the same time. | ||
263 | { | ||
264 | static const uint8_t apv_level_scale[6] = { 40, 45, 51, 57, 64, 71 }; | ||
265 | 24 | int qp = tile->tile_header.tile_qp[comp_index]; | |
266 | 24 | int level_scale = apv_level_scale[qp % 6]; | |
267 | |||
268 | 24 | bit_depth = apv_cbc->bit_depth; | |
269 | 24 | qp_shift = qp / 6; | |
270 | |||
271 | 2/2 | 216 | for (int y = 0; y < 8; y++) {
272 | 2/2 | 1728 | for (int x = 0; x < 8; x++)
273 | 1536 | qmatrix_scaled[y * 8 + x] = level_scale * | |
274 | 1536 | input->frame_header.quantization_matrix.q_matrix[comp_index][x][y]; | |
275 | } | ||
276 | } | ||
277 | |||
278 | 2/2 | 312 | for (int mb_y = 0; mb_y < tile_mb_height; mb_y++) {
279 | 2/2 | 3168 | for (int mb_x = 0; mb_x < tile_mb_width; mb_x++) {
280 | 2/2 | 8640 | for (int blk_y = 0; blk_y < blk_mb_height; blk_y++) {
281 | 2/2 | 14400 | for (int blk_x = 0; blk_x < blk_mb_width; blk_x++) {
282 | 8640 | int frame_y = (tile_start_y + | |
283 | 8640 | APV_MB_HEIGHT * mb_y + | |
284 | 8640 | APV_TR_SIZE * blk_y) >> sub_h_shift; | |
285 | 8640 | int frame_x = (tile_start_x + | |
286 | 8640 | APV_MB_WIDTH * mb_x + | |
287 | 8640 | APV_TR_SIZE * blk_x) >> sub_w_shift; | |
288 | |||
289 | 8640 | ptrdiff_t frame_pitch = apv->output_frame->linesize[comp_index]; | |
290 | 8640 | uint8_t *block_start = apv->output_frame->data[comp_index] + | |
291 | 8640 | frame_y * frame_pitch + 2 * frame_x; | |
292 | |||
293 | 8640 | err = apv_decode_block(avctx, | |
294 | block_start, frame_pitch, | ||
295 | &gbc, &entropy_state, | ||
296 | bit_depth, | ||
297 | qp_shift, | ||
298 | qmatrix_scaled); | ||
299 | 1/2 | 8640 | if (err < 0) {
300 | // Error in block decode means entropy desync, | ||
301 | // so this is not recoverable. | ||
302 | ✗ | goto fail; | |
303 | } | ||
304 | } | ||
305 | } | ||
306 | } | ||
307 | } | ||
308 | |||
309 | 24 | av_log(avctx, AV_LOG_DEBUG, | |
310 | "Decoded tile %d component %d: %dx%d MBs starting at (%d,%d)\n", | ||
311 | tile_index, comp_index, tile_mb_width, tile_mb_height, | ||
312 | tile_start_x, tile_start_y); | ||
313 | |||
314 | 24 | return 0; | |
315 | |||
316 | ✗ | fail: | |
317 | ✗ | av_log(avctx, AV_LOG_VERBOSE, | |
318 | "Decode error in tile %d component %d.\n", | ||
319 | tile_index, comp_index); | ||
320 | ✗ | atomic_fetch_add_explicit(&apv->tile_errors, 1, memory_order_relaxed); | |
321 | ✗ | return err; | |
322 | } | ||
323 | |||
324 | 6 | static void apv_derive_tile_info(APVDerivedTileInfo *ti, | |
325 | const APVRawFrameHeader *fh) | ||
326 | { | ||
327 | 6 | int frame_width_in_mbs = (fh->frame_info.frame_width + (APV_MB_WIDTH - 1)) >> 4; | |
328 | 6 | int frame_height_in_mbs = (fh->frame_info.frame_height + (APV_MB_HEIGHT - 1)) >> 4; | |
329 | int start_mb, i; | ||
330 | |||
331 | 6 | start_mb = 0; | |
332 | 2/2 | 18 | for (i = 0; start_mb < frame_width_in_mbs; i++) {
333 | 12 | ti->col_starts[i] = start_mb * APV_MB_WIDTH; | |
334 | 12 | start_mb += fh->tile_info.tile_width_in_mbs; | |
335 | } | ||
336 | 6 | ti->col_starts[i] = frame_width_in_mbs * APV_MB_WIDTH; | |
337 | 6 | ti->tile_cols = i; | |
338 | |||
339 | 6 | start_mb = 0; | |
340 | 2/2 | 12 | for (i = 0; start_mb < frame_height_in_mbs; i++) {
341 | 6 | ti->row_starts[i] = start_mb * APV_MB_HEIGHT; | |
342 | 6 | start_mb += fh->tile_info.tile_height_in_mbs; | |
343 | } | ||
344 | 6 | ti->row_starts[i] = frame_height_in_mbs * APV_MB_HEIGHT; | |
345 | 6 | ti->tile_rows = i; | |
346 | |||
347 | 6 | ti->num_tiles = ti->tile_cols * ti->tile_rows; | |
348 | 6 | } | |
349 | |||
350 | 12 | static int apv_decode(AVCodecContext *avctx, AVFrame *output, | |
351 | APVRawFrame *input) | ||
352 | { | ||
353 | 12 | APVDecodeContext *apv = avctx->priv_data; | |
354 | 12 | const AVPixFmtDescriptor *desc = NULL; | |
355 | 12 | APVDerivedTileInfo *tile_info = &apv->tile_info; | |
356 | int err, job_count; | ||
357 | |||
358 | 12 | err = apv_decode_check_format(avctx, &input->frame_header); | |
359 | 1/2 | 12 | if (err < 0) {
360 | ✗ | av_log(avctx, AV_LOG_ERROR, "Unsupported format parameters.\n"); | |
361 | ✗ | return err; | |
362 | } | ||
363 | |||
364 | 2/2 | 12 | if (avctx->skip_frame == AVDISCARD_ALL)
365 | 6 | return 0; | |
366 | |||
367 | 6 | desc = av_pix_fmt_desc_get(avctx->pix_fmt); | |
368 | 1/2 | 6 | av_assert0(desc);
369 | |||
370 | 6 | err = ff_thread_get_buffer(avctx, output, 0); | |
371 | 1/2 | 6 | if (err < 0)
372 | ✗ | return err; | |
373 | |||
374 | 6 | apv->output_frame = output; | |
375 | 6 | atomic_store_explicit(&apv->tile_errors, 0, memory_order_relaxed); | |
376 | |||
377 | 6 | apv_derive_tile_info(tile_info, &input->frame_header); | |
378 | |||
379 | // Each component within a tile is independent of every other, | ||
380 | // so we can decode all in parallel. | ||
381 | 6 | job_count = tile_info->num_tiles * desc->nb_components; | |
382 | |||
383 | 6 | avctx->execute2(avctx, apv_decode_tile_component, | |
384 | input, NULL, job_count); | ||
385 | |||
386 | 6 | err = atomic_load_explicit(&apv->tile_errors, memory_order_relaxed); | |
387 | 1/2 | 6 | if (err > 0) {
388 | ✗ | av_log(avctx, AV_LOG_ERROR, | |
389 | "Decode errors in %d tile components.\n", err); | ||
390 | ✗ | if (avctx->flags & AV_CODEC_FLAG_OUTPUT_CORRUPT) { | |
391 | // Output the frame anyway. | ||
392 | ✗ | output->flags |= AV_FRAME_FLAG_CORRUPT; | |
393 | } else { | ||
394 | ✗ | return AVERROR_INVALIDDATA; | |
395 | } | ||
396 | } | ||
397 | |||
398 | 6 | return 0; | |
399 | } | ||
400 | |||
401 | 12 | static int apv_decode_metadata(AVCodecContext *avctx, AVFrame *frame, | |
402 | const APVRawMetadata *md) | ||
403 | { | ||
404 | int err; | ||
405 | |||
406 | 2/2 | 36 | for (int i = 0; i < md->metadata_count; i++) {
407 | 24 | const APVRawMetadataPayload *pl = &md->payloads[i]; | |
408 | |||
409 | 1/3 | 24 | switch (pl->payload_type) {
410 | ✗ | case APV_METADATA_MDCV: | |
411 | { | ||
412 | ✗ | const APVRawMetadataMDCV *mdcv = &pl->mdcv; | |
413 | AVMasteringDisplayMetadata *mdm; | ||
414 | |||
415 | ✗ | err = ff_decode_mastering_display_new(avctx, frame, &mdm); | |
416 | ✗ | if (err < 0) | |
417 | ✗ | return err; | |
418 | |||
419 | ✗ | if (mdm) { | |
420 | ✗ | for (int j = 0; j < 3; j++) { | |
421 | ✗ | mdm->display_primaries[j][0] = | |
422 | ✗ | av_make_q(mdcv->primary_chromaticity_x[j], 1 << 16); | |
423 | ✗ | mdm->display_primaries[j][1] = | |
424 | ✗ | av_make_q(mdcv->primary_chromaticity_y[j], 1 << 16); | |
425 | } | ||
426 | |||
427 | ✗ | mdm->white_point[0] = | |
428 | ✗ | av_make_q(mdcv->white_point_chromaticity_x, 1 << 16); | |
429 | ✗ | mdm->white_point[1] = | |
430 | ✗ | av_make_q(mdcv->white_point_chromaticity_y, 1 << 16); | |
431 | |||
432 | ✗ | mdm->max_luminance = | |
433 | ✗ | av_make_q(mdcv->max_mastering_luminance, 1 << 8); | |
434 | ✗ | mdm->min_luminance = | |
435 | ✗ | av_make_q(mdcv->min_mastering_luminance, 1 << 14); | |
436 | |||
437 | ✗ | mdm->has_primaries = 1; | |
438 | ✗ | mdm->has_luminance = 1; | |
439 | } | ||
440 | } | ||
441 | ✗ | break; | |
442 | ✗ | case APV_METADATA_CLL: | |
443 | { | ||
444 | ✗ | const APVRawMetadataCLL *cll = &pl->cll; | |
445 | AVContentLightMetadata *clm; | ||
446 | |||
447 | ✗ | err = ff_decode_content_light_new(avctx, frame, &clm); | |
448 | ✗ | if (err < 0) | |
449 | ✗ | return err; | |
450 | |||
451 | ✗ | if (clm) { | |
452 | ✗ | clm->MaxCLL = cll->max_cll; | |
453 | ✗ | clm->MaxFALL = cll->max_fall; | |
454 | } | ||
455 | } | ||
456 | ✗ | break; | |
457 | 24 | default: | |
458 | // Ignore other types of metadata. | ||
459 | 24 | break; | |
460 | } | ||
461 | } | ||
462 | |||
463 | 12 | return 0; | |
464 | } | ||
465 | |||
466 | 24 | static int apv_receive_frame_internal(AVCodecContext *avctx, AVFrame *frame) | |
467 | { | ||
468 | 24 | APVDecodeContext *apv = avctx->priv_data; | |
469 | 24 | CodedBitstreamFragment *au = &apv->au; | |
470 | int i, err; | ||
471 | |||
472 | 2/2 | 36 | for (i = apv->nb_unit; i < au->nb_units; i++) {
473 | 24 | CodedBitstreamUnit *pbu = &au->units[i]; | |
474 | |||
475 | 2/5 | 24 | switch (pbu->type) {
476 | 12 | case APV_PBU_PRIMARY_FRAME: | |
477 | 12 | err = apv_decode(avctx, frame, pbu->content); | |
478 | 12 | i++; | |
479 | 12 | goto end; | |
480 | 12 | case APV_PBU_METADATA: | |
481 | 12 | apv_decode_metadata(avctx, frame, pbu->content); | |
482 | 12 | break; | |
483 | ✗ | case APV_PBU_NON_PRIMARY_FRAME: | |
484 | case APV_PBU_PREVIEW_FRAME: | ||
485 | case APV_PBU_DEPTH_FRAME: | ||
486 | case APV_PBU_ALPHA_FRAME: | ||
487 | ✗ | if (!avctx->internal->is_copy && | |
488 | ✗ | !apv->warned_additional_frames) { | |
489 | ✗ | av_log(avctx, AV_LOG_WARNING, | |
490 | "Stream contains additional non-primary frames " | ||
491 | "which will be ignored by the decoder.\n"); | ||
492 | ✗ | apv->warned_additional_frames = 1; | |
493 | } | ||
494 | ✗ | break; | |
495 | ✗ | case APV_PBU_ACCESS_UNIT_INFORMATION: | |
496 | case APV_PBU_FILLER: | ||
497 | // Not relevant to the decoder. | ||
498 | ✗ | break; | |
499 | ✗ | default: | |
500 | ✗ | if (!avctx->internal->is_copy && | |
501 | ✗ | !apv->warned_unknown_pbu_types) { | |
502 | ✗ | av_log(avctx, AV_LOG_WARNING, | |
503 | "Stream contains PBUs with unknown types " | ||
504 | "which will be ignored by the decoder.\n"); | ||
505 | ✗ | apv->warned_unknown_pbu_types = 1; | |
506 | } | ||
507 | ✗ | break; | |
508 | } | ||
509 | } | ||
510 | |||
511 | 12 | err = AVERROR(EAGAIN); | |
512 | 24 | end: | |
513 | 1/2 | 24 | av_assert0(i <= apv->au.nb_units);
514 | 24 | apv->nb_unit = i; | |
515 | |||
516 | 5/6 | 24 | if ((err < 0 && err != AVERROR(EAGAIN)) || apv->au.nb_units == i) {
517 | 12 | av_packet_unref(apv->pkt); | |
518 | 12 | ff_cbs_fragment_reset(&apv->au); | |
519 | 12 | apv->nb_unit = 0; | |
520 | } | ||
521 | 4/4 | 24 | if (!err && !frame->buf[0])
522 | 6 | err = AVERROR(EAGAIN); | |
523 | |||
524 | 24 | return err; | |
525 | } | ||
526 | |||
527 | 26 | static int apv_receive_frame(AVCodecContext *avctx, AVFrame *frame) | |
528 | { | ||
529 | 26 | APVDecodeContext *apv = avctx->priv_data; | |
530 | int err; | ||
531 | |||
532 | do { | ||
533 | 2/2 | 44 | if (!apv->au.nb_units) {
534 | 32 | err = ff_decode_get_packet(avctx, apv->pkt); | |
535 | 2/2 | 32 | if (err < 0)
536 | 20 | return err; | |
537 | |||
538 | 12 | err = ff_cbs_read_packet(apv->cbc, &apv->au, apv->pkt); | |
539 | 1/2 | 12 | if (err < 0) {
540 | ✗ | ff_cbs_fragment_reset(&apv->au); | |
541 | ✗ | av_packet_unref(apv->pkt); | |
542 | ✗ | av_log(avctx, AV_LOG_ERROR, "Failed to read packet.\n"); | |
543 | ✗ | return err; | |
544 | } | ||
545 | |||
546 | 12 | apv->nb_unit = 0; | |
547 | 12 | av_log(avctx, AV_LOG_DEBUG, "Total PBUs on this packet: %d.\n", | |
548 | apv->au.nb_units); | ||
549 | } | ||
550 | |||
551 | 24 | err = apv_receive_frame_internal(avctx, frame); | |
552 | 2/2 | 24 | } while (err == AVERROR(EAGAIN));
553 | |||
554 | 6 | return err; | |
555 | } | ||
556 | |||
557 | const FFCodec ff_apv_decoder = { | ||
558 | .p.name = "apv", | ||
559 | CODEC_LONG_NAME("Advanced Professional Video"), | ||
560 | .p.type = AVMEDIA_TYPE_VIDEO, | ||
561 | .p.id = AV_CODEC_ID_APV, | ||
562 | .priv_data_size = sizeof(APVDecodeContext), | ||
563 | .init = apv_decode_init, | ||
564 | .flush = apv_decode_flush, | ||
565 | .close = apv_decode_close, | ||
566 | FF_CODEC_RECEIVE_FRAME_CB(apv_receive_frame), | ||
567 | .p.capabilities = AV_CODEC_CAP_DR1 | | ||
568 | AV_CODEC_CAP_SLICE_THREADS | | ||
569 | AV_CODEC_CAP_FRAME_THREADS, | ||
570 | .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM, | ||
571 | }; | ||
572 |
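
For context on how the decoder covered above is reached from application code, the sketch below drives it through the standard libavcodec send/receive API. It is illustrative only and not part of the measured file: the helper name `decode_apv_packet` is made up, the caller is assumed to already hold a complete demuxed APV access unit in `pkt`, and error handling is trimmed to early returns.

```c
#include <libavcodec/avcodec.h>

// Minimal sketch: decode one already-demuxed APV access unit into `frame`.
// A real application would keep the codec context open across packets and
// run the usual send/receive loop, handling AVERROR(EAGAIN) and draining
// with a NULL packet at end of stream.
static int decode_apv_packet(const AVPacket *pkt, AVFrame *frame)
{
    const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_APV);
    AVCodecContext *ctx;
    int ret;

    if (!codec)
        return AVERROR_DECODER_NOT_FOUND;

    ctx = avcodec_alloc_context3(codec);
    if (!ctx)
        return AVERROR(ENOMEM);

    ret = avcodec_open2(ctx, codec, NULL);
    if (ret < 0)
        goto end;

    // Push the packet in, then pull the decoded frame back out.
    // APV is intra-only, so a frame is normally available immediately.
    ret = avcodec_send_packet(ctx, pkt);
    if (ret < 0)
        goto end;

    ret = avcodec_receive_frame(ctx, frame);

end:
    avcodec_free_context(&ctx);
    return ret;
}
```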