FFmpeg coverage


Directory: ../../../ffmpeg/
File: src/libavcodec/apv_decode.c
Date: 2025-05-09 06:10:30
Exec Total Coverage
Lines: 141 207 68.1%
Functions: 9 9 100.0%
Branches: 37 76 48.7%

Line Branch Exec Source
1 /*
2 * This file is part of FFmpeg.
3 *
4 * FFmpeg is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
8 *
9 * FFmpeg is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
13 *
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with FFmpeg; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 */
18
19 #include <stdatomic.h>
20
21 #include "libavutil/mastering_display_metadata.h"
22 #include "libavutil/mem_internal.h"
23 #include "libavutil/pixdesc.h"
24 #include "libavutil/thread.h"
25
26 #include "apv.h"
27 #include "apv_decode.h"
28 #include "apv_dsp.h"
29 #include "avcodec.h"
30 #include "cbs.h"
31 #include "cbs_apv.h"
32 #include "codec_internal.h"
33 #include "decode.h"
34 #include "internal.h"
35 #include "thread.h"
36
37
38 typedef struct APVDecodeContext {
39 CodedBitstreamContext *cbc;
40 APVDSPContext dsp;
41
42 CodedBitstreamFragment au;
43 APVDerivedTileInfo tile_info;
44
45 AVFrame *output_frame;
46 atomic_int tile_errors;
47
48 uint8_t warned_additional_frames;
49 uint8_t warned_unknown_pbu_types;
50 } APVDecodeContext;
51
52 static const enum AVPixelFormat apv_format_table[5][5] = {
53 { AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY10, AV_PIX_FMT_GRAY12, AV_PIX_FMT_GRAY14, AV_PIX_FMT_GRAY16 },
54 { 0 }, // 4:2:0 is not valid.
55 { AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_GRAY14, AV_PIX_FMT_YUV422P16 },
56 { AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV444P12, AV_PIX_FMT_GRAY14, AV_PIX_FMT_YUV444P16 },
57 { AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUVA444P10, AV_PIX_FMT_YUVA444P12, AV_PIX_FMT_GRAY14, AV_PIX_FMT_YUVA444P16 },
58 };
59
60 static APVVLCLUT decode_lut;
61
62 5 static int apv_decode_check_format(AVCodecContext *avctx,
63 const APVRawFrameHeader *header)
64 {
65 int err, bit_depth;
66
67 5 avctx->profile = header->frame_info.profile_idc;
68 5 avctx->level = header->frame_info.level_idc;
69
70 5 bit_depth = header->frame_info.bit_depth_minus8 + 8;
71
3/6
✓ Branch 0 taken 5 times.
✗ Branch 1 not taken.
✓ Branch 2 taken 5 times.
✗ Branch 3 not taken.
✗ Branch 4 not taken.
✓ Branch 5 taken 5 times.
5 if (bit_depth < 8 || bit_depth > 16 || bit_depth % 2) {
72 avpriv_request_sample(avctx, "Bit depth %d", bit_depth);
73 return AVERROR_PATCHWELCOME;
74 }
75 5 avctx->pix_fmt =
76 5 apv_format_table[header->frame_info.chroma_format_idc][bit_depth - 4 >> 2];
77
78 5 err = ff_set_dimensions(avctx,
79 5 FFALIGN(header->frame_info.frame_width, 16),
80 5 FFALIGN(header->frame_info.frame_height, 16));
81
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 5 times.
5 if (err < 0) {
82 // Unsupported frame size.
83 return err;
84 }
85 5 avctx->width = header->frame_info.frame_width;
86 5 avctx->height = header->frame_info.frame_height;
87
88 5 avctx->sample_aspect_ratio = (AVRational){ 1, 1 };
89
90 5 avctx->color_primaries = header->color_primaries;
91 5 avctx->color_trc = header->transfer_characteristics;
92 5 avctx->colorspace = header->matrix_coefficients;
93 10 avctx->color_range = header->full_range_flag ? AVCOL_RANGE_JPEG
94
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 5 times.
5 : AVCOL_RANGE_MPEG;
95 5 avctx->chroma_sample_location = AVCHROMA_LOC_TOPLEFT;
96
97 5 avctx->refs = 0;
98 5 avctx->has_b_frames = 0;
99
100 5 return 0;
101 }
102
103 static const CodedBitstreamUnitType apv_decompose_unit_types[] = {
104 APV_PBU_PRIMARY_FRAME,
105 APV_PBU_METADATA,
106 };
107
108 static AVOnce apv_entropy_once = AV_ONCE_INIT;
109
110 2 static av_cold void apv_entropy_build_decode_lut(void)
111 {
112 2 ff_apv_entropy_build_decode_lut(&decode_lut);
113 2 }
114
115 3 static av_cold int apv_decode_init(AVCodecContext *avctx)
116 {
117 3 APVDecodeContext *apv = avctx->priv_data;
118 int err;
119
120 3 ff_thread_once(&apv_entropy_once, apv_entropy_build_decode_lut);
121
122 3 err = ff_cbs_init(&apv->cbc, AV_CODEC_ID_APV, avctx);
123
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 3 times.
3 if (err < 0)
124 return err;
125
126 3 apv->cbc->decompose_unit_types =
127 apv_decompose_unit_types;
128 3 apv->cbc->nb_decompose_unit_types =
129 FF_ARRAY_ELEMS(apv_decompose_unit_types);
130
131 // Extradata could be set here, but is ignored by the decoder.
132
133 3 ff_apv_dsp_init(&apv->dsp);
134
135 3 atomic_init(&apv->tile_errors, 0);
136
137 3 return 0;
138 }
139
140 3 static av_cold int apv_decode_close(AVCodecContext *avctx)
141 {
142 3 APVDecodeContext *apv = avctx->priv_data;
143
144 3 ff_cbs_fragment_free(&apv->au);
145 3 ff_cbs_close(&apv->cbc);
146
147 3 return 0;
148 }
149
150 9600 static int apv_decode_block(AVCodecContext *avctx,
151 void *output,
152 ptrdiff_t pitch,
153 GetBitContext *gbc,
154 APVEntropyState *entropy_state,
155 int bit_depth,
156 int qp_shift,
157 const uint16_t *qmatrix)
158 {
159 9600 APVDecodeContext *apv = avctx->priv_data;
160 int err;
161
162 9600 LOCAL_ALIGNED_32(int16_t, coeff, [64]);
163
164 9600 err = ff_apv_entropy_decode_block(coeff, gbc, entropy_state);
165
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 9600 times.
9600 if (err < 0)
166 return err;
167
168 9600 apv->dsp.decode_transquant(output, pitch,
169 coeff, qmatrix,
170 bit_depth, qp_shift);
171
172 9600 return 0;
173 }
174
175 30 static int apv_decode_tile_component(AVCodecContext *avctx, void *data,
176 int job, int thread)
177 {
178 30 APVRawFrame *input = data;
179 30 APVDecodeContext *apv = avctx->priv_data;
180 30 const CodedBitstreamAPVContext *apv_cbc = apv->cbc->priv_data;
181 30 const APVDerivedTileInfo *tile_info = &apv_cbc->tile_info;
182
183 30 int tile_index = job / apv_cbc->num_comp;
184 30 int comp_index = job % apv_cbc->num_comp;
185
186 const AVPixFmtDescriptor *pix_fmt_desc =
187 30 av_pix_fmt_desc_get(avctx->pix_fmt);
188
189
2/2
✓ Branch 0 taken 20 times.
✓ Branch 1 taken 10 times.
30 int sub_w_shift = comp_index == 0 ? 0 : pix_fmt_desc->log2_chroma_w;
190
2/2
✓ Branch 0 taken 20 times.
✓ Branch 1 taken 10 times.
30 int sub_h_shift = comp_index == 0 ? 0 : pix_fmt_desc->log2_chroma_h;
191
192 30 APVRawTile *tile = &input->tile[tile_index];
193
194 30 int tile_y = tile_index / tile_info->tile_cols;
195 30 int tile_x = tile_index % tile_info->tile_cols;
196
197 30 int tile_start_x = tile_info->col_starts[tile_x];
198 30 int tile_start_y = tile_info->row_starts[tile_y];
199
200 30 int tile_width = tile_info->col_starts[tile_x + 1] - tile_start_x;
201 30 int tile_height = tile_info->row_starts[tile_y + 1] - tile_start_y;
202
203 30 int tile_mb_width = tile_width / APV_MB_WIDTH;
204 30 int tile_mb_height = tile_height / APV_MB_HEIGHT;
205
206 30 int blk_mb_width = 2 >> sub_w_shift;
207 30 int blk_mb_height = 2 >> sub_h_shift;
208
209 int bit_depth;
210 int qp_shift;
211 30 LOCAL_ALIGNED_32(uint16_t, qmatrix_scaled, [64]);
212
213 GetBitContext gbc;
214
215 30 APVEntropyState entropy_state = {
216 .log_ctx = avctx,
217 .decode_lut = &decode_lut,
218 .prev_dc = 0,
219 .prev_dc_diff = 20,
220 .prev_1st_ac_level = 0,
221 };
222
223 int err;
224
225 30 err = init_get_bits8(&gbc, tile->tile_data[comp_index],
226 30 tile->tile_header.tile_data_size[comp_index]);
227
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 30 times.
30 if (err < 0)
228 goto fail;
229
230 // Combine the bitstream quantisation matrix with the qp scaling
231 // in advance. (Including qp_shift as well would overflow 16 bits.)
232 // Fix the row ordering at the same time.
233 {
234 static const uint8_t apv_level_scale[6] = { 40, 45, 51, 57, 64, 71 };
235 30 int qp = tile->tile_header.tile_qp[comp_index];
236 30 int level_scale = apv_level_scale[qp % 6];
237
238 30 bit_depth = apv_cbc->bit_depth;
239 30 qp_shift = qp / 6;
240
241
2/2
✓ Branch 0 taken 240 times.
✓ Branch 1 taken 30 times.
270 for (int y = 0; y < 8; y++) {
242
2/2
✓ Branch 0 taken 1920 times.
✓ Branch 1 taken 240 times.
2160 for (int x = 0; x < 8; x++)
243 1920 qmatrix_scaled[y * 8 + x] = level_scale *
244 1920 input->frame_header.quantization_matrix.q_matrix[comp_index][x][y];
245 }
246 }
247
248
2/2
✓ Branch 0 taken 360 times.
✓ Branch 1 taken 30 times.
390 for (int mb_y = 0; mb_y < tile_mb_height; mb_y++) {
249
2/2
✓ Branch 0 taken 3600 times.
✓ Branch 1 taken 360 times.
3960 for (int mb_x = 0; mb_x < tile_mb_width; mb_x++) {
250
2/2
✓ Branch 0 taken 7200 times.
✓ Branch 1 taken 3600 times.
10800 for (int blk_y = 0; blk_y < blk_mb_height; blk_y++) {
251
2/2
✓ Branch 0 taken 9600 times.
✓ Branch 1 taken 7200 times.
16800 for (int blk_x = 0; blk_x < blk_mb_width; blk_x++) {
252 9600 int frame_y = (tile_start_y +
253 9600 APV_MB_HEIGHT * mb_y +
254 9600 APV_TR_SIZE * blk_y) >> sub_h_shift;
255 9600 int frame_x = (tile_start_x +
256 9600 APV_MB_WIDTH * mb_x +
257 9600 APV_TR_SIZE * blk_x) >> sub_w_shift;
258
259 9600 ptrdiff_t frame_pitch = apv->output_frame->linesize[comp_index];
260 9600 uint8_t *block_start = apv->output_frame->data[comp_index] +
261 9600 frame_y * frame_pitch + 2 * frame_x;
262
263 9600 err = apv_decode_block(avctx,
264 block_start, frame_pitch,
265 &gbc, &entropy_state,
266 bit_depth,
267 qp_shift,
268 qmatrix_scaled);
269
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 9600 times.
9600 if (err < 0) {
270 // Error in block decode means entropy desync,
271 // so this is not recoverable.
272 goto fail;
273 }
274 }
275 }
276 }
277 }
278
279 30 av_log(avctx, AV_LOG_DEBUG,
280 "Decoded tile %d component %d: %dx%d MBs starting at (%d,%d)\n",
281 tile_index, comp_index, tile_mb_width, tile_mb_height,
282 tile_start_x, tile_start_y);
283
284 30 return 0;
285
286 fail:
287 av_log(avctx, AV_LOG_VERBOSE,
288 "Decode error in tile %d component %d.\n",
289 tile_index, comp_index);
290 atomic_fetch_add_explicit(&apv->tile_errors, 1, memory_order_relaxed);
291 return err;
292 }
293
294 5 static int apv_decode(AVCodecContext *avctx, AVFrame *output,
295 APVRawFrame *input)
296 {
297 5 APVDecodeContext *apv = avctx->priv_data;
298 5 const CodedBitstreamAPVContext *apv_cbc = apv->cbc->priv_data;
299 5 const APVDerivedTileInfo *tile_info = &apv_cbc->tile_info;
300 int err, job_count;
301
302 5 err = apv_decode_check_format(avctx, &input->frame_header);
303
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 5 times.
5 if (err < 0) {
304 av_log(avctx, AV_LOG_ERROR, "Unsupported format parameters.\n");
305 return err;
306 }
307
308 5 err = ff_thread_get_buffer(avctx, output, 0);
309
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 5 times.
5 if (err < 0)
310 return err;
311
312 5 apv->output_frame = output;
313 5 atomic_store_explicit(&apv->tile_errors, 0, memory_order_relaxed);
314
315 // Each component within a tile is independent of every other,
316 // so we can decode all in parallel.
317 5 job_count = tile_info->num_tiles * apv_cbc->num_comp;
318
319 5 avctx->execute2(avctx, apv_decode_tile_component,
320 input, NULL, job_count);
321
322 5 err = atomic_load_explicit(&apv->tile_errors, memory_order_relaxed);
323
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 5 times.
5 if (err > 0) {
324 av_log(avctx, AV_LOG_ERROR,
325 "Decode errors in %d tile components.\n", err);
326 if (avctx->flags & AV_CODEC_FLAG_OUTPUT_CORRUPT) {
327 // Output the frame anyway.
328 output->flags |= AV_FRAME_FLAG_CORRUPT;
329 } else {
330 return AVERROR_INVALIDDATA;
331 }
332 }
333
334 5 return 0;
335 }
336
337 5 static int apv_decode_metadata(AVCodecContext *avctx, AVFrame *frame,
338 const APVRawMetadata *md)
339 {
340 int err;
341
342
2/2
✓ Branch 0 taken 10 times.
✓ Branch 1 taken 5 times.
15 for (int i = 0; i < md->metadata_count; i++) {
343 10 const APVRawMetadataPayload *pl = &md->payloads[i];
344
345
1/3
✗ Branch 0 not taken.
✗ Branch 1 not taken.
✓ Branch 2 taken 10 times.
10 switch (pl->payload_type) {
346 case APV_METADATA_MDCV:
347 {
348 const APVRawMetadataMDCV *mdcv = &pl->mdcv;
349 AVMasteringDisplayMetadata *mdm;
350
351 err = ff_decode_mastering_display_new(avctx, frame, &mdm);
352 if (err < 0)
353 return err;
354
355 if (mdm) {
356 for (int j = 0; j < 3; j++) {
357 mdm->display_primaries[j][0] =
358 av_make_q(mdcv->primary_chromaticity_x[j], 1 << 16);
359 mdm->display_primaries[j][1] =
360 av_make_q(mdcv->primary_chromaticity_y[j], 1 << 16);
361 }
362
363 mdm->white_point[0] =
364 av_make_q(mdcv->white_point_chromaticity_x, 1 << 16);
365 mdm->white_point[1] =
366 av_make_q(mdcv->white_point_chromaticity_y, 1 << 16);
367
368 mdm->max_luminance =
369 av_make_q(mdcv->max_mastering_luminance, 1 << 8);
370 mdm->min_luminance =
371 av_make_q(mdcv->min_mastering_luminance, 1 << 14);
372
373 mdm->has_primaries = 1;
374 mdm->has_luminance = 1;
375 }
376 }
377 break;
378 case APV_METADATA_CLL:
379 {
380 const APVRawMetadataCLL *cll = &pl->cll;
381 AVContentLightMetadata *clm;
382
383 err = ff_decode_content_light_new(avctx, frame, &clm);
384 if (err < 0)
385 return err;
386
387 if (clm) {
388 clm->MaxCLL = cll->max_cll;
389 clm->MaxFALL = cll->max_fall;
390 }
391 }
392 break;
393 10 default:
394 // Ignore other types of metadata.
395 10 break;
396 }
397 }
398
399 5 return 0;
400 }
401
402 5 static int apv_decode_frame(AVCodecContext *avctx, AVFrame *frame,
403 int *got_frame, AVPacket *packet)
404 {
405 5 APVDecodeContext *apv = avctx->priv_data;
406 5 CodedBitstreamFragment *au = &apv->au;
407 int err;
408
409 5 err = ff_cbs_read_packet(apv->cbc, au, packet);
410
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 5 times.
5 if (err < 0) {
411 av_log(avctx, AV_LOG_ERROR, "Failed to read packet.\n");
412 goto fail;
413 }
414
415
2/2
✓ Branch 0 taken 10 times.
✓ Branch 1 taken 5 times.
15 for (int i = 0; i < au->nb_units; i++) {
416 10 CodedBitstreamUnit *pbu = &au->units[i];
417
418
2/5
✓ Branch 0 taken 5 times.
✓ Branch 1 taken 5 times.
✗ Branch 2 not taken.
✗ Branch 3 not taken.
✗ Branch 4 not taken.
10 switch (pbu->type) {
419 5 case APV_PBU_PRIMARY_FRAME:
420 5 err = apv_decode(avctx, frame, pbu->content);
421
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 5 times.
5 if (err < 0)
422 goto fail;
423 5 *got_frame = 1;
424 5 break;
425 5 case APV_PBU_METADATA:
426 5 apv_decode_metadata(avctx, frame, pbu->content);
427 5 break;
428 case APV_PBU_NON_PRIMARY_FRAME:
429 case APV_PBU_PREVIEW_FRAME:
430 case APV_PBU_DEPTH_FRAME:
431 case APV_PBU_ALPHA_FRAME:
432 if (!avctx->internal->is_copy &&
433 !apv->warned_additional_frames) {
434 av_log(avctx, AV_LOG_WARNING,
435 "Stream contains additional non-primary frames "
436 "which will be ignored by the decoder.\n");
437 apv->warned_additional_frames = 1;
438 }
439 break;
440 case APV_PBU_ACCESS_UNIT_INFORMATION:
441 case APV_PBU_FILLER:
442 // Not relevant to the decoder.
443 break;
444 default:
445 if (!avctx->internal->is_copy &&
446 !apv->warned_unknown_pbu_types) {
447 av_log(avctx, AV_LOG_WARNING,
448 "Stream contains PBUs with unknown types "
449 "which will be ignored by the decoder.\n");
450 apv->warned_unknown_pbu_types = 1;
451 }
452 break;
453 }
454 }
455
456 5 err = packet->size;
457 5 fail:
458 5 ff_cbs_fragment_reset(au);
459 5 return err;
460 }
461
462 const FFCodec ff_apv_decoder = {
463 .p.name = "apv",
464 CODEC_LONG_NAME("Advanced Professional Video"),
465 .p.type = AVMEDIA_TYPE_VIDEO,
466 .p.id = AV_CODEC_ID_APV,
467 .priv_data_size = sizeof(APVDecodeContext),
468 .init = apv_decode_init,
469 .close = apv_decode_close,
470 FF_CODEC_DECODE_CB(apv_decode_frame),
471 .p.capabilities = AV_CODEC_CAP_DR1 |
472 AV_CODEC_CAP_SLICE_THREADS |
473 AV_CODEC_CAP_FRAME_THREADS,
474 };
475