Line | Branch | Exec | Source |
---|---|---|---|
1 | /* | ||
2 | * This file is part of FFmpeg. | ||
3 | * | ||
4 | * FFmpeg is free software; you can redistribute it and/or | ||
5 | * modify it under the terms of the GNU Lesser General Public | ||
6 | * License as published by the Free Software Foundation; either | ||
7 | * version 2.1 of the License, or (at your option) any later version. | ||
8 | * | ||
9 | * FFmpeg is distributed in the hope that it will be useful, | ||
10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
12 | * Lesser General Public License for more details. | ||
13 | * | ||
14 | * You should have received a copy of the GNU Lesser General Public | ||
15 | * License along with FFmpeg; if not, write to the Free Software | ||
16 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
17 | */ | ||
18 | |||
19 | #include <stdatomic.h> | ||
20 | |||
21 | #include "libavutil/mastering_display_metadata.h" | ||
22 | #include "libavutil/mem_internal.h" | ||
23 | #include "libavutil/pixdesc.h" | ||
24 | #include "libavutil/thread.h" | ||
25 | |||
26 | #include "apv.h" | ||
27 | #include "apv_decode.h" | ||
28 | #include "apv_dsp.h" | ||
29 | #include "avcodec.h" | ||
30 | #include "cbs.h" | ||
31 | #include "cbs_apv.h" | ||
32 | #include "codec_internal.h" | ||
33 | #include "decode.h" | ||
34 | #include "internal.h" | ||
35 | #include "thread.h" | ||
36 | |||
37 | |||
38 | typedef struct APVDecodeContext { | ||
39 | CodedBitstreamContext *cbc; | ||
40 | APVDSPContext dsp; | ||
41 | |||
42 | CodedBitstreamFragment au; | ||
43 | APVDerivedTileInfo tile_info; | ||
44 | |||
45 | AVFrame *output_frame; | ||
46 | atomic_int tile_errors; | ||
47 | |||
48 | uint8_t warned_additional_frames; | ||
49 | uint8_t warned_unknown_pbu_types; | ||
50 | } APVDecodeContext; | ||
51 | |||
52 | static const enum AVPixelFormat apv_format_table[5][5] = { | ||
53 | { AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY10, AV_PIX_FMT_GRAY12, AV_PIX_FMT_GRAY14, AV_PIX_FMT_GRAY16 }, | ||
54 | { 0 }, // 4:2:0 is not valid. | ||
55 | { AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_GRAY14, AV_PIX_FMT_YUV422P16 }, | ||
56 | { AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV444P12, AV_PIX_FMT_GRAY14, AV_PIX_FMT_YUV444P16 }, | ||
57 | { AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUVA444P10, AV_PIX_FMT_YUVA444P12, AV_PIX_FMT_GRAY14, AV_PIX_FMT_YUVA444P16 }, | ||
58 | }; | ||
59 | |||
60 | static APVVLCLUT decode_lut; | ||
61 | |||
62 | 9 | static int apv_decode_check_format(AVCodecContext *avctx, | |
63 | const APVRawFrameHeader *header) | ||
64 | { | ||
65 | int err, bit_depth; | ||
66 | |||
67 | 9 | avctx->profile = header->frame_info.profile_idc; | |
68 | 9 | avctx->level = header->frame_info.level_idc; | |
69 | |||
70 | 9 | bit_depth = header->frame_info.bit_depth_minus8 + 8; | |
71 | 3/6 | 9 | if (bit_depth < 8 || bit_depth > 16 || bit_depth % 2) { |
72 | ✗ | avpriv_request_sample(avctx, "Bit depth %d", bit_depth); | |
73 | ✗ | return AVERROR_PATCHWELCOME; | |
74 | } | ||
75 | 9 | avctx->pix_fmt = | |
76 | 9 | apv_format_table[header->frame_info.chroma_format_idc][(bit_depth - 8) >> 1]; | |
77 | |||
78 | 9 | err = ff_set_dimensions(avctx, | |
79 | 9 | FFALIGN(header->frame_info.frame_width, 16), | |
80 | 9 | FFALIGN(header->frame_info.frame_height, 16)); | |
81 | 1/2 | 9 | if (err < 0) { |
82 | // Unsupported frame size. | ||
83 | ✗ | return err; | |
84 | } | ||
85 | 9 | avctx->width = header->frame_info.frame_width; | |
86 | 9 | avctx->height = header->frame_info.frame_height; | |
87 | |||
88 | 9 | avctx->sample_aspect_ratio = (AVRational){ 1, 1 }; | |
89 | |||
90 | 9 | avctx->color_primaries = header->color_primaries; | |
91 | 9 | avctx->color_trc = header->transfer_characteristics; | |
92 | 9 | avctx->colorspace = header->matrix_coefficients; | |
93 | 18 | avctx->color_range = header->full_range_flag ? AVCOL_RANGE_JPEG | |
94 | 1/2 | 9 | : AVCOL_RANGE_MPEG; |
95 | 9 | avctx->chroma_sample_location = AVCHROMA_LOC_TOPLEFT; | |
96 | |||
97 | 9 | avctx->refs = 0; | |
98 | 9 | avctx->has_b_frames = 0; | |
99 | |||
100 | 9 | return 0; | |
101 | } | ||
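
As an aside on the lookup above: apv_format_table is indexed by chroma_format_idc and by the bit depth in steps of two, so the column index for an even depth between 8 and 16 is (bit_depth - 8) >> 1. Below is a minimal standalone sketch of the same mapping; the select_format() helper and the plain format-name strings are illustrative stand-ins for the AVPixelFormat values.

```c
#include <stdio.h>

/* Illustrative stand-in for apv_format_table: rows are chroma_format_idc,
 * columns are 8/10/12/14/16-bit variants. */
static const char *const format_names[5][5] = {
    { "gray8",    "gray10",     "gray12",     "gray14", "gray16"    },
    { NULL }, /* 4:2:0 is not valid. */
    { "yuv422p",  "yuv422p10",  "yuv422p12",  "gray14", "yuv422p16" },
    { "yuv444p",  "yuv444p10",  "yuv444p12",  "gray14", "yuv444p16" },
    { "yuva444p", "yuva444p10", "yuva444p12", "gray14", "yuva444p16" },
};

/* Hypothetical helper: map (chroma_format_idc, bit_depth) to a format name. */
static const char *select_format(int chroma_format_idc, int bit_depth)
{
    if (chroma_format_idc < 0 || chroma_format_idc > 4 || chroma_format_idc == 1)
        return NULL;
    if (bit_depth < 8 || bit_depth > 16 || bit_depth % 2)
        return NULL;
    return format_names[chroma_format_idc][(bit_depth - 8) >> 1];
}

int main(void)
{
    /* 4:2:2 10-bit, the common APV case, selects yuv422p10. */
    printf("%s\n", select_format(2, 10));
    return 0;
}
```
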
102 | |||
103 | static const CodedBitstreamUnitType apv_decompose_unit_types[] = { | ||
104 | APV_PBU_PRIMARY_FRAME, | ||
105 | APV_PBU_METADATA, | ||
106 | }; | ||
107 | |||
108 | static AVOnce apv_entropy_once = AV_ONCE_INIT; | ||
109 | |||
110 | 3 | static av_cold void apv_entropy_build_decode_lut(void) | |
111 | { | ||
112 | 3 | ff_apv_entropy_build_decode_lut(&decode_lut); | |
113 | 3 | } | |
114 | |||
115 | 5 | static av_cold int apv_decode_init(AVCodecContext *avctx) | |
116 | { | ||
117 | 5 | APVDecodeContext *apv = avctx->priv_data; | |
118 | int err; | ||
119 | |||
120 | 5 | ff_thread_once(&apv_entropy_once, apv_entropy_build_decode_lut); | |
121 | |||
122 | 5 | err = ff_cbs_init(&apv->cbc, AV_CODEC_ID_APV, avctx); | |
123 | 1/2 | 5 | if (err < 0) |
124 | ✗ | return err; | |
125 | |||
126 | 5 | apv->cbc->decompose_unit_types = | |
127 | apv_decompose_unit_types; | ||
128 | 5 | apv->cbc->nb_decompose_unit_types = | |
129 | FF_ARRAY_ELEMS(apv_decompose_unit_types); | ||
130 | |||
131 | // Extradata could be set here, but is ignored by the decoder. | ||
132 | |||
133 | 5 | ff_apv_dsp_init(&apv->dsp); | |
134 | |||
135 | 5 | atomic_init(&apv->tile_errors, 0); | |
136 | |||
137 | 5 | return 0; | |
138 | } | ||
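
The VLC decode LUT above is a single static table shared by every decoder instance, so it is built exactly once under ff_thread_once(). A minimal sketch of the same build-once pattern using standard C11 <threads.h>, with a hypothetical build_lut() standing in for ff_apv_entropy_build_decode_lut():

```c
#include <stdio.h>
#include <threads.h>

static once_flag lut_once = ONCE_FLAG_INIT;
static int lut[16]; /* stand-in for the real APVVLCLUT */

/* Built exactly once, even if several decoder instances init concurrently. */
static void build_lut(void)
{
    for (int i = 0; i < 16; i++)
        lut[i] = i * i;
    puts("building LUT");
}

static void decoder_init(void)
{
    call_once(&lut_once, build_lut); /* analogous to ff_thread_once() above */
}

int main(void)
{
    decoder_init();
    decoder_init(); /* the second call does not rebuild the table */
    printf("lut[3] = %d\n", lut[3]);
    return 0;
}
```
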
139 | |||
140 | 5 | static av_cold int apv_decode_close(AVCodecContext *avctx) | |
141 | { | ||
142 | 5 | APVDecodeContext *apv = avctx->priv_data; | |
143 | |||
144 | 5 | ff_cbs_fragment_free(&apv->au); | |
145 | 5 | ff_cbs_close(&apv->cbc); | |
146 | |||
147 | 5 | return 0; | |
148 | } | ||
149 | |||
150 | 13440 | static int apv_decode_block(AVCodecContext *avctx, | |
151 | void *output, | ||
152 | ptrdiff_t pitch, | ||
153 | GetBitContext *gbc, | ||
154 | APVEntropyState *entropy_state, | ||
155 | int bit_depth, | ||
156 | int qp_shift, | ||
157 | const uint16_t *qmatrix) | ||
158 | { | ||
159 | 13440 | APVDecodeContext *apv = avctx->priv_data; | |
160 | int err; | ||
161 | |||
162 | 13440 | LOCAL_ALIGNED_32(int16_t, coeff, [64]); | |
163 | 13440 | memset(coeff, 0, 64 * sizeof(int16_t)); | |
164 | |||
165 | 13440 | err = ff_apv_entropy_decode_block(coeff, gbc, entropy_state); | |
166 | 1/2 | 13440 | if (err < 0) |
167 | ✗ | return err; | |
168 | |||
169 | 13440 | apv->dsp.decode_transquant(output, pitch, | |
170 | coeff, qmatrix, | ||
171 | bit_depth, qp_shift); | ||
172 | |||
173 | 13440 | return 0; | |
174 | } | ||
175 | |||
176 | 38 | static int apv_decode_tile_component(AVCodecContext *avctx, void *data, | |
177 | int job, int thread) | ||
178 | { | ||
179 | 38 | APVRawFrame *input = data; | |
180 | 38 | APVDecodeContext *apv = avctx->priv_data; | |
181 | 38 | const CodedBitstreamAPVContext *apv_cbc = apv->cbc->priv_data; | |
182 | 38 | const APVDerivedTileInfo *tile_info = &apv_cbc->tile_info; | |
183 | |||
184 | 38 | int tile_index = job / apv_cbc->num_comp; | |
185 | 38 | int comp_index = job % apv_cbc->num_comp; | |
186 | |||
187 | const AVPixFmtDescriptor *pix_fmt_desc = | ||
188 | 38 | av_pix_fmt_desc_get(avctx->pix_fmt); | |
189 | |||
190 | 2/2 | 38 | int sub_w_shift = comp_index == 0 ? 0 : pix_fmt_desc->log2_chroma_w; |
191 | 2/2 | 38 | int sub_h_shift = comp_index == 0 ? 0 : pix_fmt_desc->log2_chroma_h; |
192 | |||
193 | 38 | APVRawTile *tile = &input->tile[tile_index]; | |
194 | |||
195 | 38 | int tile_y = tile_index / tile_info->tile_cols; | |
196 | 38 | int tile_x = tile_index % tile_info->tile_cols; | |
197 | |||
198 | 38 | int tile_start_x = tile_info->col_starts[tile_x]; | |
199 | 38 | int tile_start_y = tile_info->row_starts[tile_y]; | |
200 | |||
201 | 38 | int tile_width = tile_info->col_starts[tile_x + 1] - tile_start_x; | |
202 | 38 | int tile_height = tile_info->row_starts[tile_y + 1] - tile_start_y; | |
203 | |||
204 | 38 | int tile_mb_width = tile_width / APV_MB_WIDTH; | |
205 | 38 | int tile_mb_height = tile_height / APV_MB_HEIGHT; | |
206 | |||
207 | 38 | int blk_mb_width = 2 >> sub_w_shift; | |
208 | 38 | int blk_mb_height = 2 >> sub_h_shift; | |
209 | |||
210 | int bit_depth; | ||
211 | int qp_shift; | ||
212 | 38 | LOCAL_ALIGNED_32(uint16_t, qmatrix_scaled, [64]); | |
213 | |||
214 | GetBitContext gbc; | ||
215 | |||
216 | 38 | APVEntropyState entropy_state = { | |
217 | .log_ctx = avctx, | ||
218 | .decode_lut = &decode_lut, | ||
219 | .prev_dc = 0, | ||
220 | .prev_k_dc = 5, | ||
221 | .prev_k_level = 0, | ||
222 | }; | ||
223 | |||
224 | int err; | ||
225 | |||
226 | 38 | err = init_get_bits8(&gbc, tile->tile_data[comp_index], | |
227 | 38 | tile->tile_header.tile_data_size[comp_index]); | |
228 | 1/2 | 38 | if (err < 0) |
229 | ✗ | goto fail; | |
230 | |||
231 | // Combine the bitstream quantisation matrix with the qp scaling | ||
232 | // in advance. (Including qp_shift as well would overflow 16 bits.) | ||
233 | // Fix the row ordering at the same time. | ||
234 | { | ||
235 | static const uint8_t apv_level_scale[6] = { 40, 45, 51, 57, 64, 71 }; | ||
236 | 38 | int qp = tile->tile_header.tile_qp[comp_index]; | |
237 | 38 | int level_scale = apv_level_scale[qp % 6]; | |
238 | |||
239 | 38 | bit_depth = apv_cbc->bit_depth; | |
240 | 38 | qp_shift = qp / 6; | |
241 | |||
242 | 2/2 | 342 | for (int y = 0; y < 8; y++) { |
243 | 2/2 | 2736 | for (int x = 0; x < 8; x++) |
244 | 2432 | qmatrix_scaled[y * 8 + x] = level_scale * | |
245 | 2432 | input->frame_header.quantization_matrix.q_matrix[comp_index][x][y]; | |
246 | } | ||
247 | } | ||
248 | |||
249 | 2/2 | 494 | for (int mb_y = 0; mb_y < tile_mb_height; mb_y++) { |
250 | 2/2 | 5016 | for (int mb_x = 0; mb_x < tile_mb_width; mb_x++) { |
251 | 2/2 | 13680 | for (int blk_y = 0; blk_y < blk_mb_height; blk_y++) { |
252 | 2/2 | 22560 | for (int blk_x = 0; blk_x < blk_mb_width; blk_x++) { |
253 | 13440 | int frame_y = (tile_start_y + | |
254 | 13440 | APV_MB_HEIGHT * mb_y + | |
255 | 13440 | APV_TR_SIZE * blk_y) >> sub_h_shift; | |
256 | 13440 | int frame_x = (tile_start_x + | |
257 | 13440 | APV_MB_WIDTH * mb_x + | |
258 | 13440 | APV_TR_SIZE * blk_x) >> sub_w_shift; | |
259 | |||
260 | 13440 | ptrdiff_t frame_pitch = apv->output_frame->linesize[comp_index]; | |
261 | 13440 | uint8_t *block_start = apv->output_frame->data[comp_index] + | |
262 | 13440 | frame_y * frame_pitch + 2 * frame_x; | |
263 | |||
264 | 13440 | err = apv_decode_block(avctx, | |
265 | block_start, frame_pitch, | ||
266 | &gbc, &entropy_state, | ||
267 | bit_depth, | ||
268 | qp_shift, | ||
269 | qmatrix_scaled); | ||
270 | 1/2 | 13440 | if (err < 0) { |
271 | // Error in block decode means entropy desync, | ||
272 | // so this is not recoverable. | ||
273 | ✗ | goto fail; | |
274 | } | ||
275 | } | ||
276 | } | ||
277 | } | ||
278 | } | ||
279 | |||
280 | 38 | av_log(avctx, AV_LOG_DEBUG, | |
281 | "Decoded tile %d component %d: %dx%d MBs starting at (%d,%d)\n", | ||
282 | tile_index, comp_index, tile_mb_width, tile_mb_height, | ||
283 | tile_start_x, tile_start_y); | ||
284 | |||
285 | 38 | return 0; | |
286 | |||
287 | ✗ | fail: | |
288 | ✗ | av_log(avctx, AV_LOG_VERBOSE, | |
289 | "Decode error in tile %d component %d.\n", | ||
290 | tile_index, comp_index); | ||
291 | ✗ | atomic_fetch_add_explicit(&apv->tile_errors, 1, memory_order_relaxed); | |
292 | ✗ | return err; | |
293 | } | ||
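
To illustrate the quantisation-matrix preparation in the tile worker above: the tile QP splits into level_scale = apv_level_scale[qp % 6] and qp_shift = qp / 6, the bitstream matrix is transposed into row-major order, and only level_scale is folded in so the 16-bit products cannot overflow (71 * 255 = 18105). A standalone sketch with a hypothetical prepare_qmatrix() and made-up example values:

```c
#include <stdint.h>
#include <stdio.h>

static const uint8_t level_scale_tab[6] = { 40, 45, 51, 57, 64, 71 };

/* Scale the bitstream matrix by level_scale and transpose it into
 * row-major order; the remaining qp / 6 is returned as a shift. */
static void prepare_qmatrix(uint16_t dst[64], uint8_t src[8][8],
                            int qp, int *qp_shift)
{
    int level_scale = level_scale_tab[qp % 6];

    *qp_shift = qp / 6;
    for (int y = 0; y < 8; y++)
        for (int x = 0; x < 8; x++)
            dst[y * 8 + x] = level_scale * src[x][y]; /* note the transpose */
}

int main(void)
{
    uint8_t  bitstream_matrix[8][8];
    uint16_t scaled[64];
    int      qp_shift;

    /* A flat matrix with every entry 16, standing in for real stream data. */
    for (int y = 0; y < 8; y++)
        for (int x = 0; x < 8; x++)
            bitstream_matrix[y][x] = 16;

    prepare_qmatrix(scaled, bitstream_matrix, 33, &qp_shift);
    printf("qp_shift=%d scaled[0]=%d\n", qp_shift, scaled[0]); /* 5, 912 */
    return 0;
}
```
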
294 | |||
295 | 9 | static int apv_decode(AVCodecContext *avctx, AVFrame *output, | |
296 | APVRawFrame *input) | ||
297 | { | ||
298 | 9 | APVDecodeContext *apv = avctx->priv_data; | |
299 | 9 | const CodedBitstreamAPVContext *apv_cbc = apv->cbc->priv_data; | |
300 | 9 | const APVDerivedTileInfo *tile_info = &apv_cbc->tile_info; | |
301 | int err, job_count; | ||
302 | |||
303 | 9 | err = apv_decode_check_format(avctx, &input->frame_header); | |
304 | 1/2 | 9 | if (err < 0) { |
305 | ✗ | av_log(avctx, AV_LOG_ERROR, "Unsupported format parameters.\n"); | |
306 | ✗ | return err; | |
307 | } | ||
308 | |||
309 | 9 | err = ff_thread_get_buffer(avctx, output, 0); | |
310 | 1/2 | 9 | if (err < 0) |
311 | ✗ | return err; | |
312 | |||
313 | 9 | apv->output_frame = output; | |
314 | 9 | atomic_store_explicit(&apv->tile_errors, 0, memory_order_relaxed); | |
315 | |||
316 | // Each component within a tile is independent of every other, | ||
317 | // so we can decode all in parallel. | ||
318 | 9 | job_count = tile_info->num_tiles * apv_cbc->num_comp; | |
319 | |||
320 | 9 | avctx->execute2(avctx, apv_decode_tile_component, | |
321 | input, NULL, job_count); | ||
322 | |||
323 | 9 | err = atomic_load_explicit(&apv->tile_errors, memory_order_relaxed); | |
324 | 1/2 | 9 | if (err > 0) { |
325 | ✗ | av_log(avctx, AV_LOG_ERROR, | |
326 | "Decode errors in %d tile components.\n", err); | ||
327 | ✗ | if (avctx->flags & AV_CODEC_FLAG_OUTPUT_CORRUPT) { | |
328 | // Output the frame anyway. | ||
329 | ✗ | output->flags |= AV_FRAME_FLAG_CORRUPT; | |
330 | } else { | ||
331 | ✗ | return AVERROR_INVALIDDATA; | |
332 | } | ||
333 | } | ||
334 | |||
335 | 9 | return 0; | |
336 | } | ||
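
A small sketch of the job numbering used with execute2() above: there is one job per (tile, component) pair, so job_count = num_tiles * num_comp and a job index splits back into its tile and component with a division and a modulo. The helper name and the 2-tile 4:2:2 example are illustrative:

```c
#include <stdio.h>

/* Split a flat job index back into (tile, component), mirroring the
 * job / num_comp and job % num_comp arithmetic in the tile worker. */
static void job_to_tile_comp(int job, int num_comp, int *tile, int *comp)
{
    *tile = job / num_comp;
    *comp = job % num_comp;
}

int main(void)
{
    int num_tiles = 2, num_comp = 3; /* e.g. a 2-tile 4:2:2 frame */
    int job_count = num_tiles * num_comp;

    for (int job = 0; job < job_count; job++) {
        int tile, comp;
        job_to_tile_comp(job, num_comp, &tile, &comp);
        printf("job %d -> tile %d, component %d\n", job, tile, comp);
    }
    return 0;
}
```
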
337 | |||
338 | 9 | static int apv_decode_metadata(AVCodecContext *avctx, AVFrame *frame, | |
339 | const APVRawMetadata *md) | ||
340 | { | ||
341 | int err; | ||
342 | |||
343 | 2/2 | 27 | for (int i = 0; i < md->metadata_count; i++) { |
344 | 18 | const APVRawMetadataPayload *pl = &md->payloads[i]; | |
345 | |||
346 | 1/3 | 18 | switch (pl->payload_type) { |
347 | ✗ | case APV_METADATA_MDCV: | |
348 | { | ||
349 | ✗ | const APVRawMetadataMDCV *mdcv = &pl->mdcv; | |
350 | AVMasteringDisplayMetadata *mdm; | ||
351 | |||
352 | ✗ | err = ff_decode_mastering_display_new(avctx, frame, &mdm); | |
353 | ✗ | if (err < 0) | |
354 | ✗ | return err; | |
355 | |||
356 | ✗ | if (mdm) { | |
357 | ✗ | for (int j = 0; j < 3; j++) { | |
358 | ✗ | mdm->display_primaries[j][0] = | |
359 | ✗ | av_make_q(mdcv->primary_chromaticity_x[j], 1 << 16); | |
360 | ✗ | mdm->display_primaries[j][1] = | |
361 | ✗ | av_make_q(mdcv->primary_chromaticity_y[j], 1 << 16); | |
362 | } | ||
363 | |||
364 | ✗ | mdm->white_point[0] = | |
365 | ✗ | av_make_q(mdcv->white_point_chromaticity_x, 1 << 16); | |
366 | ✗ | mdm->white_point[1] = | |
367 | ✗ | av_make_q(mdcv->white_point_chromaticity_y, 1 << 16); | |
368 | |||
369 | ✗ | mdm->max_luminance = | |
370 | ✗ | av_make_q(mdcv->max_mastering_luminance, 1 << 8); | |
371 | ✗ | mdm->min_luminance = | |
372 | ✗ | av_make_q(mdcv->min_mastering_luminance, 1 << 14); | |
373 | |||
374 | ✗ | mdm->has_primaries = 1; | |
375 | ✗ | mdm->has_luminance = 1; | |
376 | } | ||
377 | } | ||
378 | ✗ | break; | |
379 | ✗ | case APV_METADATA_CLL: | |
380 | { | ||
381 | ✗ | const APVRawMetadataCLL *cll = &pl->cll; | |
382 | AVContentLightMetadata *clm; | ||
383 | |||
384 | ✗ | err = ff_decode_content_light_new(avctx, frame, &clm); | |
385 | ✗ | if (err < 0) | |
386 | ✗ | return err; | |
387 | |||
388 | ✗ | if (clm) { | |
389 | ✗ | clm->MaxCLL = cll->max_cll; | |
390 | ✗ | clm->MaxFALL = cll->max_fall; | |
391 | } | ||
392 | } | ||
393 | ✗ | break; | |
394 | 18 | default: | |
395 | // Ignore other types of metadata. | ||
396 | 18 | break; | |
397 | } | ||
398 | } | ||
399 | |||
400 | 9 | return 0; | |
401 | } | ||
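
For reference, the MDCV payload handling above uses the usual fixed-point conventions: chromaticities in 1/65536 units, maximum luminance in 1/256 cd/m² and minimum luminance in 1/16384 cd/m². A standalone sketch of those conversions using libavutil's public AVRational helpers; the raw field values are made-up D65/HDR10-like numbers:

```c
#include <stdio.h>
#include <libavutil/rational.h>

int main(void)
{
    /* Example raw field values (hypothetical): D65-ish white point,
     * 1000 cd/m^2 peak and ~0.005 cd/m^2 minimum luminance. */
    int wp_x = 20493, wp_y = 21564;
    int max_lum = 256000, min_lum = 82;

    AVRational white_x = av_make_q(wp_x,    1 << 16);
    AVRational white_y = av_make_q(wp_y,    1 << 16);
    AVRational lum_max = av_make_q(max_lum, 1 << 8);
    AVRational lum_min = av_make_q(min_lum, 1 << 14);

    printf("white point (%.4f, %.4f), luminance %.1f .. %.4f cd/m^2\n",
           av_q2d(white_x), av_q2d(white_y), av_q2d(lum_max), av_q2d(lum_min));
    return 0;
}
```
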
402 | |||
403 | 9 | static int apv_decode_frame(AVCodecContext *avctx, AVFrame *frame, | |
404 | int *got_frame, AVPacket *packet) | ||
405 | { | ||
406 | 9 | APVDecodeContext *apv = avctx->priv_data; | |
407 | 9 | CodedBitstreamFragment *au = &apv->au; | |
408 | int err; | ||
409 | |||
410 | 9 | err = ff_cbs_read_packet(apv->cbc, au, packet); | |
411 | 1/2 | 9 | if (err < 0) { |
412 | ✗ | av_log(avctx, AV_LOG_ERROR, "Failed to read packet.\n"); | |
413 | ✗ | goto fail; | |
414 | } | ||
415 | |||
416 | 2/2 | 27 | for (int i = 0; i < au->nb_units; i++) { |
417 | 18 | CodedBitstreamUnit *pbu = &au->units[i]; | |
418 | |||
419 | 2/5 | 18 | switch (pbu->type) { |
420 | 9 | case APV_PBU_PRIMARY_FRAME: | |
421 | 9 | err = apv_decode(avctx, frame, pbu->content); | |
422 | 1/2 | 9 | if (err < 0) |
423 | ✗ | goto fail; | |
424 | 9 | *got_frame = 1; | |
425 | 9 | break; | |
426 | 9 | case APV_PBU_METADATA: | |
427 | 9 | apv_decode_metadata(avctx, frame, pbu->content); | |
428 | 9 | break; | |
429 | ✗ | case APV_PBU_NON_PRIMARY_FRAME: | |
430 | case APV_PBU_PREVIEW_FRAME: | ||
431 | case APV_PBU_DEPTH_FRAME: | ||
432 | case APV_PBU_ALPHA_FRAME: | ||
433 | ✗ | if (!avctx->internal->is_copy && | |
434 | ✗ | !apv->warned_additional_frames) { | |
435 | ✗ | av_log(avctx, AV_LOG_WARNING, | |
436 | "Stream contains additional non-primary frames " | ||
437 | "which will be ignored by the decoder.\n"); | ||
438 | ✗ | apv->warned_additional_frames = 1; | |
439 | } | ||
440 | ✗ | break; | |
441 | ✗ | case APV_PBU_ACCESS_UNIT_INFORMATION: | |
442 | case APV_PBU_FILLER: | ||
443 | // Not relevant to the decoder. | ||
444 | ✗ | break; | |
445 | ✗ | default: | |
446 | ✗ | if (!avctx->internal->is_copy && | |
447 | ✗ | !apv->warned_unknown_pbu_types) { | |
448 | ✗ | av_log(avctx, AV_LOG_WARNING, | |
449 | "Stream contains PBUs with unknown types " | ||
450 | "which will be ignored by the decoder.\n"); | ||
451 | ✗ | apv->warned_unknown_pbu_types = 1; | |
452 | } | ||
453 | ✗ | break; | |
454 | } | ||
455 | } | ||
456 | |||
457 | 9 | err = packet->size; | |
458 | 9 | fail: | |
459 | 9 | ff_cbs_fragment_reset(au); | |
460 | 9 | return err; | |
461 | } | ||
462 | |||
463 | const FFCodec ff_apv_decoder = { | ||
464 | .p.name = "apv", | ||
465 | CODEC_LONG_NAME("Advanced Professional Video"), | ||
466 | .p.type = AVMEDIA_TYPE_VIDEO, | ||
467 | .p.id = AV_CODEC_ID_APV, | ||
468 | .priv_data_size = sizeof(APVDecodeContext), | ||
469 | .init = apv_decode_init, | ||
470 | .close = apv_decode_close, | ||
471 | FF_CODEC_DECODE_CB(apv_decode_frame), | ||
472 | .p.capabilities = AV_CODEC_CAP_DR1 | | ||
473 | AV_CODEC_CAP_SLICE_THREADS | | ||
474 | AV_CODEC_CAP_FRAME_THREADS, | ||
475 | }; | ||
476 |
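
Finally, a hedged sketch of driving this decoder through the public libavcodec API (the registered codec ID is AV_CODEC_ID_APV): packets from a demuxer go in with avcodec_send_packet() and decoded frames come out with avcodec_receive_frame(). Demuxing and detailed error cleanup are omitted.

```c
#include <stdio.h>
#include <libavcodec/avcodec.h>

/* Feed one demuxed APV packet and drain any frames it produces. */
static int decode_apv_packet(AVCodecContext *ctx, AVPacket *pkt, AVFrame *frame)
{
    int ret = avcodec_send_packet(ctx, pkt);
    if (ret < 0)
        return ret;
    while ((ret = avcodec_receive_frame(ctx, frame)) >= 0) {
        printf("decoded %dx%d frame, format %d\n",
               frame->width, frame->height, frame->format);
        av_frame_unref(frame);
    }
    return ret == AVERROR(EAGAIN) || ret == AVERROR_EOF ? 0 : ret;
}

int main(void)
{
    const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_APV);
    AVCodecContext *ctx  = codec ? avcodec_alloc_context3(codec) : NULL;
    AVFrame *frame       = av_frame_alloc();

    if (!codec || !ctx || !frame || avcodec_open2(ctx, codec, NULL) < 0)
        return 1;

    /* ... obtain AVPackets from a demuxer and pass each one to
     * decode_apv_packet(ctx, pkt, frame) ... */

    av_frame_free(&frame);
    avcodec_free_context(&ctx);
    return 0;
}
```
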