FFmpeg coverage


Directory: ../../../ffmpeg/
File: src/libavfilter/vf_colorspace.c
Date: 2025-10-10 03:51:19
            Exec  Total  Coverage
Lines:         0    448      0.0%
Functions:     0     12      0.0%
Branches:      0    302      0.0%

Line Branch Exec Source
1 /*
2 * Copyright (c) 2016 Ronald S. Bultje <rsbultje@gmail.com>
3 *
4 * This file is part of FFmpeg.
5 *
6 * FFmpeg is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2.1 of the License, or (at your option) any later version.
10 *
11 * FFmpeg is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with FFmpeg; if not, write to the Free Software
18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 */
20
21 /**
22 * @file
23 * Convert between colorspaces.
24 */
25
26 #include "libavutil/avassert.h"
27 #include "libavutil/csp.h"
28 #include "libavutil/frame.h"
29 #include "libavutil/mem.h"
30 #include "libavutil/mem_internal.h"
31 #include "libavutil/opt.h"
32 #include "libavutil/pixdesc.h"
33 #include "libavutil/pixfmt.h"
34
35 #include "avfilter.h"
36 #include "colorspacedsp.h"
37 #include "filters.h"
38 #include "formats.h"
39 #include "video.h"
40 #include "colorspace.h"
41
42 enum DitherMode {
43 DITHER_NONE,
44 DITHER_FSB,
45 DITHER_NB,
46 };
47
48 enum Colorspace {
49 CS_UNSPECIFIED,
50 CS_BT470M,
51 CS_BT470BG,
52 CS_BT601_6_525,
53 CS_BT601_6_625,
54 CS_BT709,
55 CS_SMPTE170M,
56 CS_SMPTE240M,
57 CS_BT2020,
58 CS_NB,
59 };
60
61 enum WhitepointAdaptation {
62 WP_ADAPT_BRADFORD,
63 WP_ADAPT_VON_KRIES,
64 NB_WP_ADAPT_NON_IDENTITY,
65 WP_ADAPT_IDENTITY = NB_WP_ADAPT_NON_IDENTITY,
66 NB_WP_ADAPT,
67 };
68
69 enum ClipGamutMode {
70 CLIP_GAMUT_NONE,
71 CLIP_GAMUT_RGB,
72 NB_CLIP_GAMUT,
73 };
74
75 static const enum AVColorTransferCharacteristic default_trc[CS_NB + 1] = {
76 [CS_UNSPECIFIED] = AVCOL_TRC_UNSPECIFIED,
77 [CS_BT470M] = AVCOL_TRC_GAMMA22,
78 [CS_BT470BG] = AVCOL_TRC_GAMMA28,
79 [CS_BT601_6_525] = AVCOL_TRC_SMPTE170M,
80 [CS_BT601_6_625] = AVCOL_TRC_SMPTE170M,
81 [CS_BT709] = AVCOL_TRC_BT709,
82 [CS_SMPTE170M] = AVCOL_TRC_SMPTE170M,
83 [CS_SMPTE240M] = AVCOL_TRC_SMPTE240M,
84 [CS_BT2020] = AVCOL_TRC_BT2020_10,
85 [CS_NB] = AVCOL_TRC_UNSPECIFIED,
86 };
87
88 static const enum AVColorPrimaries default_prm[CS_NB + 1] = {
89 [CS_UNSPECIFIED] = AVCOL_PRI_UNSPECIFIED,
90 [CS_BT470M] = AVCOL_PRI_BT470M,
91 [CS_BT470BG] = AVCOL_PRI_BT470BG,
92 [CS_BT601_6_525] = AVCOL_PRI_SMPTE170M,
93 [CS_BT601_6_625] = AVCOL_PRI_BT470BG,
94 [CS_BT709] = AVCOL_PRI_BT709,
95 [CS_SMPTE170M] = AVCOL_PRI_SMPTE170M,
96 [CS_SMPTE240M] = AVCOL_PRI_SMPTE240M,
97 [CS_BT2020] = AVCOL_PRI_BT2020,
98 [CS_NB] = AVCOL_PRI_UNSPECIFIED,
99 };
100
101 static const enum AVColorSpace default_csp[CS_NB + 1] = {
102 [CS_UNSPECIFIED] = AVCOL_SPC_UNSPECIFIED,
103 [CS_BT470M] = AVCOL_SPC_SMPTE170M,
104 [CS_BT470BG] = AVCOL_SPC_BT470BG,
105 [CS_BT601_6_525] = AVCOL_SPC_SMPTE170M,
106 [CS_BT601_6_625] = AVCOL_SPC_BT470BG,
107 [CS_BT709] = AVCOL_SPC_BT709,
108 [CS_SMPTE170M] = AVCOL_SPC_SMPTE170M,
109 [CS_SMPTE240M] = AVCOL_SPC_SMPTE240M,
110 [CS_BT2020] = AVCOL_SPC_BT2020_NCL,
111 [CS_NB] = AVCOL_SPC_UNSPECIFIED,
112 };
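
Read together, these three tables define how the single "all"/"iall" shorthand option expands into concrete transfer, primaries, and matrix defaults. For example, per the entries above:

    CS_BT601_6_625  ->  trc = AVCOL_TRC_SMPTE170M, prm = AVCOL_PRI_BT470BG, csp = AVCOL_SPC_BT470BG
    CS_BT709        ->  trc = AVCOL_TRC_BT709,     prm = AVCOL_PRI_BT709,   csp = AVCOL_SPC_BT709
    CS_BT2020       ->  trc = AVCOL_TRC_BT2020_10, prm = AVCOL_PRI_BT2020,  csp = AVCOL_SPC_BT2020_NCL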
113
114 struct TransferCharacteristics {
115 double alpha, beta, gamma, delta;
116 };
117
118 typedef struct ColorSpaceContext {
119 const AVClass *class;
120
121 ColorSpaceDSPContext dsp;
122
123 enum Colorspace user_all, user_iall;
124 enum AVColorSpace in_csp, out_csp, user_csp, user_icsp;
125 enum AVColorRange in_rng, out_rng, user_rng, user_irng;
126 enum AVColorTransferCharacteristic in_trc, out_trc, user_trc, user_itrc;
127 enum AVColorPrimaries in_prm, out_prm, user_prm, user_iprm;
128 enum AVPixelFormat in_format, user_format;
129 int fast_mode;
130 enum DitherMode dither;
131 enum WhitepointAdaptation wp_adapt;
132 enum ClipGamutMode clip_gamut;
133
134 int16_t *rgb[3];
135 ptrdiff_t rgb_stride;
136 unsigned rgb_sz;
137 int *dither_scratch[3][2], *dither_scratch_base[3][2];
138
139 const AVColorPrimariesDesc *in_primaries, *out_primaries;
140 int lrgb2lrgb_passthrough;
141 DECLARE_ALIGNED(16, int16_t, lrgb2lrgb_coeffs)[3][3][8];
142
143 const struct TransferCharacteristics *in_txchr, *out_txchr;
144 int rgb2rgb_passthrough;
145 int16_t *lin_lut, *delin_lut;
146
147 const AVLumaCoefficients *in_lumacoef, *out_lumacoef;
148 int yuv2yuv_passthrough, yuv2yuv_fastmode;
149 DECLARE_ALIGNED(16, int16_t, yuv2rgb_coeffs)[3][3][8];
150 DECLARE_ALIGNED(16, int16_t, rgb2yuv_coeffs)[3][3][8];
151 DECLARE_ALIGNED(16, int16_t, yuv2yuv_coeffs)[3][3][8];
152 DECLARE_ALIGNED(16, int16_t, yuv_offset)[2 /* in, out */][8];
153 yuv2rgb_fn yuv2rgb;
154 rgb2yuv_fn rgb2yuv;
155 rgb2yuv_fsb_fn rgb2yuv_fsb;
156 yuv2yuv_fn yuv2yuv;
157 double yuv2rgb_dbl_coeffs[3][3], rgb2yuv_dbl_coeffs[3][3];
158 int in_y_rng, in_uv_rng, out_y_rng, out_uv_rng;
159
160 int did_warn_range;
161 } ColorSpaceContext;
162
163 // FIXME deal with odd width/heights
164 // FIXME faster linearize/delinearize implementation (integer pow)
165 // FIXME bt2020cl support (linearization between yuv/rgb step instead of between rgb/xyz)
166 // FIXME test that the values in (de)lin_lut don't exceed their container storage
167 // type size (only useful if we keep the LUT and don't move to fast integer pow)
168 // FIXME dithering if bitdepth goes down?
169 // FIXME bitexact for fate integration?
170
171 // FIXME I'm pretty sure gamma22/28 also have a linear toe slope, but I can't
172 // find any actual tables that document their real values...
173 // See the first graph at http://www.13thmonkey.org/~boris/gammacorrection/ for why this matters
174 static const struct TransferCharacteristics transfer_characteristics[] = {
175 [AVCOL_TRC_BT709] = { 1.099, 0.018, 0.45, 4.5 },
176 [AVCOL_TRC_GAMMA22] = { 1.0, 0.0, 1.0 / 2.2, 0.0 },
177 [AVCOL_TRC_GAMMA28] = { 1.0, 0.0, 1.0 / 2.8, 0.0 },
178 [AVCOL_TRC_SMPTE170M] = { 1.099, 0.018, 0.45, 4.5 },
179 [AVCOL_TRC_SMPTE240M] = { 1.1115, 0.0228, 0.45, 4.0 },
180 [AVCOL_TRC_LINEAR] = { 1.0, 0.0, 1.0, 0.0 },
181 [AVCOL_TRC_IEC61966_2_1] = { 1.055, 0.0031308, 1.0 / 2.4, 12.92 },
182 [AVCOL_TRC_IEC61966_2_4] = { 1.099, 0.018, 0.45, 4.5 },
183 [AVCOL_TRC_BT2020_10] = { 1.099, 0.018, 0.45, 4.5 },
184 [AVCOL_TRC_BT2020_12] = { 1.0993, 0.0181, 0.45, 4.5 },
185 };
186
187 static const struct TransferCharacteristics *
188 get_transfer_characteristics(enum AVColorTransferCharacteristic trc)
189 {
190 const struct TransferCharacteristics *coeffs;
191
192 if ((unsigned)trc >= FF_ARRAY_ELEMS(transfer_characteristics))
193 return NULL;
194 coeffs = &transfer_characteristics[trc];
195 if (!coeffs->alpha)
196 return NULL;
197
198 return coeffs;
199 }
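
The alpha/beta/gamma/delta values above parameterize the piecewise transfer curve that fill_gamma_table() below evaluates in both directions (it also mirrors the curve for negative overshoot values). A minimal sketch of the forward (delinearize) branch for non-negative input, using a hypothetical helper name:

    #include <math.h>

    /* Sketch only: encode a linear value v in [0,1] with the piecewise curve
     * described by one TransferCharacteristics entry, e.g. BT.709:
     * alpha = 1.099, beta = 0.018, gamma = 0.45, delta = 4.5. */
    static double delinearize_sketch(double v,
                                     const struct TransferCharacteristics *c)
    {
        if (v < c->beta)                        /* linear toe segment */
            return c->delta * v;
        return c->alpha * pow(v, c->gamma) - (c->alpha - 1.0); /* power segment */
    }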
200
201 static int fill_gamma_table(ColorSpaceContext *s)
202 {
203 int n;
204 double in_alpha = s->in_txchr->alpha, in_beta = s->in_txchr->beta;
205 double in_gamma = s->in_txchr->gamma, in_delta = s->in_txchr->delta;
206 double in_ialpha = 1.0 / in_alpha, in_igamma = 1.0 / in_gamma, in_idelta = 1.0 / in_delta;
207 double out_alpha = s->out_txchr->alpha, out_beta = s->out_txchr->beta;
208 double out_gamma = s->out_txchr->gamma, out_delta = s->out_txchr->delta;
209 int clip_gamut = s->clip_gamut == CLIP_GAMUT_RGB;
210
211 s->lin_lut = av_malloc(sizeof(*s->lin_lut) * 32768 * 2);
212 if (!s->lin_lut)
213 return AVERROR(ENOMEM);
214 s->delin_lut = &s->lin_lut[32768];
215 for (n = 0; n < 32768; n++) {
216 double v = (n - 2048.0) / 28672.0, d, l;
217
218 // delinearize
219 if (v <= -out_beta) {
220 d = -out_alpha * pow(-v, out_gamma) + (out_alpha - 1.0);
221 } else if (v < out_beta) {
222 d = out_delta * v;
223 } else {
224 d = out_alpha * pow(v, out_gamma) - (out_alpha - 1.0);
225 }
226 int d_rounded = lrint(d * 28672.0);
227 s->delin_lut[n] = clip_gamut ? av_clip(d_rounded, 0, 28672)
228 : av_clip_int16(d_rounded);
229
230 // linearize
231 if (v <= -in_beta * in_delta) {
232 l = -pow((1.0 - in_alpha - v) * in_ialpha, in_igamma);
233 } else if (v < in_beta * in_delta) {
234 l = v * in_idelta;
235 } else {
236 l = pow((v + in_alpha - 1.0) * in_ialpha, in_igamma);
237 }
238 int l_rounded = lrint(l * 28672.0);
239 s->lin_lut[n] = clip_gamut ? av_clip(l_rounded, 0, 28672)
240 : av_clip_int16(l_rounded);
241 }
242
243 return 0;
244 }
245
246 /*
247 * See http://www.brucelindbloom.com/index.html?Eqn_ChromAdapt.html
248 * The chromatic adaptation method (Bradford or Von Kries) is selected via wp_adapt.
249 */
250 static void fill_whitepoint_conv_table(double out[3][3], enum WhitepointAdaptation wp_adapt,
251 const AVWhitepointCoefficients *wp_src,
252 const AVWhitepointCoefficients *wp_dst)
253 {
254 static const double ma_tbl[NB_WP_ADAPT_NON_IDENTITY][3][3] = {
255 [WP_ADAPT_BRADFORD] = {
256 { 0.8951, 0.2664, -0.1614 },
257 { -0.7502, 1.7135, 0.0367 },
258 { 0.0389, -0.0685, 1.0296 },
259 }, [WP_ADAPT_VON_KRIES] = {
260 { 0.40024, 0.70760, -0.08081 },
261 { -0.22630, 1.16532, 0.04570 },
262 { 0.00000, 0.00000, 0.91822 },
263 },
264 };
265 const double (*ma)[3] = ma_tbl[wp_adapt];
266 double xw_src = av_q2d(wp_src->x), yw_src = av_q2d(wp_src->y);
267 double xw_dst = av_q2d(wp_dst->x), yw_dst = av_q2d(wp_dst->y);
268 double zw_src = 1.0 - xw_src - yw_src;
269 double zw_dst = 1.0 - xw_dst - yw_dst;
270 double mai[3][3], fac[3][3], tmp[3][3];
271 double rs, gs, bs, rd, gd, bd;
272
273 ff_matrix_invert_3x3(ma, mai);
274 rs = ma[0][0] * xw_src + ma[0][1] * yw_src + ma[0][2] * zw_src;
275 gs = ma[1][0] * xw_src + ma[1][1] * yw_src + ma[1][2] * zw_src;
276 bs = ma[2][0] * xw_src + ma[2][1] * yw_src + ma[2][2] * zw_src;
277 rd = ma[0][0] * xw_dst + ma[0][1] * yw_dst + ma[0][2] * zw_dst;
278 gd = ma[1][0] * xw_dst + ma[1][1] * yw_dst + ma[1][2] * zw_dst;
279 bd = ma[2][0] * xw_dst + ma[2][1] * yw_dst + ma[2][2] * zw_dst;
280 fac[0][0] = rd / rs;
281 fac[1][1] = gd / gs;
282 fac[2][2] = bd / bs;
283 fac[0][1] = fac[0][2] = fac[1][0] = fac[1][2] = fac[2][0] = fac[2][1] = 0.0;
284 ff_matrix_mul_3x3(tmp, ma, fac);
285 ff_matrix_mul_3x3(out, tmp, mai);
286 }
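
Written out, the matrix assembled above is the cone-response-domain scaling described on the Lindbloom page cited in the comment:

    out = Ma^-1 . diag(rd/rs, gd/gs, bd/bs) . Ma

where (rs, gs, bs) = Ma . (xw, yw, 1 - xw - yw) for the source white point, (rd, gd, bd) is the same product for the destination white point, and Ma is the Bradford or Von Kries cone-response matrix taken from ma_tbl.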
287
288 static void apply_lut(int16_t *buf[3], ptrdiff_t stride,
289 int w, int h, const int16_t *lut)
290 {
291 int y, x, n;
292
293 for (n = 0; n < 3; n++) {
294 int16_t *data = buf[n];
295
296 for (y = 0; y < h; y++) {
297 for (x = 0; x < w; x++)
298 data[x] = lut[av_clip_uintp2(2048 + data[x], 15)];
299
300 data += stride;
301 }
302 }
303 }
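
A worked example of the indexing convention shared by fill_gamma_table() and apply_lut(): samples treat 28672 as 1.0 and the table origin sits at index 2048, so

    index = av_clip_uintp2(2048 + x, 15)          (clipped to [0, 32767])
    x =     0 (0.0)  ->  index  2048, built from v = (2048  - 2048) / 28672 = 0.0
    x = 28672 (1.0)  ->  index 30720, built from v = (30720 - 2048) / 28672 = 1.0
    x = -2048        ->  index     0, the largest undershoot kept distinct before clipping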
304
305 typedef struct ThreadData {
306 AVFrame *in, *out;
307 ptrdiff_t in_linesize[3], out_linesize[3];
308 int in_ss_h, out_ss_h;
309 } ThreadData;
310
311 static int convert(AVFilterContext *ctx, void *data, int job_nr, int n_jobs)
312 {
313 const ThreadData *td = data;
314 ColorSpaceContext *s = ctx->priv;
315 uint8_t *in_data[3], *out_data[3];
316 int16_t *rgb[3];
317 int h_in = (td->in->height + 1) >> 1;
318 int h1 = 2 * (job_nr * h_in / n_jobs), h2 = 2 * ((job_nr + 1) * h_in / n_jobs);
319 int w = td->in->width, h = h2 - h1;
320
321 in_data[0] = td->in->data[0] + td->in_linesize[0] * h1;
322 in_data[1] = td->in->data[1] + td->in_linesize[1] * (h1 >> td->in_ss_h);
323 in_data[2] = td->in->data[2] + td->in_linesize[2] * (h1 >> td->in_ss_h);
324 out_data[0] = td->out->data[0] + td->out_linesize[0] * h1;
325 out_data[1] = td->out->data[1] + td->out_linesize[1] * (h1 >> td->out_ss_h);
326 out_data[2] = td->out->data[2] + td->out_linesize[2] * (h1 >> td->out_ss_h);
327 rgb[0] = s->rgb[0] + s->rgb_stride * h1;
328 rgb[1] = s->rgb[1] + s->rgb_stride * h1;
329 rgb[2] = s->rgb[2] + s->rgb_stride * h1;
330
331 // FIXME for SIMD, also make sure we process pictures with negative stride
332 // top-down, so we don't overwrite lines of data with the padding that
333 // precedes them in the same buffer (same as swscale)
334
335 if (s->yuv2yuv_fastmode) {
336 // FIXME possibly use a fast mode in case only the y range changes?
337 // since in that case, only the diagonal entries in yuv2yuv_coeffs[]
338 // are non-zero
339 s->yuv2yuv(out_data, td->out_linesize, in_data, td->in_linesize, w, h,
340 s->yuv2yuv_coeffs, s->yuv_offset);
341 } else {
342 // FIXME maybe (for caching efficiency) do pipeline per-line instead of
343 // full buffer per function? (Or, since yuv2rgb requires 2 lines: per
344 // 2 lines, for yuv420.)
345 /*
346 * General design:
347 * - yuv2rgb converts from whatever range the input was ([16-235/240] or
348 * [0,255] or the 10/12bpp equivalents thereof) to an integer version
349 * of RGB in pseudo-restricted 15+sign bits. That means that the float
350 * range [0.0,1.0] is in [0,28672], and the remainder of the int16_t
351 * range is used for overflow/underflow outside the representable
352 * range of this RGB type. rgb2yuv is the exact opposite.
353 * - gamma correction is done using a LUT since that appears to work
354 * fairly fast.
355 * - If the input is chroma-subsampled (420/422), the yuv2rgb conversion
356 * (or rgb2yuv conversion) uses nearest-neighbour sampling to read the
357 * chroma pixels at luma resolution. If you want a fancier chroma scaling
358 * filter, you can use swscale to convert to yuv444p first.
359 * - all coefficients are 14bit (so in the [-2.0,2.0] range).
360 */
361 s->yuv2rgb(rgb, s->rgb_stride, in_data, td->in_linesize, w, h,
362 s->yuv2rgb_coeffs, s->yuv_offset[0]);
363 if (!s->rgb2rgb_passthrough) {
364 apply_lut(rgb, s->rgb_stride, w, h, s->lin_lut);
365 if (!s->lrgb2lrgb_passthrough)
366 s->dsp.multiply3x3(rgb, s->rgb_stride, w, h, s->lrgb2lrgb_coeffs);
367 apply_lut(rgb, s->rgb_stride, w, h, s->delin_lut);
368 }
369 if (s->dither == DITHER_FSB) {
370 s->rgb2yuv_fsb(out_data, td->out_linesize, rgb, s->rgb_stride, w, h,
371 s->rgb2yuv_coeffs, s->yuv_offset[1], s->dither_scratch);
372 } else {
373 s->rgb2yuv(out_data, td->out_linesize, rgb, s->rgb_stride, w, h,
374 s->rgb2yuv_coeffs, s->yuv_offset[1]);
375 }
376 }
377
378 return 0;
379 }
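
The coefficient layout used by the DSP calls above is plain Q14 fixed point (16384 == 1.0, hence the comment's [-2.0,2.0] range), with each coefficient replicated eight times for SIMD. A minimal single-pixel sketch of the 3x3 multiply convention used by the lrgb2lrgb step, shown only to illustrate the arithmetic (not the actual ColorSpaceDSP implementation, which processes whole planes):

    /* Sketch only: one pixel of a Q14 3x3 multiply with round-to-nearest and
     * saturation; av_clip_int16() comes from libavutil/common.h. */
    static void mul3x3_px_sketch(int16_t out[3], const int16_t in[3],
                                 const int16_t coeff[3][3][8])
    {
        for (int m = 0; m < 3; m++) {
            int v = coeff[m][0][0] * in[0] +
                    coeff[m][1][0] * in[1] +
                    coeff[m][2][0] * in[2];
            out[m] = av_clip_int16((v + 8192) >> 14);   /* 8192 = 0.5 in Q14 */
        }
    }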
380
381 static int get_range_off(AVFilterContext *ctx, int *off,
382 int *y_rng, int *uv_rng,
383 enum AVColorRange rng, int depth)
384 {
385 switch (rng) {
386 case AVCOL_RANGE_UNSPECIFIED: {
387 ColorSpaceContext *s = ctx->priv;
388
389 if (!s->did_warn_range) {
390 av_log(ctx, AV_LOG_WARNING, "Input range not set, assuming tv/mpeg\n");
391 s->did_warn_range = 1;
392 }
393 }
394 // fall-through
395 case AVCOL_RANGE_MPEG:
396 *off = 16 << (depth - 8);
397 *y_rng = 219 << (depth - 8);
398 *uv_rng = 224 << (depth - 8);
399 break;
400 case AVCOL_RANGE_JPEG:
401 *off = 0;
402 *y_rng = *uv_rng = (256 << (depth - 8)) - 1;
403 break;
404 default:
405 return AVERROR(EINVAL);
406 }
407
408 return 0;
409 }
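
For reference, the values this produces (directly from the shifts above):

    8-bit  tv/mpeg:  off = 16,  y_rng = 219 (16..235),  uv_rng = 224 (16..240)
    10-bit tv/mpeg:  off = 64,  y_rng = 876,            uv_rng = 896
    8-bit  pc/jpeg:  off = 0,   y_rng = uv_rng = 255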
410
411 static int create_filtergraph(AVFilterContext *ctx,
412 const AVFrame *in, const AVFrame *out)
413 {
414 ColorSpaceContext *s = ctx->priv;
415 const AVPixFmtDescriptor *in_desc = av_pix_fmt_desc_get(in->format);
416 const AVPixFmtDescriptor *out_desc = av_pix_fmt_desc_get(out->format);
417 int m, n, o, res, fmt_identical, redo_yuv2rgb = 0, redo_rgb2yuv = 0;
418
419 #define supported_depth(d) ((d) == 8 || (d) == 10 || (d) == 12)
420 #define supported_subsampling(lcw, lch) \
421 (((lcw) == 0 && (lch) == 0) || ((lcw) == 1 && (lch) == 0) || ((lcw) == 1 && (lch) == 1))
422 #define supported_format(d) \
423 ((d) != NULL && (d)->nb_components == 3 && \
424 !((d)->flags & AV_PIX_FMT_FLAG_RGB) && \
425 supported_depth((d)->comp[0].depth) && \
426 supported_subsampling((d)->log2_chroma_w, (d)->log2_chroma_h))
427
428 if (!supported_format(in_desc)) {
429 av_log(ctx, AV_LOG_ERROR,
430 "Unsupported input format %d (%s) or bitdepth (%d)\n",
431 in->format, av_get_pix_fmt_name(in->format),
432 in_desc ? in_desc->comp[0].depth : -1);
433 return AVERROR(EINVAL);
434 }
435 if (!supported_format(out_desc)) {
436 av_log(ctx, AV_LOG_ERROR,
437 "Unsupported output format %d (%s) or bitdepth (%d)\n",
438 out->format, av_get_pix_fmt_name(out->format),
439 out_desc ? out_desc->comp[0].depth : -1);
440 return AVERROR(EINVAL);
441 }
442
443 if (in->color_primaries != s->in_prm) s->in_primaries = NULL;
444 if (out->color_primaries != s->out_prm) s->out_primaries = NULL;
445 if (in->color_trc != s->in_trc) s->in_txchr = NULL;
446 if (out->color_trc != s->out_trc) s->out_txchr = NULL;
447 if (in->colorspace != s->in_csp ||
448 in->color_range != s->in_rng) s->in_lumacoef = NULL;
449 if (out->color_range != s->out_rng) s->rgb2yuv = NULL;
450
451 if (!s->out_primaries || !s->in_primaries) {
452 s->in_prm = in->color_primaries;
453 if (s->user_iall != CS_UNSPECIFIED)
454 s->in_prm = default_prm[FFMIN(s->user_iall, CS_NB)];
455 if (s->user_iprm != AVCOL_PRI_UNSPECIFIED)
456 s->in_prm = s->user_iprm;
457 s->in_primaries = av_csp_primaries_desc_from_id(s->in_prm);
458 if (!s->in_primaries) {
459 av_log(ctx, AV_LOG_ERROR,
460 "Unsupported input primaries %d (%s)\n",
461 s->in_prm, av_color_primaries_name(s->in_prm));
462 return AVERROR(EINVAL);
463 }
464 s->out_prm = out->color_primaries;
465 s->out_primaries = av_csp_primaries_desc_from_id(s->out_prm);
466 if (!s->out_primaries) {
467 if (s->out_prm == AVCOL_PRI_UNSPECIFIED) {
468 if (s->user_all == CS_UNSPECIFIED) {
469 av_log(ctx, AV_LOG_ERROR, "Please specify output primaries\n");
470 } else {
471 av_log(ctx, AV_LOG_ERROR,
472 "Unsupported output color property %d\n", s->user_all);
473 }
474 } else {
475 av_log(ctx, AV_LOG_ERROR,
476 "Unsupported output primaries %d (%s)\n",
477 s->out_prm, av_color_primaries_name(s->out_prm));
478 }
479 return AVERROR(EINVAL);
480 }
481 s->lrgb2lrgb_passthrough = !memcmp(s->in_primaries, s->out_primaries,
482 sizeof(*s->in_primaries));
483 if (!s->lrgb2lrgb_passthrough) {
484 double rgb2xyz[3][3], xyz2rgb[3][3], rgb2rgb[3][3];
485 const AVWhitepointCoefficients *wp_out, *wp_in;
486
487 wp_out = &s->out_primaries->wp;
488 wp_in = &s->in_primaries->wp;
489 ff_fill_rgb2xyz_table(&s->out_primaries->prim, wp_out, rgb2xyz);
490 ff_matrix_invert_3x3(rgb2xyz, xyz2rgb);
491 ff_fill_rgb2xyz_table(&s->in_primaries->prim, wp_in, rgb2xyz);
492 if (memcmp(wp_in, wp_out, sizeof(*wp_in)) != 0 &&
493 s->wp_adapt != WP_ADAPT_IDENTITY) {
494 double wpconv[3][3], tmp[3][3];
495
496 fill_whitepoint_conv_table(wpconv, s->wp_adapt, &s->in_primaries->wp,
497 &s->out_primaries->wp);
498 ff_matrix_mul_3x3(tmp, rgb2xyz, wpconv);
499 ff_matrix_mul_3x3(rgb2rgb, tmp, xyz2rgb);
500 } else {
501 ff_matrix_mul_3x3(rgb2rgb, rgb2xyz, xyz2rgb);
502 }
503 for (m = 0; m < 3; m++)
504 for (n = 0; n < 3; n++) {
505 s->lrgb2lrgb_coeffs[m][n][0] = lrint(16384.0 * rgb2rgb[m][n]);
506 for (o = 1; o < 8; o++)
507 s->lrgb2lrgb_coeffs[m][n][o] = s->lrgb2lrgb_coeffs[m][n][0];
508 }
509
510 }
511 }
512
513 if (!s->in_txchr) {
514 av_freep(&s->lin_lut);
515 s->in_trc = in->color_trc;
516 if (s->user_iall != CS_UNSPECIFIED)
517 s->in_trc = default_trc[FFMIN(s->user_iall, CS_NB)];
518 if (s->user_itrc != AVCOL_TRC_UNSPECIFIED)
519 s->in_trc = s->user_itrc;
520 s->in_txchr = get_transfer_characteristics(s->in_trc);
521 if (!s->in_txchr) {
522 av_log(ctx, AV_LOG_ERROR,
523 "Unsupported input transfer characteristics %d (%s)\n",
524 s->in_trc, av_color_transfer_name(s->in_trc));
525 return AVERROR(EINVAL);
526 }
527 }
528
529 if (!s->out_txchr) {
530 av_freep(&s->lin_lut);
531 s->out_trc = out->color_trc;
532 s->out_txchr = get_transfer_characteristics(s->out_trc);
533 if (!s->out_txchr) {
534 if (s->out_trc == AVCOL_TRC_UNSPECIFIED) {
535 if (s->user_all == CS_UNSPECIFIED) {
536 av_log(ctx, AV_LOG_ERROR,
537 "Please specify output transfer characteristics\n");
538 } else {
539 av_log(ctx, AV_LOG_ERROR,
540 "Unsupported output color property %d\n", s->user_all);
541 }
542 } else {
543 av_log(ctx, AV_LOG_ERROR,
544 "Unsupported output transfer characteristics %d (%s)\n",
545 s->out_trc, av_color_transfer_name(s->out_trc));
546 }
547 return AVERROR(EINVAL);
548 }
549 }
550
551 s->rgb2rgb_passthrough = s->fast_mode || (s->lrgb2lrgb_passthrough &&
552 !memcmp(s->in_txchr, s->out_txchr, sizeof(*s->in_txchr)));
553 if (!s->rgb2rgb_passthrough && !s->lin_lut) {
554 res = fill_gamma_table(s);
555 if (res < 0)
556 return res;
557 }
558
559 if (!s->in_lumacoef) {
560 s->in_csp = in->colorspace;
561 if (s->user_iall != CS_UNSPECIFIED)
562 s->in_csp = default_csp[FFMIN(s->user_iall, CS_NB)];
563 if (s->user_icsp != AVCOL_SPC_UNSPECIFIED)
564 s->in_csp = s->user_icsp;
565 s->in_rng = in->color_range;
566 if (s->user_irng != AVCOL_RANGE_UNSPECIFIED)
567 s->in_rng = s->user_irng;
568 s->in_lumacoef = av_csp_luma_coeffs_from_avcsp(s->in_csp);
569 if (!s->in_lumacoef) {
570 av_log(ctx, AV_LOG_ERROR,
571 "Unsupported input colorspace %d (%s)\n",
572 s->in_csp, av_color_space_name(s->in_csp));
573 return AVERROR(EINVAL);
574 }
575 redo_yuv2rgb = 1;
576 }
577
578 if (!s->rgb2yuv) {
579 s->out_rng = out->color_range;
580 redo_rgb2yuv = 1;
581 }
582
583 fmt_identical = in_desc->log2_chroma_h == out_desc->log2_chroma_h &&
584 in_desc->log2_chroma_w == out_desc->log2_chroma_w;
585 s->yuv2yuv_fastmode = s->rgb2rgb_passthrough && fmt_identical;
586 s->yuv2yuv_passthrough = s->yuv2yuv_fastmode && s->in_rng == s->out_rng &&
587 !memcmp(s->in_lumacoef, s->out_lumacoef,
588 sizeof(*s->in_lumacoef)) &&
589 in_desc->comp[0].depth == out_desc->comp[0].depth;
590 if (!s->yuv2yuv_passthrough) {
591 if (redo_yuv2rgb) {
592 double rgb2yuv[3][3], (*yuv2rgb)[3] = s->yuv2rgb_dbl_coeffs;
593 int off, bits, in_rng;
594
595 res = get_range_off(ctx, &off, &s->in_y_rng, &s->in_uv_rng,
596 s->in_rng, in_desc->comp[0].depth);
597 if (res < 0) {
598 av_log(ctx, AV_LOG_ERROR,
599 "Unsupported input color range %d (%s)\n",
600 s->in_rng, av_color_range_name(s->in_rng));
601 return res;
602 }
603 for (n = 0; n < 8; n++)
604 s->yuv_offset[0][n] = off;
605 ff_fill_rgb2yuv_table(s->in_lumacoef, rgb2yuv);
606 ff_matrix_invert_3x3(rgb2yuv, yuv2rgb);
607 bits = 1 << (in_desc->comp[0].depth - 1);
608 for (n = 0; n < 3; n++) {
609 for (in_rng = s->in_y_rng, m = 0; m < 3; m++, in_rng = s->in_uv_rng) {
610 s->yuv2rgb_coeffs[n][m][0] = lrint(28672 * bits * yuv2rgb[n][m] / in_rng);
611 for (o = 1; o < 8; o++)
612 s->yuv2rgb_coeffs[n][m][o] = s->yuv2rgb_coeffs[n][m][0];
613 }
614 }
615 av_assert2(s->yuv2rgb_coeffs[0][1][0] == 0);
616 av_assert2(s->yuv2rgb_coeffs[2][2][0] == 0);
617 av_assert2(s->yuv2rgb_coeffs[0][0][0] == s->yuv2rgb_coeffs[1][0][0]);
618 av_assert2(s->yuv2rgb_coeffs[0][0][0] == s->yuv2rgb_coeffs[2][0][0]);
619 s->yuv2rgb = s->dsp.yuv2rgb[(in_desc->comp[0].depth - 8) >> 1]
620 [in_desc->log2_chroma_h + in_desc->log2_chroma_w];
621 }
622
623 if (redo_rgb2yuv) {
624 double (*rgb2yuv)[3] = s->rgb2yuv_dbl_coeffs;
625 int off, out_rng, bits;
626
627 res = get_range_off(ctx, &off, &s->out_y_rng, &s->out_uv_rng,
628 s->out_rng, out_desc->comp[0].depth);
629 if (res < 0) {
630 av_log(ctx, AV_LOG_ERROR,
631 "Unsupported output color range %d (%s)\n",
632 s->out_rng, av_color_range_name(s->out_rng));
633 return res;
634 }
635 for (n = 0; n < 8; n++)
636 s->yuv_offset[1][n] = off;
637 ff_fill_rgb2yuv_table(s->out_lumacoef, rgb2yuv);
638 bits = 1 << (29 - out_desc->comp[0].depth);
639 for (out_rng = s->out_y_rng, n = 0; n < 3; n++, out_rng = s->out_uv_rng) {
640 for (m = 0; m < 3; m++) {
641 s->rgb2yuv_coeffs[n][m][0] = lrint(bits * out_rng * rgb2yuv[n][m] / 28672);
642 for (o = 1; o < 8; o++)
643 s->rgb2yuv_coeffs[n][m][o] = s->rgb2yuv_coeffs[n][m][0];
644 }
645 }
646 av_assert2(s->rgb2yuv_coeffs[1][2][0] == s->rgb2yuv_coeffs[2][0][0]);
647 s->rgb2yuv = s->dsp.rgb2yuv[(out_desc->comp[0].depth - 8) >> 1]
648 [out_desc->log2_chroma_h + out_desc->log2_chroma_w];
649 s->rgb2yuv_fsb = s->dsp.rgb2yuv_fsb[(out_desc->comp[0].depth - 8) >> 1]
650 [out_desc->log2_chroma_h + out_desc->log2_chroma_w];
651 }
652
653 if (s->yuv2yuv_fastmode && (redo_yuv2rgb || redo_rgb2yuv)) {
654 int idepth = in_desc->comp[0].depth, odepth = out_desc->comp[0].depth;
655 double (*rgb2yuv)[3] = s->rgb2yuv_dbl_coeffs;
656 double (*yuv2rgb)[3] = s->yuv2rgb_dbl_coeffs;
657 double yuv2yuv[3][3];
658 int in_rng, out_rng;
659
660 ff_matrix_mul_3x3(yuv2yuv, yuv2rgb, rgb2yuv);
661 for (out_rng = s->out_y_rng, m = 0; m < 3; m++, out_rng = s->out_uv_rng) {
662 for (in_rng = s->in_y_rng, n = 0; n < 3; n++, in_rng = s->in_uv_rng) {
663 s->yuv2yuv_coeffs[m][n][0] =
664 lrint(16384 * yuv2yuv[m][n] * out_rng * (1 << idepth) /
665 (in_rng * (1 << odepth)));
666 for (o = 1; o < 8; o++)
667 s->yuv2yuv_coeffs[m][n][o] = s->yuv2yuv_coeffs[m][n][0];
668 }
669 }
670 av_assert2(s->yuv2yuv_coeffs[1][0][0] == 0);
671 av_assert2(s->yuv2yuv_coeffs[2][0][0] == 0);
672 s->yuv2yuv = s->dsp.yuv2yuv[(idepth - 8) >> 1][(odepth - 8) >> 1]
673 [in_desc->log2_chroma_h + in_desc->log2_chroma_w];
674 }
675 }
676
677 return 0;
678 }
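
In the fast path the whole conversion collapses into one fixed-point matrix, so convert() only needs a single 3x3 multiply plus offsets per pixel. The coefficients are quantized as in the loop above:

    yuv2yuv = rgb2yuv_out . yuv2rgb_in
    yuv2yuv_coeffs[m][n] = lrint( 16384 * yuv2yuv[m][n]
                                  * out_rng * 2^idepth / (in_rng * 2^odepth) )

where out_rng follows the output row m (luma range for the first row, chroma range otherwise) and in_rng follows the input column n in the same way.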
679
680 static av_cold int init(AVFilterContext *ctx)
681 {
682 ColorSpaceContext *s = ctx->priv;
683
684 s->out_csp = s->user_csp == AVCOL_SPC_UNSPECIFIED ?
685 default_csp[FFMIN(s->user_all, CS_NB)] : s->user_csp;
686 s->out_lumacoef = av_csp_luma_coeffs_from_avcsp(s->out_csp);
687 if (!s->out_lumacoef) {
688 if (s->out_csp == AVCOL_SPC_UNSPECIFIED) {
689 if (s->user_all == CS_UNSPECIFIED) {
690 av_log(ctx, AV_LOG_ERROR,
691 "Please specify output colorspace\n");
692 } else {
693 av_log(ctx, AV_LOG_ERROR,
694 "Unsupported output color property %d\n", s->user_all);
695 }
696 } else {
697 av_log(ctx, AV_LOG_ERROR,
698 "Unsupported output colorspace %d (%s)\n", s->out_csp,
699 av_color_space_name(s->out_csp));
700 }
701 return AVERROR(EINVAL);
702 }
703
704 ff_colorspacedsp_init(&s->dsp);
705
706 return 0;
707 }
708
709 static void uninit(AVFilterContext *ctx)
710 {
711 ColorSpaceContext *s = ctx->priv;
712
713 av_freep(&s->rgb[0]);
714 av_freep(&s->rgb[1]);
715 av_freep(&s->rgb[2]);
716 s->rgb_sz = 0;
717 av_freep(&s->dither_scratch_base[0][0]);
718 av_freep(&s->dither_scratch_base[0][1]);
719 av_freep(&s->dither_scratch_base[1][0]);
720 av_freep(&s->dither_scratch_base[1][1]);
721 av_freep(&s->dither_scratch_base[2][0]);
722 av_freep(&s->dither_scratch_base[2][1]);
723
724 av_freep(&s->lin_lut);
725 }
726
727 static int filter_frame(AVFilterLink *link, AVFrame *in)
728 {
729 AVFilterContext *ctx = link->dst;
730 AVFilterLink *outlink = ctx->outputs[0];
731 ColorSpaceContext *s = ctx->priv;
732 // FIXME if yuv2yuv_passthrough, don't get a new buffer but use the
733 // input one if it is writable *OR* the actual literal values of in_*
734 // and out_* are identical (not just their respective properties)
735 AVFrame *out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
736 int res;
737 ptrdiff_t rgb_stride = FFALIGN(in->width * sizeof(int16_t), 32);
738 unsigned rgb_sz = rgb_stride * in->height;
739 ThreadData td;
740
741 if (!out) {
742 av_frame_free(&in);
743 return AVERROR(ENOMEM);
744 }
745 res = av_frame_copy_props(out, in);
746 if (res < 0) {
747 av_frame_free(&in);
748 av_frame_free(&out);
749 return res;
750 }
751
752 out->colorspace = s->out_csp;
753 out->color_range = s->user_rng == AVCOL_RANGE_UNSPECIFIED ?
754 in->color_range : s->user_rng;
755 out->color_primaries = s->user_prm == AVCOL_PRI_UNSPECIFIED ?
756 default_prm[FFMIN(s->user_all, CS_NB)] : s->user_prm;
757 if (s->user_trc == AVCOL_TRC_UNSPECIFIED) {
758 const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(out->format);
759
760 out->color_trc = default_trc[FFMIN(s->user_all, CS_NB)];
761 if (out->color_trc == AVCOL_TRC_BT2020_10 && desc && desc->comp[0].depth >= 12)
762 out->color_trc = AVCOL_TRC_BT2020_12;
763 } else {
764 out->color_trc = s->user_trc;
765 }
766
767 if (out->color_primaries != in->color_primaries || out->color_trc != in->color_trc) {
768 av_frame_side_data_remove_by_props(&out->side_data, &out->nb_side_data,
769 AV_SIDE_DATA_PROP_COLOR_DEPENDENT);
770 }
771
772 if (rgb_sz != s->rgb_sz) {
773 const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(out->format);
774 int uvw = in->width >> desc->log2_chroma_w;
775
776 av_freep(&s->rgb[0]);
777 av_freep(&s->rgb[1]);
778 av_freep(&s->rgb[2]);
779 s->rgb_sz = 0;
780 av_freep(&s->dither_scratch_base[0][0]);
781 av_freep(&s->dither_scratch_base[0][1]);
782 av_freep(&s->dither_scratch_base[1][0]);
783 av_freep(&s->dither_scratch_base[1][1]);
784 av_freep(&s->dither_scratch_base[2][0]);
785 av_freep(&s->dither_scratch_base[2][1]);
786
787 s->rgb[0] = av_malloc(rgb_sz);
788 s->rgb[1] = av_malloc(rgb_sz);
789 s->rgb[2] = av_malloc(rgb_sz);
790 s->dither_scratch_base[0][0] =
791 av_malloc(sizeof(*s->dither_scratch_base[0][0]) * (in->width + 4));
792 s->dither_scratch_base[0][1] =
793 av_malloc(sizeof(*s->dither_scratch_base[0][1]) * (in->width + 4));
794 s->dither_scratch_base[1][0] =
795 av_malloc(sizeof(*s->dither_scratch_base[1][0]) * (uvw + 4));
796 s->dither_scratch_base[1][1] =
797 av_malloc(sizeof(*s->dither_scratch_base[1][1]) * (uvw + 4));
798 s->dither_scratch_base[2][0] =
799 av_malloc(sizeof(*s->dither_scratch_base[2][0]) * (uvw + 4));
800 s->dither_scratch_base[2][1] =
801 av_malloc(sizeof(*s->dither_scratch_base[2][1]) * (uvw + 4));
802 s->dither_scratch[0][0] = &s->dither_scratch_base[0][0][1];
803 s->dither_scratch[0][1] = &s->dither_scratch_base[0][1][1];
804 s->dither_scratch[1][0] = &s->dither_scratch_base[1][0][1];
805 s->dither_scratch[1][1] = &s->dither_scratch_base[1][1][1];
806 s->dither_scratch[2][0] = &s->dither_scratch_base[2][0][1];
807 s->dither_scratch[2][1] = &s->dither_scratch_base[2][1][1];
808 if (!s->rgb[0] || !s->rgb[1] || !s->rgb[2] ||
809 !s->dither_scratch_base[0][0] || !s->dither_scratch_base[0][1] ||
810 !s->dither_scratch_base[1][0] || !s->dither_scratch_base[1][1] ||
811 !s->dither_scratch_base[2][0] || !s->dither_scratch_base[2][1]) {
812 uninit(ctx);
813 av_frame_free(&in);
814 av_frame_free(&out);
815 return AVERROR(ENOMEM);
816 }
817 s->rgb_sz = rgb_sz;
818 }
819 res = create_filtergraph(ctx, in, out);
820 if (res < 0) {
821 av_frame_free(&in);
822 av_frame_free(&out);
823 return res;
824 }
825 s->rgb_stride = rgb_stride / sizeof(int16_t);
826 td.in = in;
827 td.out = out;
828 td.in_linesize[0] = in->linesize[0];
829 td.in_linesize[1] = in->linesize[1];
830 td.in_linesize[2] = in->linesize[2];
831 td.out_linesize[0] = out->linesize[0];
832 td.out_linesize[1] = out->linesize[1];
833 td.out_linesize[2] = out->linesize[2];
834 td.in_ss_h = av_pix_fmt_desc_get(in->format)->log2_chroma_h;
835 td.out_ss_h = av_pix_fmt_desc_get(out->format)->log2_chroma_h;
836 if (s->yuv2yuv_passthrough) {
837 res = av_frame_copy(out, in);
838 if (res < 0) {
839 av_frame_free(&in);
840 av_frame_free(&out);
841 return res;
842 }
843 } else {
844 ff_filter_execute(ctx, convert, &td, NULL,
845 FFMIN((in->height + 1) >> 1, ff_filter_get_nb_threads(ctx)));
846 }
847 av_frame_free(&in);
848
849 return ff_filter_frame(outlink, out);
850 }
851
852 static int query_formats(const AVFilterContext *ctx,
853 AVFilterFormatsConfig **cfg_in,
854 AVFilterFormatsConfig **cfg_out)
855 {
856 static const enum AVPixelFormat pix_fmts[] = {
857 AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P,
858 AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10,
859 AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12,
860 AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P,
861 AV_PIX_FMT_NONE
862 };
863 int res;
864 const ColorSpaceContext *s = ctx->priv;
865 AVFilterFormats *formats;
866
867 res = ff_formats_ref(ff_make_formats_list_singleton(s->out_csp), &cfg_out[0]->color_spaces);
868 if (res < 0)
869 return res;
870 if (s->user_rng != AVCOL_RANGE_UNSPECIFIED) {
871 res = ff_formats_ref(ff_make_formats_list_singleton(s->user_rng), &cfg_out[0]->color_ranges);
872 if (res < 0)
873 return res;
874 }
875
876 formats = ff_make_format_list(pix_fmts);
877 if (!formats)
878 return AVERROR(ENOMEM);
879 if (s->user_format == AV_PIX_FMT_NONE)
880 return ff_set_common_formats2(ctx, cfg_in, cfg_out, formats);
881
882 res = ff_formats_ref(formats, &cfg_in[0]->formats);
883 if (res < 0)
884 return res;
885
886 formats = NULL;
887 res = ff_add_format(&formats, s->user_format);
888 if (res < 0)
889 return res;
890
891 return ff_formats_ref(formats, &cfg_out[0]->formats);
892 }
893
894 static int config_props(AVFilterLink *outlink)
895 {
896 AVFilterContext *ctx = outlink->dst;
897 AVFilterLink *inlink = outlink->src->inputs[0];
898
899 if (inlink->w % 2 || inlink->h % 2) {
900 av_log(ctx, AV_LOG_ERROR, "Invalid odd size (%dx%d)\n",
901 inlink->w, inlink->h);
902 return AVERROR_PATCHWELCOME;
903 }
904
905 outlink->w = inlink->w;
906 outlink->h = inlink->h;
907 outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
908 outlink->time_base = inlink->time_base;
909
910 return 0;
911 }
912
913 #define OFFSET(x) offsetof(ColorSpaceContext, x)
914 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM
915 #define ENUM(x, y, z) { x, "", 0, AV_OPT_TYPE_CONST, { .i64 = y }, INT_MIN, INT_MAX, FLAGS, .unit = z }
916
917 static const AVOption colorspace_options[] = {
918 { "all", "Set all color properties together",
919 OFFSET(user_all), AV_OPT_TYPE_INT, { .i64 = CS_UNSPECIFIED },
920 CS_UNSPECIFIED, CS_NB - 1, FLAGS, .unit = "all" },
921 ENUM("bt470m", CS_BT470M, "all"),
922 ENUM("bt470bg", CS_BT470BG, "all"),
923 ENUM("bt601-6-525", CS_BT601_6_525, "all"),
924 ENUM("bt601-6-625", CS_BT601_6_625, "all"),
925 ENUM("bt709", CS_BT709, "all"),
926 ENUM("smpte170m", CS_SMPTE170M, "all"),
927 ENUM("smpte240m", CS_SMPTE240M, "all"),
928 ENUM("bt2020", CS_BT2020, "all"),
929
930 { "space", "Output colorspace",
931 OFFSET(user_csp), AV_OPT_TYPE_INT, { .i64 = AVCOL_SPC_UNSPECIFIED },
932 AVCOL_SPC_RGB, AVCOL_SPC_NB - 1, FLAGS, .unit = "csp"},
933 ENUM("bt709", AVCOL_SPC_BT709, "csp"),
934 ENUM("fcc", AVCOL_SPC_FCC, "csp"),
935 ENUM("bt470bg", AVCOL_SPC_BT470BG, "csp"),
936 ENUM("smpte170m", AVCOL_SPC_SMPTE170M, "csp"),
937 ENUM("smpte240m", AVCOL_SPC_SMPTE240M, "csp"),
938 ENUM("ycgco", AVCOL_SPC_YCGCO, "csp"),
939 ENUM("gbr", AVCOL_SPC_RGB, "csp"),
940 ENUM("bt2020nc", AVCOL_SPC_BT2020_NCL, "csp"),
941 ENUM("bt2020ncl", AVCOL_SPC_BT2020_NCL, "csp"),
942
943 { "range", "Output color range",
944 OFFSET(user_rng), AV_OPT_TYPE_INT, { .i64 = AVCOL_RANGE_UNSPECIFIED },
945 AVCOL_RANGE_UNSPECIFIED, AVCOL_RANGE_NB - 1, FLAGS, .unit = "rng" },
946 ENUM("tv", AVCOL_RANGE_MPEG, "rng"),
947 ENUM("mpeg", AVCOL_RANGE_MPEG, "rng"),
948 ENUM("pc", AVCOL_RANGE_JPEG, "rng"),
949 ENUM("jpeg", AVCOL_RANGE_JPEG, "rng"),
950
951 { "primaries", "Output color primaries",
952 OFFSET(user_prm), AV_OPT_TYPE_INT, { .i64 = AVCOL_PRI_UNSPECIFIED },
953 AVCOL_PRI_RESERVED0, AVCOL_PRI_NB - 1, FLAGS, .unit = "prm" },
954 ENUM("bt709", AVCOL_PRI_BT709, "prm"),
955 ENUM("bt470m", AVCOL_PRI_BT470M, "prm"),
956 ENUM("bt470bg", AVCOL_PRI_BT470BG, "prm"),
957 ENUM("smpte170m", AVCOL_PRI_SMPTE170M, "prm"),
958 ENUM("smpte240m", AVCOL_PRI_SMPTE240M, "prm"),
959 ENUM("smpte428", AVCOL_PRI_SMPTE428, "prm"),
960 ENUM("film", AVCOL_PRI_FILM, "prm"),
961 ENUM("smpte431", AVCOL_PRI_SMPTE431, "prm"),
962 ENUM("smpte432", AVCOL_PRI_SMPTE432, "prm"),
963 ENUM("bt2020", AVCOL_PRI_BT2020, "prm"),
964 ENUM("jedec-p22", AVCOL_PRI_JEDEC_P22, "prm"),
965 ENUM("ebu3213", AVCOL_PRI_EBU3213, "prm"),
966
967 { "trc", "Output transfer characteristics",
968 OFFSET(user_trc), AV_OPT_TYPE_INT, { .i64 = AVCOL_TRC_UNSPECIFIED },
969 AVCOL_TRC_RESERVED0, AVCOL_TRC_NB - 1, FLAGS, .unit = "trc" },
970 ENUM("bt709", AVCOL_TRC_BT709, "trc"),
971 ENUM("bt470m", AVCOL_TRC_GAMMA22, "trc"),
972 ENUM("gamma22", AVCOL_TRC_GAMMA22, "trc"),
973 ENUM("bt470bg", AVCOL_TRC_GAMMA28, "trc"),
974 ENUM("gamma28", AVCOL_TRC_GAMMA28, "trc"),
975 ENUM("smpte170m", AVCOL_TRC_SMPTE170M, "trc"),
976 ENUM("smpte240m", AVCOL_TRC_SMPTE240M, "trc"),
977 ENUM("linear", AVCOL_TRC_LINEAR, "trc"),
978 ENUM("srgb", AVCOL_TRC_IEC61966_2_1, "trc"),
979 ENUM("iec61966-2-1", AVCOL_TRC_IEC61966_2_1, "trc"),
980 ENUM("xvycc", AVCOL_TRC_IEC61966_2_4, "trc"),
981 ENUM("iec61966-2-4", AVCOL_TRC_IEC61966_2_4, "trc"),
982 ENUM("bt2020-10", AVCOL_TRC_BT2020_10, "trc"),
983 ENUM("bt2020-12", AVCOL_TRC_BT2020_12, "trc"),
984
985 { "format", "Output pixel format",
986 OFFSET(user_format), AV_OPT_TYPE_INT, { .i64 = AV_PIX_FMT_NONE },
987 AV_PIX_FMT_NONE, AV_PIX_FMT_GBRAP12LE, FLAGS, .unit = "fmt" },
988 ENUM("yuv420p", AV_PIX_FMT_YUV420P, "fmt"),
989 ENUM("yuv420p10", AV_PIX_FMT_YUV420P10, "fmt"),
990 ENUM("yuv420p12", AV_PIX_FMT_YUV420P12, "fmt"),
991 ENUM("yuv422p", AV_PIX_FMT_YUV422P, "fmt"),
992 ENUM("yuv422p10", AV_PIX_FMT_YUV422P10, "fmt"),
993 ENUM("yuv422p12", AV_PIX_FMT_YUV422P12, "fmt"),
994 ENUM("yuv444p", AV_PIX_FMT_YUV444P, "fmt"),
995 ENUM("yuv444p10", AV_PIX_FMT_YUV444P10, "fmt"),
996 ENUM("yuv444p12", AV_PIX_FMT_YUV444P12, "fmt"),
997
998 { "fast", "Ignore primary chromaticity and gamma correction",
999 OFFSET(fast_mode), AV_OPT_TYPE_BOOL, { .i64 = 0 },
1000 0, 1, FLAGS },
1001
1002 { "dither", "Dithering mode",
1003 OFFSET(dither), AV_OPT_TYPE_INT, { .i64 = DITHER_NONE },
1004 DITHER_NONE, DITHER_NB - 1, FLAGS, .unit = "dither" },
1005 ENUM("none", DITHER_NONE, "dither"),
1006 ENUM("fsb", DITHER_FSB, "dither"),
1007
1008 { "wpadapt", "Whitepoint adaptation method",
1009 OFFSET(wp_adapt), AV_OPT_TYPE_INT, { .i64 = WP_ADAPT_BRADFORD },
1010 WP_ADAPT_BRADFORD, NB_WP_ADAPT - 1, FLAGS, .unit = "wpadapt" },
1011 ENUM("bradford", WP_ADAPT_BRADFORD, "wpadapt"),
1012 ENUM("vonkries", WP_ADAPT_VON_KRIES, "wpadapt"),
1013 ENUM("identity", WP_ADAPT_IDENTITY, "wpadapt"),
1014
1015 { "clipgamut",
1016 "Controls how to clip out-of-gamut colors that arise as a result of colorspace conversion.",
1017 OFFSET(clip_gamut), AV_OPT_TYPE_INT, { .i64 = CLIP_GAMUT_NONE },
1018 CLIP_GAMUT_NONE, NB_CLIP_GAMUT - 1, FLAGS, .unit = "clipgamut" },
1019 ENUM("none", CLIP_GAMUT_NONE, "clipgamut"),
1020 ENUM("rgb", CLIP_GAMUT_RGB, "clipgamut"),
1021
1022 { "iall", "Set all input color properties together",
1023 OFFSET(user_iall), AV_OPT_TYPE_INT, { .i64 = CS_UNSPECIFIED },
1024 CS_UNSPECIFIED, CS_NB - 1, FLAGS, .unit = "all" },
1025 { "ispace", "Input colorspace",
1026 OFFSET(user_icsp), AV_OPT_TYPE_INT, { .i64 = AVCOL_SPC_UNSPECIFIED },
1027 AVCOL_SPC_RGB, AVCOL_SPC_NB - 1, FLAGS, .unit = "csp" },
1028 { "irange", "Input color range",
1029 OFFSET(user_irng), AV_OPT_TYPE_INT, { .i64 = AVCOL_RANGE_UNSPECIFIED },
1030 AVCOL_RANGE_UNSPECIFIED, AVCOL_RANGE_NB - 1, FLAGS, .unit = "rng" },
1031 { "iprimaries", "Input color primaries",
1032 OFFSET(user_iprm), AV_OPT_TYPE_INT, { .i64 = AVCOL_PRI_UNSPECIFIED },
1033 AVCOL_PRI_RESERVED0, AVCOL_PRI_NB - 1, FLAGS, .unit = "prm" },
1034 { "itrc", "Input transfer characteristics",
1035 OFFSET(user_itrc), AV_OPT_TYPE_INT, { .i64 = AVCOL_TRC_UNSPECIFIED },
1036 AVCOL_TRC_RESERVED0, AVCOL_TRC_NB - 1, FLAGS, .unit = "trc" },
1037
1038 { NULL }
1039 };
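
Putting the options together, an illustrative invocation might look like the following (standard key=value:key=value filter-option syntax; all option and value names are taken from the table above):

    ffmpeg -i in.mp4 \
           -vf colorspace=all=bt709:iall=bt601-6-625:format=yuv420p:dither=fsb \
           -c:a copy out.mp4

Here iall supplies BT.601-625 defaults for any unset input properties, all requests BT.709 output, and dither=fsb selects the error-diffusion rgb2yuv_fsb path in convert().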
1040
1041 AVFILTER_DEFINE_CLASS(colorspace);
1042
1043 static const AVFilterPad inputs[] = {
1044 {
1045 .name = "default",
1046 .type = AVMEDIA_TYPE_VIDEO,
1047 .filter_frame = filter_frame,
1048 },
1049 };
1050
1051 static const AVFilterPad outputs[] = {
1052 {
1053 .name = "default",
1054 .type = AVMEDIA_TYPE_VIDEO,
1055 .config_props = config_props,
1056 },
1057 };
1058
1059 const FFFilter ff_vf_colorspace = {
1060 .p.name = "colorspace",
1061 .p.description = NULL_IF_CONFIG_SMALL("Convert between colorspaces."),
1062 .p.priv_class = &colorspace_class,
1063 .p.flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
1064 .init = init,
1065 .uninit = uninit,
1066 .priv_size = sizeof(ColorSpaceContext),
1067 FILTER_INPUTS(inputs),
1068 FILTER_OUTPUTS(outputs),
1069 FILTER_QUERY_FUNC2(query_formats),
1070 };
1071