| Line | Branch | Exec | Source |
|---|---|---|---|
| 1 | /* | ||
| 2 | * Copyright (c) 2016 Ronald S. Bultje <rsbultje@gmail.com> | ||
| 3 | * | ||
| 4 | * This file is part of FFmpeg. | ||
| 5 | * | ||
| 6 | * FFmpeg is free software; you can redistribute it and/or | ||
| 7 | * modify it under the terms of the GNU Lesser General Public | ||
| 8 | * License as published by the Free Software Foundation; either | ||
| 9 | * version 2.1 of the License, or (at your option) any later version. | ||
| 10 | * | ||
| 11 | * FFmpeg is distributed in the hope that it will be useful, | ||
| 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 14 | * Lesser General Public License for more details. | ||
| 15 | * | ||
| 16 | * You should have received a copy of the GNU Lesser General Public | ||
| 17 | * License along with FFmpeg; if not, write to the Free Software | ||
| 18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
| 19 | */ | ||
| 20 | |||
| 21 | /** | ||
| 22 | * @file | ||
| 23 | * Convert between colorspaces. | ||
| 24 | */ | ||
| 25 | |||
| 26 | #include "libavutil/avassert.h" | ||
| 27 | #include "libavutil/csp.h" | ||
| 28 | #include "libavutil/frame.h" | ||
| 29 | #include "libavutil/mem.h" | ||
| 30 | #include "libavutil/mem_internal.h" | ||
| 31 | #include "libavutil/opt.h" | ||
| 32 | #include "libavutil/pixdesc.h" | ||
| 33 | #include "libavutil/pixfmt.h" | ||
| 34 | |||
| 35 | #include "avfilter.h" | ||
| 36 | #include "colorspacedsp.h" | ||
| 37 | #include "filters.h" | ||
| 38 | #include "formats.h" | ||
| 39 | #include "video.h" | ||
| 40 | #include "colorspace.h" | ||
| 41 | |||
/* Dithering applied when quantizing the intermediate RGB back to YUV
 * (selects the rgb2yuv_fsb path in convert()). */
enum DitherMode {
    DITHER_NONE,
    DITHER_FSB, // Floyd-Steinberg-style dithering — TODO confirm expansion of "fsb"
    DITHER_NB,
};
| 47 | |||
/* Combined colorspace presets; each entry maps to a (transfer, primaries,
 * matrix) triple through default_trc[], default_prm[] and default_csp[]. */
enum Colorspace {
    CS_UNSPECIFIED,
    CS_BT470M,
    CS_BT470BG,
    CS_BT601_6_525,
    CS_BT601_6_625,
    CS_BT709,
    CS_SMPTE170M,
    CS_SMPTE240M,
    CS_BT2020,
    CS_NB,
};
| 60 | |||
/* Chromatic-adaptation method applied when input and output primaries have
 * different white points (see fill_whitepoint_conv_table()); the identity
 * entry skips adaptation entirely. */
enum WhitepointAdaptation {
    WP_ADAPT_BRADFORD,
    WP_ADAPT_VON_KRIES,
    NB_WP_ADAPT_NON_IDENTITY,
    WP_ADAPT_IDENTITY = NB_WP_ADAPT_NON_IDENTITY,
    NB_WP_ADAPT,
};
| 68 | |||
/* Whether the gamma LUTs clip samples to the representable RGB range
 * [0, 28672] instead of the full int16_t range (see fill_gamma_table()). */
enum ClipGamutMode {
    CLIP_GAMUT_NONE,
    CLIP_GAMUT_RGB,
    NB_CLIP_GAMUT,
};
| 74 | |||
/* Transfer characteristic implied by each Colorspace preset. */
static const enum AVColorTransferCharacteristic default_trc[CS_NB + 1] = {
    [CS_UNSPECIFIED] = AVCOL_TRC_UNSPECIFIED,
    [CS_BT470M] = AVCOL_TRC_GAMMA22,
    [CS_BT470BG] = AVCOL_TRC_GAMMA28,
    [CS_BT601_6_525] = AVCOL_TRC_SMPTE170M,
    [CS_BT601_6_625] = AVCOL_TRC_SMPTE170M,
    [CS_BT709] = AVCOL_TRC_BT709,
    [CS_SMPTE170M] = AVCOL_TRC_SMPTE170M,
    [CS_SMPTE240M] = AVCOL_TRC_SMPTE240M,
    [CS_BT2020] = AVCOL_TRC_BT2020_10,
    [CS_NB] = AVCOL_TRC_UNSPECIFIED,
};
| 87 | |||
/* Color primaries implied by each Colorspace preset. */
static const enum AVColorPrimaries default_prm[CS_NB + 1] = {
    [CS_UNSPECIFIED] = AVCOL_PRI_UNSPECIFIED,
    [CS_BT470M] = AVCOL_PRI_BT470M,
    [CS_BT470BG] = AVCOL_PRI_BT470BG,
    [CS_BT601_6_525] = AVCOL_PRI_SMPTE170M,
    [CS_BT601_6_625] = AVCOL_PRI_BT470BG,
    [CS_BT709] = AVCOL_PRI_BT709,
    [CS_SMPTE170M] = AVCOL_PRI_SMPTE170M,
    [CS_SMPTE240M] = AVCOL_PRI_SMPTE240M,
    [CS_BT2020] = AVCOL_PRI_BT2020,
    [CS_NB] = AVCOL_PRI_UNSPECIFIED,
};
| 100 | |||
/* YUV matrix coefficients implied by each Colorspace preset. */
static const enum AVColorSpace default_csp[CS_NB + 1] = {
    [CS_UNSPECIFIED] = AVCOL_SPC_UNSPECIFIED,
    [CS_BT470M] = AVCOL_SPC_SMPTE170M,
    [CS_BT470BG] = AVCOL_SPC_BT470BG,
    [CS_BT601_6_525] = AVCOL_SPC_SMPTE170M,
    [CS_BT601_6_625] = AVCOL_SPC_BT470BG,
    [CS_BT709] = AVCOL_SPC_BT709,
    [CS_SMPTE170M] = AVCOL_SPC_SMPTE170M,
    [CS_SMPTE240M] = AVCOL_SPC_SMPTE240M,
    [CS_BT2020] = AVCOL_SPC_BT2020_NCL,
    [CS_NB] = AVCOL_SPC_UNSPECIFIED,
};
| 113 | |||
/* Parameters of a piecewise gamma transfer function, as used by
 * fill_gamma_table():
 *   delinearized(v) = alpha * v^gamma - (alpha - 1)   for v >= beta
 *   delinearized(v) = delta * v                       for |v| < beta
 * (mirrored for negative v). An alpha of 0 marks an unsupported entry. */
struct TransferCharacteristics {
    double alpha, beta, gamma, delta;
};
| 117 | |||
/* Filter private context. Conversion tables are cached between frames;
 * create_filtergraph() invalidates and re-derives only the parts whose
 * corresponding frame properties changed. */
typedef struct ColorSpaceContext {
    const AVClass *class;

    ColorSpaceDSPContext dsp;

    // user options: "all"/"iall" presets plus per-property overrides,
    // and the negotiated/derived per-link properties
    enum Colorspace user_all, user_iall;
    enum AVColorSpace in_csp, out_csp, user_csp, user_icsp;
    enum AVColorRange in_rng, out_rng, user_rng, user_irng;
    enum AVColorTransferCharacteristic in_trc, out_trc, user_trc, user_itrc;
    enum AVColorPrimaries in_prm, out_prm, user_prm, user_iprm;
    enum AVPixelFormat in_format, user_format;
    int fast_mode;
    /* enum DitherMode */
    int dither;
    /* enum WhitepointAdaptation */
    int wp_adapt;
    /* enum ClipGamutMode */
    int clip_gamut;

    // intermediate 15-bit RGB planes plus dithering scratch space
    int16_t *rgb[3];
    ptrdiff_t rgb_stride;
    unsigned rgb_sz;
    int *dither_scratch[3][2], *dither_scratch_base[3][2];

    // primaries conversion (linear RGB -> linear RGB), Q14 fixed point
    const AVColorPrimariesDesc *in_primaries, *out_primaries;
    int lrgb2lrgb_passthrough;
    DECLARE_ALIGNED(16, int16_t, lrgb2lrgb_coeffs)[3][3][8];

    // transfer characteristics; lin_lut/delin_lut share one allocation
    // (delin_lut = lin_lut + 32768, see fill_gamma_table())
    const struct TransferCharacteristics *in_txchr, *out_txchr;
    int rgb2rgb_passthrough;
    int16_t *lin_lut, *delin_lut;

    // YUV <-> RGB matrices; *_dbl_coeffs keep the double-precision
    // originals so the combined yuv2yuv matrix can be rebuilt exactly
    const AVLumaCoefficients *in_lumacoef, *out_lumacoef;
    int yuv2yuv_passthrough, yuv2yuv_fastmode;
    DECLARE_ALIGNED(16, int16_t, yuv2rgb_coeffs)[3][3][8];
    DECLARE_ALIGNED(16, int16_t, rgb2yuv_coeffs)[3][3][8];
    DECLARE_ALIGNED(16, int16_t, yuv2yuv_coeffs)[3][3][8];
    DECLARE_ALIGNED(16, int16_t, yuv_offset)[2 /* in, out */][8];
    yuv2rgb_fn yuv2rgb;
    rgb2yuv_fn rgb2yuv;
    rgb2yuv_fsb_fn rgb2yuv_fsb;
    yuv2yuv_fn yuv2yuv;
    double yuv2rgb_dbl_coeffs[3][3], rgb2yuv_dbl_coeffs[3][3];
    // Y/UV code ranges scaled to the respective bit depth (see get_range_off())
    int in_y_rng, in_uv_rng, out_y_rng, out_uv_rng;

    int did_warn_range; // warn only once about an unspecified input range
} ColorSpaceContext;
| 165 | |||
| 166 | // FIXME deal with odd width/heights | ||
| 167 | // FIXME faster linearize/delinearize implementation (integer pow) | ||
| 168 | // FIXME bt2020cl support (linearization between yuv/rgb step instead of between rgb/xyz) | ||
| 169 | // FIXME test that the values in (de)lin_lut don't exceed their container storage | ||
| 170 | // type size (only useful if we keep the LUT and don't move to fast integer pow) | ||
| 171 | // FIXME dithering if bitdepth goes down? | ||
| 172 | // FIXME bitexact for fate integration? | ||
| 173 | |||
| 174 | // FIXME I'm pretty sure gamma22/28 also have a linear toe slope, but I can't | ||
| 175 | // find any actual tables that document their real values... | ||
| 176 | // See http://www.13thmonkey.org/~boris/gammacorrection/ first graph why it matters | ||
/* Piecewise-gamma parameters { alpha, beta, gamma, delta } per supported
 * AVColorTransferCharacteristic; entries with alpha == 0 are unsupported
 * (see get_transfer_characteristics()). */
static const struct TransferCharacteristics transfer_characteristics[] = {
    [AVCOL_TRC_BT709] = { 1.099, 0.018, 0.45, 4.5 },
    [AVCOL_TRC_GAMMA22] = { 1.0, 0.0, 1.0 / 2.2, 0.0 },
    [AVCOL_TRC_GAMMA28] = { 1.0, 0.0, 1.0 / 2.8, 0.0 },
    [AVCOL_TRC_SMPTE170M] = { 1.099, 0.018, 0.45, 4.5 },
    [AVCOL_TRC_SMPTE240M] = { 1.1115, 0.0228, 0.45, 4.0 },
    [AVCOL_TRC_LINEAR] = { 1.0, 0.0, 1.0, 0.0 },
    [AVCOL_TRC_IEC61966_2_1] = { 1.055, 0.0031308, 1.0 / 2.4, 12.92 },
    [AVCOL_TRC_IEC61966_2_4] = { 1.099, 0.018, 0.45, 4.5 },
    [AVCOL_TRC_BT2020_10] = { 1.099, 0.018, 0.45, 4.5 },
    [AVCOL_TRC_BT2020_12] = { 1.0993, 0.0181, 0.45, 4.5 },
};
| 189 | |||
| 190 | static const struct TransferCharacteristics * | ||
| 191 | ✗ | get_transfer_characteristics(enum AVColorTransferCharacteristic trc) | |
| 192 | { | ||
| 193 | const struct TransferCharacteristics *coeffs; | ||
| 194 | |||
| 195 | ✗ | if ((unsigned)trc >= FF_ARRAY_ELEMS(transfer_characteristics)) | |
| 196 | ✗ | return NULL; | |
| 197 | ✗ | coeffs = &transfer_characteristics[trc]; | |
| 198 | ✗ | if (!coeffs->alpha) | |
| 199 | ✗ | return NULL; | |
| 200 | |||
| 201 | ✗ | return coeffs; | |
| 202 | } | ||
| 203 | |||
/**
 * Build the shared linearize/delinearize LUTs from the cached input and
 * output transfer characteristics.
 *
 * LUT index n represents the intermediate sample v = (n - 2048) / 28672,
 * i.e. the float range [0.0, 1.0] maps to [0, 28672] with headroom on both
 * sides for out-of-range values. With clip_gamut enabled, results are
 * clipped to [0, 28672]; otherwise only to the int16_t range.
 *
 * Returns 0 on success or AVERROR(ENOMEM).
 */
static int fill_gamma_table(ColorSpaceContext *s)
{
    int n;
    double in_alpha = s->in_txchr->alpha, in_beta = s->in_txchr->beta;
    double in_gamma = s->in_txchr->gamma, in_delta = s->in_txchr->delta;
    double in_ialpha = 1.0 / in_alpha, in_igamma = 1.0 / in_gamma, in_idelta = 1.0 / in_delta;
    double out_alpha = s->out_txchr->alpha, out_beta = s->out_txchr->beta;
    double out_gamma = s->out_txchr->gamma, out_delta = s->out_txchr->delta;
    int clip_gamut = s->clip_gamut == CLIP_GAMUT_RGB;

    // one allocation holds both LUTs; delin_lut is the second half
    s->lin_lut = av_malloc(sizeof(*s->lin_lut) * 32768 * 2);
    if (!s->lin_lut)
        return AVERROR(ENOMEM);
    s->delin_lut = &s->lin_lut[32768];
    for (n = 0; n < 32768; n++) {
        double v = (n - 2048.0) / 28672.0, d, l;

        // delinearize: apply the output curve (mirrored for negative v,
        // linear toe of slope out_delta around zero)
        if (v <= -out_beta) {
            d = -out_alpha * pow(-v, out_gamma) + (out_alpha - 1.0);
        } else if (v < out_beta) {
            d = out_delta * v;
        } else {
            d = out_alpha * pow(v, out_gamma) - (out_alpha - 1.0);
        }
        int d_rounded = lrint(d * 28672.0);
        s->delin_lut[n] = clip_gamut ? av_clip(d_rounded, 0, 28672)
                                     : av_clip_int16(d_rounded);

        // linearize: invert the input curve (note the toe threshold is
        // beta * delta on the encoded side)
        if (v <= -in_beta * in_delta) {
            l = -pow((1.0 - in_alpha - v) * in_ialpha, in_igamma);
        } else if (v < in_beta * in_delta) {
            l = v * in_idelta;
        } else {
            l = pow((v + in_alpha - 1.0) * in_ialpha, in_igamma);
        }
        int l_rounded = lrint(l * 28672.0);
        s->lin_lut[n] = clip_gamut ? av_clip(l_rounded, 0, 28672)
                                   : av_clip_int16(l_rounded);
    }

    return 0;
}
| 248 | |||
/*
 * Compute the 3x3 chromatic-adaptation matrix that maps XYZ values from the
 * source white point to the destination white point, using the cone-response
 * matrix selected by wp_adapt (Bradford or Von Kries; the identity case is
 * handled by the caller and must not reach this function).
 * See http://www.brucelindbloom.com/index.html?Eqn_ChromAdapt.html
 */
static void fill_whitepoint_conv_table(double out[3][3], enum WhitepointAdaptation wp_adapt,
                                       const AVWhitepointCoefficients *wp_src,
                                       const AVWhitepointCoefficients *wp_dst)
{
    static const double ma_tbl[NB_WP_ADAPT_NON_IDENTITY][3][3] = {
        [WP_ADAPT_BRADFORD] = {
            { 0.8951, 0.2664, -0.1614 },
            { -0.7502, 1.7135, 0.0367 },
            { 0.0389, -0.0685, 1.0296 },
        }, [WP_ADAPT_VON_KRIES] = {
            { 0.40024, 0.70760, -0.08081 },
            { -0.22630, 1.16532, 0.04570 },
            { 0.00000, 0.00000, 0.91822 },
        },
    };
    const double (*ma)[3] = ma_tbl[wp_adapt];
    // white points given as (x, y) chromaticities; z = 1 - x - y
    double xw_src = av_q2d(wp_src->x), yw_src = av_q2d(wp_src->y);
    double xw_dst = av_q2d(wp_dst->x), yw_dst = av_q2d(wp_dst->y);
    double zw_src = 1.0 - xw_src - yw_src;
    double zw_dst = 1.0 - xw_dst - yw_dst;
    double mai[3][3], fac[3][3], tmp[3][3];
    double rs, gs, bs, rd, gd, bd;

    ff_matrix_invert_3x3(ma, mai);
    // cone responses of the source and destination white points
    rs = ma[0][0] * xw_src + ma[0][1] * yw_src + ma[0][2] * zw_src;
    gs = ma[1][0] * xw_src + ma[1][1] * yw_src + ma[1][2] * zw_src;
    bs = ma[2][0] * xw_src + ma[2][1] * yw_src + ma[2][2] * zw_src;
    rd = ma[0][0] * xw_dst + ma[0][1] * yw_dst + ma[0][2] * zw_dst;
    gd = ma[1][0] * xw_dst + ma[1][1] * yw_dst + ma[1][2] * zw_dst;
    bd = ma[2][0] * xw_dst + ma[2][1] * yw_dst + ma[2][2] * zw_dst;
    // diagonal scale: ratio of destination to source response per channel
    fac[0][0] = rd / rs;
    fac[1][1] = gd / gs;
    fac[2][2] = bd / bs;
    fac[0][1] = fac[0][2] = fac[1][0] = fac[1][2] = fac[2][0] = fac[2][1] = 0.0;
    // compose the per-channel scaling with the cone-response basis change
    ff_matrix_mul_3x3(tmp, ma, fac);
    ff_matrix_mul_3x3(out, tmp, mai);
}
| 290 | |||
/**
 * Remap every sample of the three RGB planes through a lookup table.
 * Each sample is offset by 2048 and clipped to 15 bits to form the index,
 * matching the value layout produced by fill_gamma_table().
 */
static void apply_lut(int16_t *buf[3], ptrdiff_t stride,
                      int w, int h, const int16_t *lut)
{
    for (int plane = 0; plane < 3; plane++) {
        int16_t *row = buf[plane];

        for (int line = 0; line < h; line++, row += stride) {
            for (int col = 0; col < w; col++)
                row[col] = lut[av_clip_uintp2(2048 + row[col], 15)];
        }
    }
}
| 307 | |||
/* Per-frame data shared by all slice-threading jobs in convert(). */
typedef struct ThreadData {
    AVFrame *in, *out;
    // signed copies of the frame linesizes, in units usable for pointer math
    ptrdiff_t in_linesize[3], out_linesize[3];
    // vertical chroma subsampling shift: luma line >> ss_h = chroma line
    int in_ss_h, out_ss_h;
} ThreadData;
| 313 | |||
/**
 * Slice-threading worker: convert one horizontal band of the frame.
 *
 * Band boundaries are rounded to even luma lines so that 4:2:0 chroma rows
 * are never split across jobs. Depending on cached state this either runs
 * the direct yuv2yuv fast path or the full yuv2rgb -> (linearize ->
 * primaries matrix -> delinearize) -> rgb2yuv pipeline.
 */
static int convert(AVFilterContext *ctx, void *data, int job_nr, int n_jobs)
{
    const ThreadData *td = data;
    ColorSpaceContext *s = ctx->priv;
    uint8_t *in_data[3], *out_data[3];
    int16_t *rgb[3];
    int h_in = (td->in->height + 1) >> 1;
    int h1 = 2 * (job_nr * h_in / n_jobs), h2 = 2 * ((job_nr + 1) * h_in / n_jobs);
    int w = td->in->width, h = h2 - h1;

    in_data[0] = td->in->data[0] + td->in_linesize[0] * h1;
    in_data[1] = td->in->data[1] + td->in_linesize[1] * (h1 >> td->in_ss_h);
    in_data[2] = td->in->data[2] + td->in_linesize[2] * (h1 >> td->in_ss_h);
    out_data[0] = td->out->data[0] + td->out_linesize[0] * h1;
    out_data[1] = td->out->data[1] + td->out_linesize[1] * (h1 >> td->out_ss_h);
    out_data[2] = td->out->data[2] + td->out_linesize[2] * (h1 >> td->out_ss_h);
    rgb[0] = s->rgb[0] + s->rgb_stride * h1;
    rgb[1] = s->rgb[1] + s->rgb_stride * h1;
    rgb[2] = s->rgb[2] + s->rgb_stride * h1;

    // FIXME for simd, also make sure we do pictures with negative stride
    // top-down so we don't overwrite lines with padding of data before it
    // in the same buffer (same as swscale)

    if (s->yuv2yuv_fastmode) {
        // FIXME possibly use a fast mode in case only the y range changes?
        // since in that case, only the diagonal entries in yuv2yuv_coeffs[]
        // are non-zero
        s->yuv2yuv(out_data, td->out_linesize, in_data, td->in_linesize, w, h,
                   s->yuv2yuv_coeffs, s->yuv_offset);
    } else {
        // FIXME maybe (for caching efficiency) do pipeline per-line instead of
        // full buffer per function? (Or, since yuv2rgb requires 2 lines: per
        // 2 lines, for yuv420.)
        /*
         * General design:
         * - yuv2rgb converts from whatever range the input was ([16-235/240] or
         *   [0,255] or the 10/12bpp equivalents thereof) to an integer version
         *   of RGB in pseudo-restricted 15+sign bits. That means that the float
         *   range [0.0,1.0] is in [0,28672], and the remainder of the int16_t
         *   range is used for overflow/underflow outside the representable
         *   range of this RGB type. rgb2yuv is the exact opposite.
         * - gamma correction is done using a LUT since that appears to work
         *   fairly fast.
         * - If the input is chroma-subsampled (420/422), the yuv2rgb conversion
         *   (or rgb2yuv conversion) uses nearest-neighbour sampling to read
         *   chroma pixels at luma resolution. If you want some more fancy
         *   filter, you can use swscale to convert to yuv444p.
         * - all coefficients are 14bit (so in the [-2.0,2.0] range).
         */
        s->yuv2rgb(rgb, s->rgb_stride, in_data, td->in_linesize, w, h,
                   s->yuv2rgb_coeffs, s->yuv_offset[0]);
        if (!s->rgb2rgb_passthrough) {
            apply_lut(rgb, s->rgb_stride, w, h, s->lin_lut);
            if (!s->lrgb2lrgb_passthrough)
                s->dsp.multiply3x3(rgb, s->rgb_stride, w, h, s->lrgb2lrgb_coeffs);
            apply_lut(rgb, s->rgb_stride, w, h, s->delin_lut);
        }
        if (s->dither == DITHER_FSB) {
            s->rgb2yuv_fsb(out_data, td->out_linesize, rgb, s->rgb_stride, w, h,
                           s->rgb2yuv_coeffs, s->yuv_offset[1], s->dither_scratch);
        } else {
            s->rgb2yuv(out_data, td->out_linesize, rgb, s->rgb_stride, w, h,
                       s->rgb2yuv_coeffs, s->yuv_offset[1]);
        }
    }

    return 0;
}
| 383 | |||
| 384 | ✗ | static int get_range_off(AVFilterContext *ctx, int *off, | |
| 385 | int *y_rng, int *uv_rng, | ||
| 386 | enum AVColorRange rng, int depth) | ||
| 387 | { | ||
| 388 | ✗ | switch (rng) { | |
| 389 | ✗ | case AVCOL_RANGE_UNSPECIFIED: { | |
| 390 | ✗ | ColorSpaceContext *s = ctx->priv; | |
| 391 | |||
| 392 | ✗ | if (!s->did_warn_range) { | |
| 393 | ✗ | av_log(ctx, AV_LOG_WARNING, "Input range not set, assuming tv/mpeg\n"); | |
| 394 | ✗ | s->did_warn_range = 1; | |
| 395 | } | ||
| 396 | } | ||
| 397 | // fall-through | ||
| 398 | case AVCOL_RANGE_MPEG: | ||
| 399 | ✗ | *off = 16 << (depth - 8); | |
| 400 | ✗ | *y_rng = 219 << (depth - 8); | |
| 401 | ✗ | *uv_rng = 224 << (depth - 8); | |
| 402 | ✗ | break; | |
| 403 | ✗ | case AVCOL_RANGE_JPEG: | |
| 404 | ✗ | *off = 0; | |
| 405 | ✗ | *y_rng = *uv_rng = (256 << (depth - 8)) - 1; | |
| 406 | ✗ | break; | |
| 407 | ✗ | default: | |
| 408 | ✗ | return AVERROR(EINVAL); | |
| 409 | } | ||
| 410 | |||
| 411 | ✗ | return 0; | |
| 412 | } | ||
| 413 | |||
/**
 * (Re)build all conversion state for a given input/output frame pair.
 *
 * Cached state is invalidated selectively: only the tables whose underlying
 * frame properties changed since the previous frame are re-derived. Sets the
 * passthrough/fastmode flags and selects the DSP function pointers.
 *
 * Returns 0 on success or a negative AVERROR on unsupported formats or
 * color properties.
 */
static int create_filtergraph(AVFilterContext *ctx,
                              const AVFrame *in, const AVFrame *out)
{
    ColorSpaceContext *s = ctx->priv;
    const AVPixFmtDescriptor *in_desc = av_pix_fmt_desc_get(in->format);
    const AVPixFmtDescriptor *out_desc = av_pix_fmt_desc_get(out->format);
    int m, n, o, res, fmt_identical, redo_yuv2rgb = 0, redo_rgb2yuv = 0;

#define supported_depth(d) ((d) == 8 || (d) == 10 || (d) == 12)
#define supported_subsampling(lcw, lch) \
    (((lcw) == 0 && (lch) == 0) || ((lcw) == 1 && (lch) == 0) || ((lcw) == 1 && (lch) == 1))
#define supported_format(d) \
    ((d) != NULL && (d)->nb_components == 3 && \
     !((d)->flags & AV_PIX_FMT_FLAG_RGB) && \
     supported_depth((d)->comp[0].depth) && \
     supported_subsampling((d)->log2_chroma_w, (d)->log2_chroma_h))

    if (!supported_format(in_desc)) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported input format %d (%s) or bitdepth (%d)\n",
               in->format, av_get_pix_fmt_name(in->format),
               in_desc ? in_desc->comp[0].depth : -1);
        return AVERROR(EINVAL);
    }
    if (!supported_format(out_desc)) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported output format %d (%s) or bitdepth (%d)\n",
               out->format, av_get_pix_fmt_name(out->format),
               out_desc ? out_desc->comp[0].depth : -1);
        return AVERROR(EINVAL);
    }

    // invalidate cached state for every property that changed since the
    // previous frame
    if (in->color_primaries != s->in_prm) s->in_primaries = NULL;
    if (out->color_primaries != s->out_prm) s->out_primaries = NULL;
    if (in->color_trc != s->in_trc) s->in_txchr = NULL;
    if (out->color_trc != s->out_trc) s->out_txchr = NULL;
    if (in->colorspace != s->in_csp ||
        in->color_range != s->in_rng) s->in_lumacoef = NULL;
    if (out->color_range != s->out_rng) s->rgb2yuv = NULL;

    // primaries: derive the linear-RGB -> linear-RGB matrix (with optional
    // whitepoint adaptation), quantized to Q14
    if (!s->out_primaries || !s->in_primaries) {
        s->in_prm = in->color_primaries;
        if (s->user_iall != CS_UNSPECIFIED)
            s->in_prm = default_prm[FFMIN(s->user_iall, CS_NB)];
        if (s->user_iprm != AVCOL_PRI_UNSPECIFIED)
            s->in_prm = s->user_iprm;
        s->in_primaries = av_csp_primaries_desc_from_id(s->in_prm);
        if (!s->in_primaries) {
            av_log(ctx, AV_LOG_ERROR,
                   "Unsupported input primaries %d (%s)\n",
                   s->in_prm, av_color_primaries_name(s->in_prm));
            return AVERROR(EINVAL);
        }
        s->out_prm = out->color_primaries;
        s->out_primaries = av_csp_primaries_desc_from_id(s->out_prm);
        if (!s->out_primaries) {
            if (s->out_prm == AVCOL_PRI_UNSPECIFIED) {
                if (s->user_all == CS_UNSPECIFIED) {
                    av_log(ctx, AV_LOG_ERROR, "Please specify output primaries\n");
                } else {
                    av_log(ctx, AV_LOG_ERROR,
                           "Unsupported output color property %d\n", s->user_all);
                }
            } else {
                av_log(ctx, AV_LOG_ERROR,
                       "Unsupported output primaries %d (%s)\n",
                       s->out_prm, av_color_primaries_name(s->out_prm));
            }
            return AVERROR(EINVAL);
        }
        s->lrgb2lrgb_passthrough = !memcmp(s->in_primaries, s->out_primaries,
                                           sizeof(*s->in_primaries));
        if (!s->lrgb2lrgb_passthrough) {
            double rgb2xyz[3][3], xyz2rgb[3][3], rgb2rgb[3][3];
            const AVWhitepointCoefficients *wp_out, *wp_in;

            wp_out = &s->out_primaries->wp;
            wp_in = &s->in_primaries->wp;
            ff_fill_rgb2xyz_table(&s->out_primaries->prim, wp_out, rgb2xyz);
            ff_matrix_invert_3x3(rgb2xyz, xyz2rgb);
            ff_fill_rgb2xyz_table(&s->in_primaries->prim, wp_in, rgb2xyz);
            if (memcmp(wp_in, wp_out, sizeof(*wp_in)) != 0 &&
                s->wp_adapt != WP_ADAPT_IDENTITY) {
                double wpconv[3][3], tmp[3][3];

                fill_whitepoint_conv_table(wpconv, s->wp_adapt, &s->in_primaries->wp,
                                           &s->out_primaries->wp);
                ff_matrix_mul_3x3(tmp, rgb2xyz, wpconv);
                ff_matrix_mul_3x3(rgb2rgb, tmp, xyz2rgb);
            } else {
                ff_matrix_mul_3x3(rgb2rgb, rgb2xyz, xyz2rgb);
            }
            // replicate each Q14 coefficient 8x for the SIMD kernels
            for (m = 0; m < 3; m++)
                for (n = 0; n < 3; n++) {
                    s->lrgb2lrgb_coeffs[m][n][0] = lrint(16384.0 * rgb2rgb[m][n]);
                    for (o = 1; o < 8; o++)
                        s->lrgb2lrgb_coeffs[m][n][o] = s->lrgb2lrgb_coeffs[m][n][0];
                }

        }
    }

    // input transfer characteristics; any change invalidates the gamma LUTs
    if (!s->in_txchr) {
        av_freep(&s->lin_lut);
        s->in_trc = in->color_trc;
        if (s->user_iall != CS_UNSPECIFIED)
            s->in_trc = default_trc[FFMIN(s->user_iall, CS_NB)];
        if (s->user_itrc != AVCOL_TRC_UNSPECIFIED)
            s->in_trc = s->user_itrc;
        s->in_txchr = get_transfer_characteristics(s->in_trc);
        if (!s->in_txchr) {
            av_log(ctx, AV_LOG_ERROR,
                   "Unsupported input transfer characteristics %d (%s)\n",
                   s->in_trc, av_color_transfer_name(s->in_trc));
            return AVERROR(EINVAL);
        }
    }

    // output transfer characteristics
    if (!s->out_txchr) {
        av_freep(&s->lin_lut);
        s->out_trc = out->color_trc;
        s->out_txchr = get_transfer_characteristics(s->out_trc);
        if (!s->out_txchr) {
            if (s->out_trc == AVCOL_TRC_UNSPECIFIED) {
                if (s->user_all == CS_UNSPECIFIED) {
                    av_log(ctx, AV_LOG_ERROR,
                           "Please specify output transfer characteristics\n");
                } else {
                    av_log(ctx, AV_LOG_ERROR,
                           "Unsupported output color property %d\n", s->user_all);
                }
            } else {
                av_log(ctx, AV_LOG_ERROR,
                       "Unsupported output transfer characteristics %d (%s)\n",
                       s->out_trc, av_color_transfer_name(s->out_trc));
            }
            return AVERROR(EINVAL);
        }
    }

    // the RGB stage can be skipped entirely when primaries and transfer
    // match (or fast mode is requested); otherwise (re)build the LUTs
    s->rgb2rgb_passthrough = s->fast_mode || (s->lrgb2lrgb_passthrough &&
                             !memcmp(s->in_txchr, s->out_txchr, sizeof(*s->in_txchr)));
    if (!s->rgb2rgb_passthrough && !s->lin_lut) {
        res = fill_gamma_table(s);
        if (res < 0)
            return res;
    }

    // input YUV matrix and range
    if (!s->in_lumacoef) {
        s->in_csp = in->colorspace;
        if (s->user_iall != CS_UNSPECIFIED)
            s->in_csp = default_csp[FFMIN(s->user_iall, CS_NB)];
        if (s->user_icsp != AVCOL_SPC_UNSPECIFIED)
            s->in_csp = s->user_icsp;
        s->in_rng = in->color_range;
        if (s->user_irng != AVCOL_RANGE_UNSPECIFIED)
            s->in_rng = s->user_irng;
        s->in_lumacoef = av_csp_luma_coeffs_from_avcsp(s->in_csp);
        if (!s->in_lumacoef) {
            av_log(ctx, AV_LOG_ERROR,
                   "Unsupported input colorspace %d (%s)\n",
                   s->in_csp, av_color_space_name(s->in_csp));
            return AVERROR(EINVAL);
        }
        redo_yuv2rgb = 1;
    }

    if (!s->rgb2yuv) {
        s->out_rng = out->color_range;
        redo_rgb2yuv = 1;
    }

    fmt_identical = in_desc->log2_chroma_h == out_desc->log2_chroma_h &&
                    in_desc->log2_chroma_w == out_desc->log2_chroma_w;
    s->yuv2yuv_fastmode = s->rgb2rgb_passthrough && fmt_identical;
    s->yuv2yuv_passthrough = s->yuv2yuv_fastmode && s->in_rng == s->out_rng &&
                             !memcmp(s->in_lumacoef, s->out_lumacoef,
                                     sizeof(*s->in_lumacoef)) &&
                             in_desc->comp[0].depth == out_desc->comp[0].depth;
    if (!s->yuv2yuv_passthrough) {
        // input YUV -> 15-bit RGB coefficients and offsets
        if (redo_yuv2rgb) {
            double rgb2yuv[3][3], (*yuv2rgb)[3] = s->yuv2rgb_dbl_coeffs;
            int off, bits, in_rng;

            res = get_range_off(ctx, &off, &s->in_y_rng, &s->in_uv_rng,
                                s->in_rng, in_desc->comp[0].depth);
            if (res < 0) {
                av_log(ctx, AV_LOG_ERROR,
                       "Unsupported input color range %d (%s)\n",
                       s->in_rng, av_color_range_name(s->in_rng));
                return res;
            }
            for (n = 0; n < 8; n++)
                s->yuv_offset[0][n] = off;
            ff_fill_rgb2yuv_table(s->in_lumacoef, rgb2yuv);
            ff_matrix_invert_3x3(rgb2yuv, yuv2rgb);
            bits = 1 << (in_desc->comp[0].depth - 1);
            for (n = 0; n < 3; n++) {
                for (in_rng = s->in_y_rng, m = 0; m < 3; m++, in_rng = s->in_uv_rng) {
                    s->yuv2rgb_coeffs[n][m][0] = lrint(28672 * bits * yuv2rgb[n][m] / in_rng);
                    for (o = 1; o < 8; o++)
                        s->yuv2rgb_coeffs[n][m][o] = s->yuv2rgb_coeffs[n][m][0];
                }
            }
            av_assert2(s->yuv2rgb_coeffs[0][1][0] == 0);
            av_assert2(s->yuv2rgb_coeffs[2][2][0] == 0);
            av_assert2(s->yuv2rgb_coeffs[0][0][0] == s->yuv2rgb_coeffs[1][0][0]);
            av_assert2(s->yuv2rgb_coeffs[0][0][0] == s->yuv2rgb_coeffs[2][0][0]);
            s->yuv2rgb = s->dsp.yuv2rgb[(in_desc->comp[0].depth - 8) >> 1]
                                       [in_desc->log2_chroma_h + in_desc->log2_chroma_w];
        }

        // 15-bit RGB -> output YUV coefficients and offsets
        if (redo_rgb2yuv) {
            double (*rgb2yuv)[3] = s->rgb2yuv_dbl_coeffs;
            int off, out_rng, bits;

            res = get_range_off(ctx, &off, &s->out_y_rng, &s->out_uv_rng,
                                s->out_rng, out_desc->comp[0].depth);
            if (res < 0) {
                av_log(ctx, AV_LOG_ERROR,
                       "Unsupported output color range %d (%s)\n",
                       s->out_rng, av_color_range_name(s->out_rng));
                return res;
            }
            for (n = 0; n < 8; n++)
                s->yuv_offset[1][n] = off;
            ff_fill_rgb2yuv_table(s->out_lumacoef, rgb2yuv);
            bits = 1 << (29 - out_desc->comp[0].depth);
            for (out_rng = s->out_y_rng, n = 0; n < 3; n++, out_rng = s->out_uv_rng) {
                for (m = 0; m < 3; m++) {
                    s->rgb2yuv_coeffs[n][m][0] = lrint(bits * out_rng * rgb2yuv[n][m] / 28672);
                    for (o = 1; o < 8; o++)
                        s->rgb2yuv_coeffs[n][m][o] = s->rgb2yuv_coeffs[n][m][0];
                }
            }
            av_assert2(s->rgb2yuv_coeffs[1][2][0] == s->rgb2yuv_coeffs[2][0][0]);
            s->rgb2yuv = s->dsp.rgb2yuv[(out_desc->comp[0].depth - 8) >> 1]
                                       [out_desc->log2_chroma_h + out_desc->log2_chroma_w];
            s->rgb2yuv_fsb = s->dsp.rgb2yuv_fsb[(out_desc->comp[0].depth - 8) >> 1]
                                       [out_desc->log2_chroma_h + out_desc->log2_chroma_w];
        }

        // combined direct YUV -> YUV matrix for the fast path, folding both
        // matrices and the range/depth rescaling into one Q14 multiply
        if (s->yuv2yuv_fastmode && (redo_yuv2rgb || redo_rgb2yuv)) {
            int idepth = in_desc->comp[0].depth, odepth = out_desc->comp[0].depth;
            double (*rgb2yuv)[3] = s->rgb2yuv_dbl_coeffs;
            double (*yuv2rgb)[3] = s->yuv2rgb_dbl_coeffs;
            double yuv2yuv[3][3];
            int in_rng, out_rng;

            ff_matrix_mul_3x3(yuv2yuv, yuv2rgb, rgb2yuv);
            for (out_rng = s->out_y_rng, m = 0; m < 3; m++, out_rng = s->out_uv_rng) {
                for (in_rng = s->in_y_rng, n = 0; n < 3; n++, in_rng = s->in_uv_rng) {
                    s->yuv2yuv_coeffs[m][n][0] =
                        lrint(16384 * yuv2yuv[m][n] * out_rng * (1 << idepth) /
                              (in_rng * (1 << odepth)));
                    for (o = 1; o < 8; o++)
                        s->yuv2yuv_coeffs[m][n][o] = s->yuv2yuv_coeffs[m][n][0];
                }
            }
            av_assert2(s->yuv2yuv_coeffs[1][0][0] == 0);
            av_assert2(s->yuv2yuv_coeffs[2][0][0] == 0);
            s->yuv2yuv = s->dsp.yuv2yuv[(idepth - 8) >> 1][(odepth - 8) >> 1]
                                       [in_desc->log2_chroma_h + in_desc->log2_chroma_w];
        }
    }

    return 0;
}
| 682 | |||
/**
 * Filter init: resolve the output YUV matrix from the user options (explicit
 * colorspace, or the "all" preset's default) and initialize the DSP context.
 * Returns 0, or AVERROR(EINVAL) when no usable output colorspace is given.
 */
static av_cold int init(AVFilterContext *ctx)
{
    ColorSpaceContext *s = ctx->priv;

    s->out_csp = s->user_csp == AVCOL_SPC_UNSPECIFIED ?
                 default_csp[FFMIN(s->user_all, CS_NB)] : s->user_csp;
    s->out_lumacoef = av_csp_luma_coeffs_from_avcsp(s->out_csp);
    if (!s->out_lumacoef) {
        if (s->out_csp == AVCOL_SPC_UNSPECIFIED) {
            if (s->user_all == CS_UNSPECIFIED) {
                av_log(ctx, AV_LOG_ERROR,
                       "Please specify output colorspace\n");
            } else {
                av_log(ctx, AV_LOG_ERROR,
                       "Unsupported output color property %d\n", s->user_all);
            }
        } else {
            av_log(ctx, AV_LOG_ERROR,
                   "Unsupported output colorspace %d (%s)\n", s->out_csp,
                   av_color_space_name(s->out_csp));
        }
        return AVERROR(EINVAL);
    }

    ff_colorspacedsp_init(&s->dsp);

    return 0;
}
| 711 | |||
| 712 | ✗ | static void uninit(AVFilterContext *ctx) | |
| 713 | { | ||
| 714 | ✗ | ColorSpaceContext *s = ctx->priv; | |
| 715 | |||
| 716 | ✗ | av_freep(&s->rgb[0]); | |
| 717 | ✗ | av_freep(&s->rgb[1]); | |
| 718 | ✗ | av_freep(&s->rgb[2]); | |
| 719 | ✗ | s->rgb_sz = 0; | |
| 720 | ✗ | av_freep(&s->dither_scratch_base[0][0]); | |
| 721 | ✗ | av_freep(&s->dither_scratch_base[0][1]); | |
| 722 | ✗ | av_freep(&s->dither_scratch_base[1][0]); | |
| 723 | ✗ | av_freep(&s->dither_scratch_base[1][1]); | |
| 724 | ✗ | av_freep(&s->dither_scratch_base[2][0]); | |
| 725 | ✗ | av_freep(&s->dither_scratch_base[2][1]); | |
| 726 | |||
| 727 | ✗ | av_freep(&s->lin_lut); | |
| 728 | ✗ | } | |
| 729 | |||
/**
 * Per-frame entry point.
 *
 * Allocates the output frame, (re)allocates the intermediate 16-bit RGB
 * planes and dithering scratch buffers whenever the required size changes,
 * rebuilds the conversion coefficients via create_filtergraph(), and then
 * either memcpy-passes the frame through or runs the slice-threaded
 * conversion.
 *
 * Takes ownership of @in (always freed here); on success the converted
 * frame is pushed to the output link.
 */
static int filter_frame(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    ColorSpaceContext *s = ctx->priv;
    // FIXME if yuv2yuv_passthrough, don't get a new buffer but use the
    // input one if it is writable *OR* the actual literal values of in_*
    // and out_* are identical (not just their respective properties)
    AVFrame *out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    int res;
    /* one int16_t per pixel per plane, 32-byte aligned rows (SIMD-friendly) */
    ptrdiff_t rgb_stride = FFALIGN(in->width * sizeof(int16_t), 32);
    unsigned rgb_sz = rgb_stride * in->height;
    ThreadData td;

    if (!out) {
        av_frame_free(&in);
        return AVERROR(ENOMEM);
    }
    res = av_frame_copy_props(out, in);
    if (res < 0) {
        av_frame_free(&in);
        av_frame_free(&out);
        return res;
    }

    /* stamp the output frame with the configured (or inherited) color
     * properties; unspecified options fall back to the "all" preset */
    out->colorspace = s->out_csp;
    out->color_range = s->user_rng == AVCOL_RANGE_UNSPECIFIED ?
                       in->color_range : s->user_rng;
    out->color_primaries = s->user_prm == AVCOL_PRI_UNSPECIFIED ?
                           default_prm[FFMIN(s->user_all, CS_NB)] : s->user_prm;
    if (s->user_trc == AVCOL_TRC_UNSPECIFIED) {
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(out->format);

        /* promote the BT.2020 10-bit curve to the 12-bit variant when the
         * output format actually has >= 12 bits per component */
        out->color_trc = default_trc[FFMIN(s->user_all, CS_NB)];
        if (out->color_trc == AVCOL_TRC_BT2020_10 && desc && desc->comp[0].depth >= 12)
            out->color_trc = AVCOL_TRC_BT2020_12;
    } else {
        out->color_trc = s->user_trc;
    }

    /* color-dependent side data (e.g. mastering metadata) no longer matches
     * the frame once primaries or transfer change — drop it */
    if (out->color_primaries != in->color_primaries || out->color_trc != in->color_trc) {
        av_frame_side_data_remove_by_props(&out->side_data, &out->nb_side_data,
                                           AV_SIDE_DATA_PROP_COLOR_DEPENDENT);
    }

    /* reallocate scratch storage only when the required size changed */
    if (rgb_sz != s->rgb_sz) {
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(out->format);
        int uvw = in->width >> desc->log2_chroma_w;

        av_freep(&s->rgb[0]);
        av_freep(&s->rgb[1]);
        av_freep(&s->rgb[2]);
        s->rgb_sz = 0;
        av_freep(&s->dither_scratch_base[0][0]);
        av_freep(&s->dither_scratch_base[0][1]);
        av_freep(&s->dither_scratch_base[1][0]);
        av_freep(&s->dither_scratch_base[1][1]);
        av_freep(&s->dither_scratch_base[2][0]);
        av_freep(&s->dither_scratch_base[2][1]);

        /* scratch rows get width+4 entries; chroma planes ([1]/[2]) are
         * sized by the horizontally subsampled width */
        s->rgb[0] = av_malloc(rgb_sz);
        s->rgb[1] = av_malloc(rgb_sz);
        s->rgb[2] = av_malloc(rgb_sz);
        s->dither_scratch_base[0][0] =
            av_malloc(sizeof(*s->dither_scratch_base[0][0]) * (in->width + 4));
        s->dither_scratch_base[0][1] =
            av_malloc(sizeof(*s->dither_scratch_base[0][1]) * (in->width + 4));
        s->dither_scratch_base[1][0] =
            av_malloc(sizeof(*s->dither_scratch_base[1][0]) * (uvw + 4));
        s->dither_scratch_base[1][1] =
            av_malloc(sizeof(*s->dither_scratch_base[1][1]) * (uvw + 4));
        s->dither_scratch_base[2][0] =
            av_malloc(sizeof(*s->dither_scratch_base[2][0]) * (uvw + 4));
        s->dither_scratch_base[2][1] =
            av_malloc(sizeof(*s->dither_scratch_base[2][1]) * (uvw + 4));
        /* working pointers start one element into each base allocation;
         * NOTE(review): the +1 offset plus the +4 over-allocation suggests
         * the dither code may index [-1] — confirm against colorspacedsp */
        s->dither_scratch[0][0] = &s->dither_scratch_base[0][0][1];
        s->dither_scratch[0][1] = &s->dither_scratch_base[0][1][1];
        s->dither_scratch[1][0] = &s->dither_scratch_base[1][0][1];
        s->dither_scratch[1][1] = &s->dither_scratch_base[1][1][1];
        s->dither_scratch[2][0] = &s->dither_scratch_base[2][0][1];
        s->dither_scratch[2][1] = &s->dither_scratch_base[2][1][1];
        if (!s->rgb[0] || !s->rgb[1] || !s->rgb[2] ||
            !s->dither_scratch_base[0][0] || !s->dither_scratch_base[0][1] ||
            !s->dither_scratch_base[1][0] || !s->dither_scratch_base[1][1] ||
            !s->dither_scratch_base[2][0] || !s->dither_scratch_base[2][1]) {
            uninit(ctx);
            av_frame_free(&in);
            av_frame_free(&out);
            return AVERROR(ENOMEM);
        }
        s->rgb_sz = rgb_sz;
    }
    res = create_filtergraph(ctx, in, out);
    if (res < 0) {
        av_frame_free(&in);
        av_frame_free(&out);
        return res;
    }
    s->rgb_stride = rgb_stride / sizeof(int16_t);
    td.in = in;
    td.out = out;
    td.in_linesize[0] = in->linesize[0];
    td.in_linesize[1] = in->linesize[1];
    td.in_linesize[2] = in->linesize[2];
    td.out_linesize[0] = out->linesize[0];
    td.out_linesize[1] = out->linesize[1];
    td.out_linesize[2] = out->linesize[2];
    td.in_ss_h = av_pix_fmt_desc_get(in->format)->log2_chroma_h;
    td.out_ss_h = av_pix_fmt_desc_get(out->format)->log2_chroma_h;
    if (s->yuv2yuv_passthrough) {
        /* properties changed but samples did not: plain copy */
        res = av_frame_copy(out, in);
        if (res < 0) {
            av_frame_free(&in);
            av_frame_free(&out);
            return res;
        }
    } else {
        /* job count is capped at half the frame height (each slice handles
         * at least two luma lines); NOTE(review): the return value of
         * ff_filter_execute() is ignored here */
        ff_filter_execute(ctx, convert, &td, NULL,
                          FFMIN((in->height + 1) >> 1, ff_filter_get_nb_threads(ctx)));
    }
    av_frame_free(&in);

    return ff_filter_frame(outlink, out);
}
| 854 | |||
/**
 * Format negotiation callback.
 *
 * Pins the output colorspace (and, when requested, the output range) to a
 * single value, then restricts pixel formats to the supported 8/10/12-bit
 * planar YUV set.  When the "format" option is set, the input keeps the
 * full list while the output is restricted to that one format.
 */
static int query_formats(const AVFilterContext *ctx,
                         AVFilterFormatsConfig **cfg_in,
                         AVFilterFormatsConfig **cfg_out)
{
    /* formats the conversion core supports on both sides */
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P,
        AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10,
        AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12,
        AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P,
        AV_PIX_FMT_NONE
    };
    int res;
    const ColorSpaceContext *s = ctx->priv;
    AVFilterFormats *formats;

    /* ff_formats_ref() takes over the singleton list (it is freed internally
     * on failure), so plain early returns are leak-free here */
    res = ff_formats_ref(ff_make_formats_list_singleton(s->out_csp), &cfg_out[0]->color_spaces);
    if (res < 0)
        return res;
    if (s->user_rng != AVCOL_RANGE_UNSPECIFIED) {
        res = ff_formats_ref(ff_make_formats_list_singleton(s->user_rng), &cfg_out[0]->color_ranges);
        if (res < 0)
            return res;
    }

    formats = ff_make_pixel_format_list(pix_fmts);
    if (!formats)
        return AVERROR(ENOMEM);
    /* no explicit output format: both sides share the full supported list */
    if (s->user_format == AV_PIX_FMT_NONE)
        return ff_set_common_formats2(ctx, cfg_in, cfg_out, formats);

    res = ff_formats_ref(formats, &cfg_in[0]->formats);
    if (res < 0)
        return res;

    /* explicit output format: offer only that single format downstream */
    formats = NULL;
    res = ff_add_format(&formats, s->user_format);
    if (res < 0)
        return res;

    return ff_formats_ref(formats, &cfg_out[0]->formats);
}
| 896 | |||
| 897 | ✗ | static int config_props(AVFilterLink *outlink) | |
| 898 | { | ||
| 899 | ✗ | AVFilterContext *ctx = outlink->dst; | |
| 900 | ✗ | AVFilterLink *inlink = outlink->src->inputs[0]; | |
| 901 | |||
| 902 | ✗ | if (inlink->w % 2 || inlink->h % 2) { | |
| 903 | ✗ | av_log(ctx, AV_LOG_ERROR, "Invalid odd size (%dx%d)\n", | |
| 904 | inlink->w, inlink->h); | ||
| 905 | ✗ | return AVERROR_PATCHWELCOME; | |
| 906 | } | ||
| 907 | |||
| 908 | ✗ | outlink->w = inlink->w; | |
| 909 | ✗ | outlink->h = inlink->h; | |
| 910 | ✗ | outlink->sample_aspect_ratio = inlink->sample_aspect_ratio; | |
| 911 | ✗ | outlink->time_base = inlink->time_base; | |
| 912 | |||
| 913 | ✗ | return 0; | |
| 914 | } | ||
| 915 | |||
| 916 | #define OFFSET(x) offsetof(ColorSpaceContext, x) | ||
| 917 | #define FLAGS AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM | ||
| 918 | #define ENUM(x, y, z) { x, "", 0, AV_OPT_TYPE_CONST, { .i64 = y }, INT_MIN, INT_MAX, FLAGS, .unit = z } | ||
| 919 | |||
| 920 | static const AVOption colorspace_options[] = { | ||
| 921 | { "all", "Set all color properties together", | ||
| 922 | OFFSET(user_all), AV_OPT_TYPE_INT, { .i64 = CS_UNSPECIFIED }, | ||
| 923 | CS_UNSPECIFIED, CS_NB - 1, FLAGS, .unit = "all" }, | ||
| 924 | ENUM("bt470m", CS_BT470M, "all"), | ||
| 925 | ENUM("bt470bg", CS_BT470BG, "all"), | ||
| 926 | ENUM("bt601-6-525", CS_BT601_6_525, "all"), | ||
| 927 | ENUM("bt601-6-625", CS_BT601_6_625, "all"), | ||
| 928 | ENUM("bt709", CS_BT709, "all"), | ||
| 929 | ENUM("smpte170m", CS_SMPTE170M, "all"), | ||
| 930 | ENUM("smpte240m", CS_SMPTE240M, "all"), | ||
| 931 | ENUM("bt2020", CS_BT2020, "all"), | ||
| 932 | |||
| 933 | { "space", "Output colorspace", | ||
| 934 | OFFSET(user_csp), AV_OPT_TYPE_INT, { .i64 = AVCOL_SPC_UNSPECIFIED }, | ||
| 935 | AVCOL_SPC_RGB, AVCOL_SPC_NB - 1, FLAGS, .unit = "csp"}, | ||
| 936 | ENUM("bt709", AVCOL_SPC_BT709, "csp"), | ||
| 937 | ENUM("fcc", AVCOL_SPC_FCC, "csp"), | ||
| 938 | ENUM("bt470bg", AVCOL_SPC_BT470BG, "csp"), | ||
| 939 | ENUM("smpte170m", AVCOL_SPC_SMPTE170M, "csp"), | ||
| 940 | ENUM("smpte240m", AVCOL_SPC_SMPTE240M, "csp"), | ||
| 941 | ENUM("ycgco", AVCOL_SPC_YCGCO, "csp"), | ||
| 942 | ENUM("gbr", AVCOL_SPC_RGB, "csp"), | ||
| 943 | ENUM("bt2020nc", AVCOL_SPC_BT2020_NCL, "csp"), | ||
| 944 | ENUM("bt2020ncl", AVCOL_SPC_BT2020_NCL, "csp"), | ||
| 945 | |||
| 946 | { "range", "Output color range", | ||
| 947 | OFFSET(user_rng), AV_OPT_TYPE_INT, { .i64 = AVCOL_RANGE_UNSPECIFIED }, | ||
| 948 | AVCOL_RANGE_UNSPECIFIED, AVCOL_RANGE_NB - 1, FLAGS, .unit = "rng" }, | ||
| 949 | ENUM("tv", AVCOL_RANGE_MPEG, "rng"), | ||
| 950 | ENUM("mpeg", AVCOL_RANGE_MPEG, "rng"), | ||
| 951 | ENUM("pc", AVCOL_RANGE_JPEG, "rng"), | ||
| 952 | ENUM("jpeg", AVCOL_RANGE_JPEG, "rng"), | ||
| 953 | |||
| 954 | { "primaries", "Output color primaries", | ||
| 955 | OFFSET(user_prm), AV_OPT_TYPE_INT, { .i64 = AVCOL_PRI_UNSPECIFIED }, | ||
| 956 | AVCOL_PRI_RESERVED0, AVCOL_PRI_EXT_NB - 1, FLAGS, .unit = "prm" }, | ||
| 957 | ENUM("bt709", AVCOL_PRI_BT709, "prm"), | ||
| 958 | ENUM("bt470m", AVCOL_PRI_BT470M, "prm"), | ||
| 959 | ENUM("bt470bg", AVCOL_PRI_BT470BG, "prm"), | ||
| 960 | ENUM("smpte170m", AVCOL_PRI_SMPTE170M, "prm"), | ||
| 961 | ENUM("smpte240m", AVCOL_PRI_SMPTE240M, "prm"), | ||
| 962 | ENUM("smpte428", AVCOL_PRI_SMPTE428, "prm"), | ||
| 963 | ENUM("film", AVCOL_PRI_FILM, "prm"), | ||
| 964 | ENUM("smpte431", AVCOL_PRI_SMPTE431, "prm"), | ||
| 965 | ENUM("smpte432", AVCOL_PRI_SMPTE432, "prm"), | ||
| 966 | ENUM("bt2020", AVCOL_PRI_BT2020, "prm"), | ||
| 967 | ENUM("jedec-p22", AVCOL_PRI_JEDEC_P22, "prm"), | ||
| 968 | ENUM("ebu3213", AVCOL_PRI_EBU3213, "prm"), | ||
| 969 | ENUM("vgamut", AVCOL_PRI_V_GAMUT, "prm"), | ||
| 970 | |||
| 971 | { "trc", "Output transfer characteristics", | ||
| 972 | OFFSET(user_trc), AV_OPT_TYPE_INT, { .i64 = AVCOL_TRC_UNSPECIFIED }, | ||
| 973 | AVCOL_TRC_RESERVED0, AVCOL_TRC_EXT_NB - 1, FLAGS, .unit = "trc" }, | ||
| 974 | ENUM("bt709", AVCOL_TRC_BT709, "trc"), | ||
| 975 | ENUM("bt470m", AVCOL_TRC_GAMMA22, "trc"), | ||
| 976 | ENUM("gamma22", AVCOL_TRC_GAMMA22, "trc"), | ||
| 977 | ENUM("bt470bg", AVCOL_TRC_GAMMA28, "trc"), | ||
| 978 | ENUM("gamma28", AVCOL_TRC_GAMMA28, "trc"), | ||
| 979 | ENUM("smpte170m", AVCOL_TRC_SMPTE170M, "trc"), | ||
| 980 | ENUM("smpte240m", AVCOL_TRC_SMPTE240M, "trc"), | ||
| 981 | ENUM("linear", AVCOL_TRC_LINEAR, "trc"), | ||
| 982 | ENUM("srgb", AVCOL_TRC_IEC61966_2_1, "trc"), | ||
| 983 | ENUM("iec61966-2-1", AVCOL_TRC_IEC61966_2_1, "trc"), | ||
| 984 | ENUM("xvycc", AVCOL_TRC_IEC61966_2_4, "trc"), | ||
| 985 | ENUM("iec61966-2-4", AVCOL_TRC_IEC61966_2_4, "trc"), | ||
| 986 | ENUM("bt2020-10", AVCOL_TRC_BT2020_10, "trc"), | ||
| 987 | ENUM("bt2020-12", AVCOL_TRC_BT2020_12, "trc"), | ||
| 988 | ENUM("vlog", AVCOL_TRC_V_LOG, "trc"), | ||
| 989 | |||
| 990 | { "format", "Output pixel format", | ||
| 991 | OFFSET(user_format), AV_OPT_TYPE_INT, { .i64 = AV_PIX_FMT_NONE }, | ||
| 992 | AV_PIX_FMT_NONE, AV_PIX_FMT_GBRAP12LE, FLAGS, .unit = "fmt" }, | ||
| 993 | ENUM("yuv420p", AV_PIX_FMT_YUV420P, "fmt"), | ||
| 994 | ENUM("yuv420p10", AV_PIX_FMT_YUV420P10, "fmt"), | ||
| 995 | ENUM("yuv420p12", AV_PIX_FMT_YUV420P12, "fmt"), | ||
| 996 | ENUM("yuv422p", AV_PIX_FMT_YUV422P, "fmt"), | ||
| 997 | ENUM("yuv422p10", AV_PIX_FMT_YUV422P10, "fmt"), | ||
| 998 | ENUM("yuv422p12", AV_PIX_FMT_YUV422P12, "fmt"), | ||
| 999 | ENUM("yuv444p", AV_PIX_FMT_YUV444P, "fmt"), | ||
| 1000 | ENUM("yuv444p10", AV_PIX_FMT_YUV444P10, "fmt"), | ||
| 1001 | ENUM("yuv444p12", AV_PIX_FMT_YUV444P12, "fmt"), | ||
| 1002 | |||
| 1003 | { "fast", "Ignore primary chromaticity and gamma correction", | ||
| 1004 | OFFSET(fast_mode), AV_OPT_TYPE_BOOL, { .i64 = 0 }, | ||
| 1005 | 0, 1, FLAGS }, | ||
| 1006 | |||
| 1007 | { "dither", "Dithering mode", | ||
| 1008 | OFFSET(dither), AV_OPT_TYPE_INT, { .i64 = DITHER_NONE }, | ||
| 1009 | DITHER_NONE, DITHER_NB - 1, FLAGS, .unit = "dither" }, | ||
| 1010 | ENUM("none", DITHER_NONE, "dither"), | ||
| 1011 | ENUM("fsb", DITHER_FSB, "dither"), | ||
| 1012 | |||
| 1013 | { "wpadapt", "Whitepoint adaptation method", | ||
| 1014 | OFFSET(wp_adapt), AV_OPT_TYPE_INT, { .i64 = WP_ADAPT_BRADFORD }, | ||
| 1015 | WP_ADAPT_BRADFORD, NB_WP_ADAPT - 1, FLAGS, .unit = "wpadapt" }, | ||
| 1016 | ENUM("bradford", WP_ADAPT_BRADFORD, "wpadapt"), | ||
| 1017 | ENUM("vonkries", WP_ADAPT_VON_KRIES, "wpadapt"), | ||
| 1018 | ENUM("identity", WP_ADAPT_IDENTITY, "wpadapt"), | ||
| 1019 | |||
| 1020 | { "clipgamut", | ||
| 1021 | "Controls how to clip out-of-gamut colors that arise as a result of colorspace conversion.", | ||
| 1022 | OFFSET(clip_gamut), AV_OPT_TYPE_INT, { .i64 = CLIP_GAMUT_NONE }, | ||
| 1023 | CLIP_GAMUT_NONE, NB_CLIP_GAMUT - 1, FLAGS, .unit = "clipgamut" }, | ||
| 1024 | ENUM("none", CLIP_GAMUT_NONE, "clipgamut"), | ||
| 1025 | ENUM("rgb", CLIP_GAMUT_RGB, "clipgamut"), | ||
| 1026 | |||
| 1027 | { "iall", "Set all input color properties together", | ||
| 1028 | OFFSET(user_iall), AV_OPT_TYPE_INT, { .i64 = CS_UNSPECIFIED }, | ||
| 1029 | CS_UNSPECIFIED, CS_NB - 1, FLAGS, .unit = "all" }, | ||
| 1030 | { "ispace", "Input colorspace", | ||
| 1031 | OFFSET(user_icsp), AV_OPT_TYPE_INT, { .i64 = AVCOL_SPC_UNSPECIFIED }, | ||
| 1032 | AVCOL_PRI_RESERVED0, AVCOL_PRI_NB - 1, FLAGS, .unit = "csp" }, | ||
| 1033 | { "irange", "Input color range", | ||
| 1034 | OFFSET(user_irng), AV_OPT_TYPE_INT, { .i64 = AVCOL_RANGE_UNSPECIFIED }, | ||
| 1035 | AVCOL_RANGE_UNSPECIFIED, AVCOL_RANGE_NB - 1, FLAGS, .unit = "rng" }, | ||
| 1036 | { "iprimaries", "Input color primaries", | ||
| 1037 | OFFSET(user_iprm), AV_OPT_TYPE_INT, { .i64 = AVCOL_PRI_UNSPECIFIED }, | ||
| 1038 | AVCOL_PRI_RESERVED0, AVCOL_PRI_EXT_NB - 1, FLAGS, .unit = "prm" }, | ||
| 1039 | { "itrc", "Input transfer characteristics", | ||
| 1040 | OFFSET(user_itrc), AV_OPT_TYPE_INT, { .i64 = AVCOL_TRC_UNSPECIFIED }, | ||
| 1041 | AVCOL_TRC_RESERVED0, AVCOL_TRC_EXT_NB - 1, FLAGS, .unit = "trc" }, | ||
| 1042 | |||
| 1043 | { NULL } | ||
| 1044 | }; | ||
| 1045 | |||
/* Generates the AVClass boilerplate tying colorspace_options to the filter. */
AVFILTER_DEFINE_CLASS(colorspace);

/* Single video input; all per-frame work happens in filter_frame(). */
static const AVFilterPad inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
    },
};
| 1055 | |||
/* Single video output; geometry/timing are validated in config_props(). */
static const AVFilterPad outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = config_props,
    },
};
| 1063 | |||
/* Filter registration. SLICE_THREADS matches the ff_filter_execute() use in
 * filter_frame(); TIMELINE_GENERIC enables per-frame enable/disable without
 * filter-specific support. */
const FFFilter ff_vf_colorspace = {
    .p.name = "colorspace",
    .p.description = NULL_IF_CONFIG_SMALL("Convert between colorspaces."),
    .p.priv_class = &colorspace_class,
    .p.flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
    .init = init,
    .uninit = uninit,
    .priv_size = sizeof(ColorSpaceContext),
    FILTER_INPUTS(inputs),
    FILTER_OUTPUTS(outputs),
    FILTER_QUERY_FUNC2(query_formats),
};
| 1076 |