| Line | Branch | Exec | Source |
|---|---|---|---|
| 1 | /* | ||
| 2 | * Copyright (c) 2011 Stefano Sabatini | ||
| 3 | * | ||
| 4 | * This file is part of FFmpeg. | ||
| 5 | * | ||
| 6 | * FFmpeg is free software; you can redistribute it and/or | ||
| 7 | * modify it under the terms of the GNU Lesser General Public | ||
| 8 | * License as published by the Free Software Foundation; either | ||
| 9 | * version 2.1 of the License, or (at your option) any later version. | ||
| 10 | * | ||
| 11 | * FFmpeg is distributed in the hope that it will be useful, | ||
| 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 14 | * Lesser General Public License for more details. | ||
| 15 | * | ||
| 16 | * You should have received a copy of the GNU Lesser General Public | ||
| 17 | * License along with FFmpeg; if not, write to the Free Software | ||
| 18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
| 19 | */ | ||
| 20 | |||
| 21 | /** | ||
| 22 | * @file | ||
| 23 | * Compute a look-up table for binding the input value to the output | ||
| 24 | * value, and apply it to input video. | ||
| 25 | */ | ||
| 26 | |||
| 27 | #include "config_components.h" | ||
| 28 | |||
| 29 | #include "libavutil/attributes.h" | ||
| 30 | #include "libavutil/bswap.h" | ||
| 31 | #include "libavutil/common.h" | ||
| 32 | #include "libavutil/eval.h" | ||
| 33 | #include "libavutil/mem.h" | ||
| 34 | #include "libavutil/opt.h" | ||
| 35 | #include "libavutil/pixdesc.h" | ||
| 36 | #include "avfilter.h" | ||
| 37 | #include "drawutils.h" | ||
| 38 | #include "filters.h" | ||
| 39 | #include "formats.h" | ||
| 40 | #include "video.h" | ||
| 41 | |||
| 42 | static const char *const var_names[] = { | ||
| 43 | "w", ///< width of the input video | ||
| 44 | "h", ///< height of the input video | ||
| 45 | "val", ///< input value for the pixel | ||
| 46 | "maxval", ///< max value for the pixel | ||
| 47 | "minval", ///< min value for the pixel | ||
| 48 | "negval", ///< negated value | ||
| 49 | "clipval", | ||
| 50 | NULL | ||
| 51 | }; | ||
| 52 | |||
| 53 | enum var_name { | ||
| 54 | VAR_W, | ||
| 55 | VAR_H, | ||
| 56 | VAR_VAL, | ||
| 57 | VAR_MAXVAL, | ||
| 58 | VAR_MINVAL, | ||
| 59 | VAR_NEGVAL, | ||
| 60 | VAR_CLIPVAL, | ||
| 61 | VAR_VARS_NB | ||
| 62 | }; | ||
| 63 | |||
| 64 | typedef struct LutContext { | ||
| 65 | const AVClass *class; | ||
| 66 | uint16_t lut[4][256 * 256]; ///< lookup table for each component | ||
| 67 | char *comp_expr_str[4]; | ||
| 68 | AVExpr *comp_expr[4]; | ||
| 69 | int hsub, vsub; | ||
| 70 | double var_values[VAR_VARS_NB]; | ||
| 71 | int is_rgb, is_yuv; | ||
| 72 | int is_planar; | ||
| 73 | int is_16bit; | ||
| 74 | int step; | ||
| 75 | } LutContext; | ||
| 76 | |||
| 77 | #define Y 0 | ||
| 78 | #define U 1 | ||
| 79 | #define V 2 | ||
| 80 | #define R 0 | ||
| 81 | #define G 1 | ||
| 82 | #define B 2 | ||
| 83 | #define A 3 | ||
| 84 | |||
| 85 | #define OFFSET(x) offsetof(LutContext, x) | ||
| 86 | #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM | ||
| 87 | |||
| 88 | static const AVOption options[] = { | ||
| 89 | { "c0", "set component #0 expression", OFFSET(comp_expr_str[0]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
| 90 | { "c1", "set component #1 expression", OFFSET(comp_expr_str[1]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
| 91 | { "c2", "set component #2 expression", OFFSET(comp_expr_str[2]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
| 92 | { "c3", "set component #3 expression", OFFSET(comp_expr_str[3]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
| 93 | { "y", "set Y expression", OFFSET(comp_expr_str[Y]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
| 94 | { "u", "set U expression", OFFSET(comp_expr_str[U]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
| 95 | { "v", "set V expression", OFFSET(comp_expr_str[V]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
| 96 | { "r", "set R expression", OFFSET(comp_expr_str[R]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
| 97 | { "g", "set G expression", OFFSET(comp_expr_str[G]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
| 98 | { "b", "set B expression", OFFSET(comp_expr_str[B]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
| 99 | { "a", "set A expression", OFFSET(comp_expr_str[A]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
| 100 | { NULL } | ||
| 101 | }; | ||
| 102 | |||
| 103 | 115 | static av_cold void uninit(AVFilterContext *ctx) | |
| 104 | { | ||
| 105 | 115 | LutContext *s = ctx->priv; | |
| 106 | int i; | ||
| 107 | |||
| 108 | 2/2 ✓ Branch 0 taken 460 times. ✓ Branch 1 taken 115 times. | 575 | for (i = 0; i < 4; i++) { |
| 109 | 460 | av_expr_free(s->comp_expr[i]); | |
| 110 | 460 | s->comp_expr[i] = NULL; | |
| 111 | 460 | av_freep(&s->comp_expr_str[i]); | |
| 112 | } | ||
| 113 | 115 | } | |
| 114 | |||
| 115 | #define YUV_FORMATS \ | ||
| 116 | AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P, \ | ||
| 117 | AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV440P, \ | ||
| 118 | AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P, \ | ||
| 119 | AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ420P, \ | ||
| 120 | AV_PIX_FMT_YUVJ440P, \ | ||
| 121 | AV_PIX_FMT_YUV444P9LE, AV_PIX_FMT_YUV422P9LE, AV_PIX_FMT_YUV420P9LE, \ | ||
| 122 | AV_PIX_FMT_YUV444P10LE, AV_PIX_FMT_YUV422P10LE, AV_PIX_FMT_YUV420P10LE, AV_PIX_FMT_YUV440P10LE, \ | ||
| 123 | AV_PIX_FMT_YUV444P12LE, AV_PIX_FMT_YUV422P12LE, AV_PIX_FMT_YUV420P12LE, AV_PIX_FMT_YUV440P12LE, \ | ||
| 124 | AV_PIX_FMT_YUV444P14LE, AV_PIX_FMT_YUV422P14LE, AV_PIX_FMT_YUV420P14LE, \ | ||
| 125 | AV_PIX_FMT_YUV444P16LE, AV_PIX_FMT_YUV422P16LE, AV_PIX_FMT_YUV420P16LE, \ | ||
| 126 | AV_PIX_FMT_YUVA444P16LE, AV_PIX_FMT_YUVA422P16LE, AV_PIX_FMT_YUVA420P16LE | ||
| 127 | |||
| 128 | #define RGB_FORMATS \ | ||
| 129 | AV_PIX_FMT_ARGB, AV_PIX_FMT_RGBA, \ | ||
| 130 | AV_PIX_FMT_ABGR, AV_PIX_FMT_BGRA, \ | ||
| 131 | AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24, \ | ||
| 132 | AV_PIX_FMT_RGB48LE, AV_PIX_FMT_RGBA64LE, \ | ||
| 133 | AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP, \ | ||
| 134 | AV_PIX_FMT_GBRP9LE, AV_PIX_FMT_GBRP10LE, \ | ||
| 135 | AV_PIX_FMT_GBRAP10LE, \ | ||
| 136 | AV_PIX_FMT_GBRP12LE, AV_PIX_FMT_GBRP14LE, \ | ||
| 137 | AV_PIX_FMT_GBRP16LE, AV_PIX_FMT_GBRAP12LE, \ | ||
| 138 | AV_PIX_FMT_GBRAP16LE | ||
| 139 | |||
| 140 | #define GRAY_FORMATS \ | ||
| 141 | AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY9LE, AV_PIX_FMT_GRAY10LE, \ | ||
| 142 | AV_PIX_FMT_GRAY12LE, AV_PIX_FMT_GRAY14LE, AV_PIX_FMT_GRAY16LE | ||
| 143 | |||
| 144 | static const enum AVPixelFormat yuv_pix_fmts[] = { YUV_FORMATS, AV_PIX_FMT_NONE }; | ||
| 145 | static const enum AVPixelFormat rgb_pix_fmts[] = { RGB_FORMATS, AV_PIX_FMT_NONE }; | ||
| 146 | static const enum AVPixelFormat all_pix_fmts[] = { RGB_FORMATS, YUV_FORMATS, GRAY_FORMATS, AV_PIX_FMT_NONE }; | ||
| 147 | |||
| 148 | 58 | static int query_formats(const AVFilterContext *ctx, | |
| 149 | AVFilterFormatsConfig **cfg_in, | ||
| 150 | AVFilterFormatsConfig **cfg_out) | ||
| 151 | { | ||
| 152 | 58 | const LutContext *s = ctx->priv; | |
| 153 | |||
| 154 | 1/2 ✓ Branch 0 taken 58 times. ✗ Branch 1 not taken. | 116 | const enum AVPixelFormat *pix_fmts = s->is_rgb ? rgb_pix_fmts : |
| 155 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 58 times. | 58 | s->is_yuv ? yuv_pix_fmts : |
| 156 | all_pix_fmts; | ||
| 157 | 58 | return ff_set_common_formats_from_list2(ctx, cfg_in, cfg_out, pix_fmts); | |
| 158 | } | ||
| 159 | |||
| 160 | /** | ||
| 161 | * Clip value val in the minval - maxval range. | ||
| 162 | */ | ||
| 163 | ✗ | static double clip(void *opaque, double val) | |
| 164 | { | ||
| 165 | ✗ | LutContext *s = opaque; | |
| 166 | ✗ | double minval = s->var_values[VAR_MINVAL]; | |
| 167 | ✗ | double maxval = s->var_values[VAR_MAXVAL]; | |
| 168 | |||
| 169 | ✗ | return av_clip(val, minval, maxval); | |
| 170 | } | ||
| 171 | |||
| 172 | /** | ||
| 173 | * Compute gamma correction for value val, assuming the minval-maxval | ||
| 174 | * range; val is clipped to a value contained in the same interval. | ||
| 175 | */ | ||
| 176 | ✗ | static double compute_gammaval(void *opaque, double gamma) | |
| 177 | { | ||
| 178 | ✗ | LutContext *s = opaque; | |
| 179 | ✗ | double val = s->var_values[VAR_CLIPVAL]; | |
| 180 | ✗ | double minval = s->var_values[VAR_MINVAL]; | |
| 181 | ✗ | double maxval = s->var_values[VAR_MAXVAL]; | |
| 182 | |||
| 183 | ✗ | return pow((val-minval)/(maxval-minval), gamma) * (maxval-minval)+minval; | |
| 184 | } | ||
| 185 | |||
| 186 | /** | ||
| 187 | * Compute ITU Rec.709 gamma correction of value val. | ||
| 188 | */ | ||
| 189 | ✗ | static double compute_gammaval709(void *opaque, double gamma) | |
| 190 | { | ||
| 191 | ✗ | LutContext *s = opaque; | |
| 192 | ✗ | double val = s->var_values[VAR_CLIPVAL]; | |
| 193 | ✗ | double minval = s->var_values[VAR_MINVAL]; | |
| 194 | ✗ | double maxval = s->var_values[VAR_MAXVAL]; | |
| 195 | ✗ | double level = (val - minval) / (maxval - minval); | |
| 196 | ✗ | level = level < 0.018 ? 4.5 * level | |
| 197 | ✗ | : 1.099 * pow(level, 1.0 / gamma) - 0.099; | |
| 198 | ✗ | return level * (maxval - minval) + minval; | |
| 199 | } | ||
| 200 | |||
| 201 | static double (* const funcs1[])(void *, double) = { | ||
| 202 | clip, | ||
| 203 | compute_gammaval, | ||
| 204 | compute_gammaval709, | ||
| 205 | NULL | ||
| 206 | }; | ||
| 207 | |||
| 208 | static const char * const funcs1_names[] = { | ||
| 209 | "clip", | ||
| 210 | "gammaval", | ||
| 211 | "gammaval709", | ||
| 212 | NULL | ||
| 213 | }; | ||
| 214 | |||
| 215 | 57 | static int config_props(AVFilterLink *inlink) | |
| 216 | { | ||
| 217 | 57 | AVFilterContext *ctx = inlink->dst; | |
| 218 | 57 | LutContext *s = ctx->priv; | |
| 219 | 57 | const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format); | |
| 220 | uint8_t rgba_map[4]; /* component index -> RGBA color index map */ | ||
| 221 | int min[4], max[4]; | ||
| 222 | int val, color, ret; | ||
| 223 | |||
| 224 | 57 | s->hsub = desc->log2_chroma_w; | |
| 225 | 57 | s->vsub = desc->log2_chroma_h; | |
| 226 | |||
| 227 | 57 | s->var_values[VAR_W] = inlink->w; | |
| 228 | 57 | s->var_values[VAR_H] = inlink->h; | |
| 229 | 57 | s->is_16bit = desc->comp[0].depth > 8; | |
| 230 | |||
| 231 | 3/3 ✓ Branch 0 taken 29 times. ✓ Branch 1 taken 2 times. ✓ Branch 2 taken 26 times. | 57 | switch (inlink->format) { |
| 232 | 29 | case AV_PIX_FMT_YUV410P: | |
| 233 | case AV_PIX_FMT_YUV411P: | ||
| 234 | case AV_PIX_FMT_YUV420P: | ||
| 235 | case AV_PIX_FMT_YUV422P: | ||
| 236 | case AV_PIX_FMT_YUV440P: | ||
| 237 | case AV_PIX_FMT_YUV444P: | ||
| 238 | case AV_PIX_FMT_YUVA420P: | ||
| 239 | case AV_PIX_FMT_YUVA422P: | ||
| 240 | case AV_PIX_FMT_YUVA444P: | ||
| 241 | case AV_PIX_FMT_YUV420P9LE: | ||
| 242 | case AV_PIX_FMT_YUV422P9LE: | ||
| 243 | case AV_PIX_FMT_YUV444P9LE: | ||
| 244 | case AV_PIX_FMT_YUVA420P9LE: | ||
| 245 | case AV_PIX_FMT_YUVA422P9LE: | ||
| 246 | case AV_PIX_FMT_YUVA444P9LE: | ||
| 247 | case AV_PIX_FMT_YUV420P10LE: | ||
| 248 | case AV_PIX_FMT_YUV422P10LE: | ||
| 249 | case AV_PIX_FMT_YUV440P10LE: | ||
| 250 | case AV_PIX_FMT_YUV444P10LE: | ||
| 251 | case AV_PIX_FMT_YUVA420P10LE: | ||
| 252 | case AV_PIX_FMT_YUVA422P10LE: | ||
| 253 | case AV_PIX_FMT_YUVA444P10LE: | ||
| 254 | case AV_PIX_FMT_YUV420P12LE: | ||
| 255 | case AV_PIX_FMT_YUV422P12LE: | ||
| 256 | case AV_PIX_FMT_YUV440P12LE: | ||
| 257 | case AV_PIX_FMT_YUV444P12LE: | ||
| 258 | case AV_PIX_FMT_YUV420P14LE: | ||
| 259 | case AV_PIX_FMT_YUV422P14LE: | ||
| 260 | case AV_PIX_FMT_YUV444P14LE: | ||
| 261 | case AV_PIX_FMT_YUV420P16LE: | ||
| 262 | case AV_PIX_FMT_YUV422P16LE: | ||
| 263 | case AV_PIX_FMT_YUV444P16LE: | ||
| 264 | case AV_PIX_FMT_YUVA420P16LE: | ||
| 265 | case AV_PIX_FMT_YUVA422P16LE: | ||
| 266 | case AV_PIX_FMT_YUVA444P16LE: | ||
| 267 | 29 | min[Y] = 16 * (1 << (desc->comp[0].depth - 8)); | |
| 268 | 29 | min[U] = 16 * (1 << (desc->comp[1].depth - 8)); | |
| 269 | 29 | min[V] = 16 * (1 << (desc->comp[2].depth - 8)); | |
| 270 | 29 | min[A] = 0; | |
| 271 | 29 | max[Y] = 235 * (1 << (desc->comp[0].depth - 8)); | |
| 272 | 29 | max[U] = 240 * (1 << (desc->comp[1].depth - 8)); | |
| 273 | 29 | max[V] = 240 * (1 << (desc->comp[2].depth - 8)); | |
| 274 | 29 | max[A] = (1 << desc->comp[0].depth) - 1; | |
| 275 | 29 | break; | |
| 276 | 2 | case AV_PIX_FMT_RGB48LE: | |
| 277 | case AV_PIX_FMT_RGBA64LE: | ||
| 278 | 2 | min[0] = min[1] = min[2] = min[3] = 0; | |
| 279 | 2 | max[0] = max[1] = max[2] = max[3] = 65535; | |
| 280 | 2 | break; | |
| 281 | 26 | default: | |
| 282 | 26 | min[0] = min[1] = min[2] = min[3] = 0; | |
| 283 | 26 | max[0] = max[1] = max[2] = max[3] = 255 * (1 << (desc->comp[0].depth - 8)); | |
| 284 | } | ||
| 285 | |||
| 286 | 57 | s->is_yuv = s->is_rgb = 0; | |
| 287 | 57 | s->is_planar = desc->flags & AV_PIX_FMT_FLAG_PLANAR; | |
| 288 | 2/2 ✓ Branch 1 taken 33 times. ✓ Branch 2 taken 24 times. | 57 | if (ff_fmt_is_in(inlink->format, yuv_pix_fmts)) s->is_yuv = 1; |
| 289 | 2/2 ✓ Branch 1 taken 18 times. ✓ Branch 2 taken 6 times. | 24 | else if (ff_fmt_is_in(inlink->format, rgb_pix_fmts)) s->is_rgb = 1; |
| 290 | |||
| 291 | 2/2 ✓ Branch 0 taken 18 times. ✓ Branch 1 taken 39 times. | 57 | if (s->is_rgb) { |
| 292 | 18 | ff_fill_rgba_map(rgba_map, inlink->format); | |
| 293 | 18 | s->step = av_get_bits_per_pixel(desc) >> 3; | |
| 294 | 2/2 ✓ Branch 0 taken 10 times. ✓ Branch 1 taken 8 times. | 18 | if (s->is_16bit) { |
| 295 | 10 | s->step = s->step >> 1; | |
| 296 | } | ||
| 297 | } | ||
| 298 | |||
| 299 | 2/2 ✓ Branch 0 taken 174 times. ✓ Branch 1 taken 57 times. | 231 | for (color = 0; color < desc->nb_components; color++) { |
| 300 | double res; | ||
| 301 | 2/2 ✓ Branch 0 taken 63 times. ✓ Branch 1 taken 111 times. | 174 | int comp = s->is_rgb ? rgba_map[color] : color; |
| 302 | |||
| 303 | /* create the parsed expression */ | ||
| 304 | 174 | av_expr_free(s->comp_expr[color]); | |
| 305 | 174 | s->comp_expr[color] = NULL; | |
| 306 | 174 | ret = av_expr_parse(&s->comp_expr[color], s->comp_expr_str[color], | |
| 307 | var_names, funcs1_names, funcs1, NULL, NULL, 0, ctx); | ||
| 308 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 174 times. | 174 | if (ret < 0) { |
| 309 | ✗ | av_log(ctx, AV_LOG_ERROR, | |
| 310 | "Error when parsing the expression '%s' for the component %d and color %d.\n", | ||
| 311 | s->comp_expr_str[comp], comp, color); | ||
| 312 | ✗ | return AVERROR(EINVAL); | |
| 313 | } | ||
| 314 | |||
| 315 | /* compute the lut */ | ||
| 316 | 174 | s->var_values[VAR_MAXVAL] = max[color]; | |
| 317 | 174 | s->var_values[VAR_MINVAL] = min[color]; | |
| 318 | |||
| 319 | 2/2 ✓ Branch 0 taken 11403264 times. ✓ Branch 1 taken 174 times. | 11403438 | for (val = 0; val < FF_ARRAY_ELEMS(s->lut[comp]); val++) { |
| 320 | 11403264 | s->var_values[VAR_VAL] = val; | |
| 321 | 11403264 | s->var_values[VAR_CLIPVAL] = av_clip(val, min[color], max[color]); | |
| 322 | 11403264 | s->var_values[VAR_NEGVAL] = | |
| 323 | 11403264 | av_clip(min[color] + max[color] - s->var_values[VAR_VAL], | |
| 324 | min[color], max[color]); | ||
| 325 | |||
| 326 | 11403264 | res = av_expr_eval(s->comp_expr[color], s->var_values, s); | |
| 327 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 11403264 times. | 11403264 | if (isnan(res)) { |
| 328 | ✗ | av_log(ctx, AV_LOG_ERROR, | |
| 329 | "Error when evaluating the expression '%s' for the value %d for the component %d.\n", | ||
| 330 | s->comp_expr_str[color], val, comp); | ||
| 331 | ✗ | return AVERROR(EINVAL); | |
| 332 | } | ||
| 333 | 11403264 | s->lut[comp][val] = av_clip((int)res, 0, max[A]); | |
| 334 | 11403264 | av_log(ctx, AV_LOG_DEBUG, "val[%d][%d] = %d\n", comp, val, s->lut[comp][val]); | |
| 335 | } | ||
| 336 | } | ||
| 337 | |||
| 338 | 57 | return 0; | |
| 339 | } | ||
| 340 | |||
| 341 | struct thread_data { | ||
| 342 | AVFrame *in; | ||
| 343 | AVFrame *out; | ||
| 344 | |||
| 345 | int w; | ||
| 346 | int h; | ||
| 347 | }; | ||
| 348 | |||
| 349 | #define LOAD_PACKED_COMMON\ | ||
| 350 | LutContext *s = ctx->priv;\ | ||
| 351 | const struct thread_data *td = arg;\ | ||
| 352 | \ | ||
| 353 | int i, j;\ | ||
| 354 | const int w = td->w;\ | ||
| 355 | const int h = td->h;\ | ||
| 356 | AVFrame *in = td->in;\ | ||
| 357 | AVFrame *out = td->out;\ | ||
| 358 | const uint16_t (*tab)[256*256] = (const uint16_t (*)[256*256])s->lut;\ | ||
| 359 | const int step = s->step;\ | ||
| 360 | \ | ||
| 361 | const int slice_start = (h * jobnr ) / nb_jobs;\ | ||
| 362 | const int slice_end = (h * (jobnr+1)) / nb_jobs;\ | ||
| 363 | |||
| 364 | /* packed, 16-bit */ | ||
| 365 | 4 | static int lut_packed_16bits(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) | |
| 366 | { | ||
| 367 | 4 | LOAD_PACKED_COMMON | |
| 368 | |||
| 369 | uint16_t *inrow, *outrow, *inrow0, *outrow0; | ||
| 370 | 4 | const int in_linesize = in->linesize[0] / 2; | |
| 371 | 4 | const int out_linesize = out->linesize[0] / 2; | |
| 372 | 4 | inrow0 = (uint16_t *)in ->data[0]; | |
| 373 | 4 | outrow0 = (uint16_t *)out->data[0]; | |
| 374 | |||
| 375 | 2/2 ✓ Branch 0 taken 1152 times. ✓ Branch 1 taken 4 times. | 1156 | for (i = slice_start; i < slice_end; i++) { |
| 376 | 1152 | inrow = inrow0 + i * in_linesize; | |
| 377 | 1152 | outrow = outrow0 + i * out_linesize; | |
| 378 | 2/2 ✓ Branch 0 taken 405504 times. ✓ Branch 1 taken 1152 times. | 406656 | for (j = 0; j < w; j++) { |
| 379 | |||
| 380 | 2/4 ✓ Branch 0 taken 202752 times. ✓ Branch 1 taken 202752 times. ✗ Branch 2 not taken. ✗ Branch 3 not taken. | 405504 | switch (step) { |
| 381 | #if HAVE_BIGENDIAN | ||
| 382 | case 4: outrow[3] = av_bswap16(tab[3][av_bswap16(inrow[3])]); // Fall-through | ||
| 383 | case 3: outrow[2] = av_bswap16(tab[2][av_bswap16(inrow[2])]); // Fall-through | ||
| 384 | case 2: outrow[1] = av_bswap16(tab[1][av_bswap16(inrow[1])]); // Fall-through | ||
| 385 | default: outrow[0] = av_bswap16(tab[0][av_bswap16(inrow[0])]); | ||
| 386 | #else | ||
| 387 | 202752 | case 4: outrow[3] = tab[3][inrow[3]]; // Fall-through | |
| 388 | 405504 | case 3: outrow[2] = tab[2][inrow[2]]; // Fall-through | |
| 389 | 405504 | case 2: outrow[1] = tab[1][inrow[1]]; // Fall-through | |
| 390 | 405504 | default: outrow[0] = tab[0][inrow[0]]; | |
| 391 | #endif | ||
| 392 | } | ||
| 393 | 405504 | outrow += step; | |
| 394 | 405504 | inrow += step; | |
| 395 | } | ||
| 396 | } | ||
| 397 | |||
| 398 | 4 | return 0; | |
| 399 | } | ||
| 400 | |||
| 401 | /* packed, 8-bit */ | ||
| 402 | 12 | static int lut_packed_8bits(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) | |
| 403 | { | ||
| 404 | 12 | LOAD_PACKED_COMMON | |
| 405 | |||
| 406 | uint8_t *inrow, *outrow, *inrow0, *outrow0; | ||
| 407 | 12 | const int in_linesize = in->linesize[0]; | |
| 408 | 12 | const int out_linesize = out->linesize[0]; | |
| 409 | 12 | inrow0 = in ->data[0]; | |
| 410 | 12 | outrow0 = out->data[0]; | |
| 411 | |||
| 412 | 2/2 ✓ Branch 0 taken 3456 times. ✓ Branch 1 taken 12 times. | 3468 | for (i = slice_start; i < slice_end; i++) { |
| 413 | 3456 | inrow = inrow0 + i * in_linesize; | |
| 414 | 3456 | outrow = outrow0 + i * out_linesize; | |
| 415 | 2/2 ✓ Branch 0 taken 1216512 times. ✓ Branch 1 taken 3456 times. | 1219968 | for (j = 0; j < w; j++) { |
| 416 | 2/4 ✓ Branch 0 taken 811008 times. ✓ Branch 1 taken 405504 times. ✗ Branch 2 not taken. ✗ Branch 3 not taken. | 1216512 | switch (step) { |
| 417 | 811008 | case 4: outrow[3] = tab[3][inrow[3]]; // Fall-through | |
| 418 | 1216512 | case 3: outrow[2] = tab[2][inrow[2]]; // Fall-through | |
| 419 | 1216512 | case 2: outrow[1] = tab[1][inrow[1]]; // Fall-through | |
| 420 | 1216512 | default: outrow[0] = tab[0][inrow[0]]; | |
| 421 | } | ||
| 422 | 1216512 | outrow += step; | |
| 423 | 1216512 | inrow += step; | |
| 424 | } | ||
| 425 | } | ||
| 426 | |||
| 427 | 12 | return 0; | |
| 428 | } | ||
| 429 | |||
| 430 | #define LOAD_PLANAR_COMMON\ | ||
| 431 | LutContext *s = ctx->priv;\ | ||
| 432 | const struct thread_data *td = arg;\ | ||
| 433 | int i, j, plane;\ | ||
| 434 | AVFrame *in = td->in;\ | ||
| 435 | AVFrame *out = td->out;\ | ||
| 436 | |||
| 437 | #define PLANAR_COMMON\ | ||
| 438 | int vsub = plane == 1 || plane == 2 ? s->vsub : 0;\ | ||
| 439 | int hsub = plane == 1 || plane == 2 ? s->hsub : 0;\ | ||
| 440 | int h = AV_CEIL_RSHIFT(td->h, vsub);\ | ||
| 441 | int w = AV_CEIL_RSHIFT(td->w, hsub);\ | ||
| 442 | const uint16_t *tab = s->lut[plane];\ | ||
| 443 | \ | ||
| 444 | const int slice_start = (h * jobnr ) / nb_jobs;\ | ||
| 445 | const int slice_end = (h * (jobnr+1)) / nb_jobs;\ | ||
| 446 | |||
| 447 | /* planar >8 bit depth */ | ||
| 448 | 66 | static int lut_planar_16bits(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) | |
| 449 | { | ||
| 450 | 66 | LOAD_PLANAR_COMMON | |
| 451 | |||
| 452 | uint16_t *inrow, *outrow; | ||
| 453 | |||
| 454 | 5/6 ✓ Branch 0 taken 244 times. ✓ Branch 1 taken 12 times. ✓ Branch 2 taken 190 times. ✓ Branch 3 taken 54 times. ✓ Branch 4 taken 190 times. ✗ Branch 5 not taken. | 256 | for (plane = 0; plane < 4 && in->data[plane] && in->linesize[plane]; plane++) { |
| 455 | 8/8 ✓ Branch 0 taken 134 times. ✓ Branch 1 taken 56 times. ✓ Branch 2 taken 56 times. ✓ Branch 3 taken 78 times. ✓ Branch 4 taken 134 times. ✓ Branch 5 taken 56 times. ✓ Branch 6 taken 56 times. ✓ Branch 7 taken 78 times. | 190 | PLANAR_COMMON |
| 456 | |||
| 457 | 190 | const int in_linesize = in->linesize[plane] / 2; | |
| 458 | 190 | const int out_linesize = out->linesize[plane] / 2; | |
| 459 | |||
| 460 | 190 | inrow = (uint16_t *)in ->data[plane] + slice_start * in_linesize; | |
| 461 | 190 | outrow = (uint16_t *)out->data[plane] + slice_start * out_linesize; | |
| 462 | |||
| 463 | 2/2 ✓ Branch 0 taken 50112 times. ✓ Branch 1 taken 190 times. | 50302 | for (i = slice_start; i < slice_end; i++) { |
| 464 | 2/2 ✓ Branch 0 taken 15814656 times. ✓ Branch 1 taken 50112 times. | 15864768 | for (j = 0; j < w; j++) { |
| 465 | #if HAVE_BIGENDIAN | ||
| 466 | outrow[j] = av_bswap16(tab[av_bswap16(inrow[j])]); | ||
| 467 | #else | ||
| 468 | 15814656 | outrow[j] = tab[inrow[j]]; | |
| 469 | #endif | ||
| 470 | } | ||
| 471 | 50112 | inrow += in_linesize; | |
| 472 | 50112 | outrow += out_linesize; | |
| 473 | } | ||
| 474 | } | ||
| 475 | |||
| 476 | 66 | return 0; | |
| 477 | } | ||
| 478 | |||
| 479 | /* planar 8bit depth */ | ||
| 480 | 32 | static int lut_planar_8bits(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) | |
| 481 | { | ||
| 482 | 32 | LOAD_PLANAR_COMMON | |
| 483 | |||
| 484 | uint8_t *inrow, *outrow; | ||
| 485 | |||
| 486 | 5/6 ✓ Branch 0 taken 124 times. ✓ Branch 1 taken 8 times. ✓ Branch 2 taken 100 times. ✓ Branch 3 taken 24 times. ✓ Branch 4 taken 100 times. ✗ Branch 5 not taken. | 132 | for (plane = 0; plane < 4 && in->data[plane] && in->linesize[plane]; plane++) { |
| 487 | 8/8 ✓ Branch 0 taken 70 times. ✓ Branch 1 taken 30 times. ✓ Branch 2 taken 30 times. ✓ Branch 3 taken 40 times. ✓ Branch 4 taken 70 times. ✓ Branch 5 taken 30 times. ✓ Branch 6 taken 30 times. ✓ Branch 7 taken 40 times. | 100 | PLANAR_COMMON |
| 488 | |||
| 489 | 100 | const int in_linesize = in->linesize[plane]; | |
| 490 | 100 | const int out_linesize = out->linesize[plane]; | |
| 491 | |||
| 492 | 100 | inrow = in ->data[plane] + slice_start * in_linesize; | |
| 493 | 100 | outrow = out->data[plane] + slice_start * out_linesize; | |
| 494 | |||
| 495 | 2/2 ✓ Branch 0 taken 25056 times. ✓ Branch 1 taken 100 times. | 25156 | for (i = slice_start; i < slice_end; i++) { |
| 496 | 2/2 ✓ Branch 0 taken 7527168 times. ✓ Branch 1 taken 25056 times. | 7552224 | for (j = 0; j < w; j++) |
| 497 | 7527168 | outrow[j] = tab[inrow[j]]; | |
| 498 | 25056 | inrow += in_linesize; | |
| 499 | 25056 | outrow += out_linesize; | |
| 500 | } | ||
| 501 | } | ||
| 502 | |||
| 503 | 32 | return 0; | |
| 504 | } | ||
| 505 | |||
| 506 | #define PACKED_THREAD_DATA\ | ||
| 507 | struct thread_data td = {\ | ||
| 508 | .in = in,\ | ||
| 509 | .out = out,\ | ||
| 510 | .w = inlink->w,\ | ||
| 511 | .h = in->height,\ | ||
| 512 | };\ | ||
| 513 | |||
| 514 | #define PLANAR_THREAD_DATA\ | ||
| 515 | struct thread_data td = {\ | ||
| 516 | .in = in,\ | ||
| 517 | .out = out,\ | ||
| 518 | .w = inlink->w,\ | ||
| 519 | .h = inlink->h,\ | ||
| 520 | };\ | ||
| 521 | |||
| 522 | 114 | static int filter_frame(AVFilterLink *inlink, AVFrame *in) | |
| 523 | { | ||
| 524 | 114 | AVFilterContext *ctx = inlink->dst; | |
| 525 | 114 | LutContext *s = ctx->priv; | |
| 526 | 114 | AVFilterLink *outlink = ctx->outputs[0]; | |
| 527 | AVFrame *out; | ||
| 528 | 114 | int direct = 0; | |
| 529 | |||
| 530 | 1/2 ✓ Branch 1 taken 114 times. ✗ Branch 2 not taken. | 114 | if (av_frame_is_writable(in)) { |
| 531 | 114 | direct = 1; | |
| 532 | 114 | out = in; | |
| 533 | } else { | ||
| 534 | ✗ | out = ff_get_video_buffer(outlink, outlink->w, outlink->h); | |
| 535 | ✗ | if (!out) { | |
| 536 | ✗ | av_frame_free(&in); | |
| 537 | ✗ | return AVERROR(ENOMEM); | |
| 538 | } | ||
| 539 | ✗ | av_frame_copy_props(out, in); | |
| 540 | } | ||
| 541 | |||
| 542 | 114 | av_frame_side_data_remove_by_props(&out->side_data, &out->nb_side_data, | |
| 543 | AV_SIDE_DATA_PROP_COLOR_DEPENDENT); | ||
| 544 | |||
| 545 | 6/6 ✓ Branch 0 taken 36 times. ✓ Branch 1 taken 78 times. ✓ Branch 2 taken 20 times. ✓ Branch 3 taken 16 times. ✓ Branch 4 taken 4 times. ✓ Branch 5 taken 16 times. | 118 | if (s->is_rgb && s->is_16bit && !s->is_planar) { |
| 546 | /* packed, 16-bit */ | ||
| 547 | 4 | PACKED_THREAD_DATA | |
| 548 | 4 | ff_filter_execute(ctx, lut_packed_16bits, &td, NULL, | |
| 549 | 1/2 ✓ Branch 0 taken 4 times. ✗ Branch 1 not taken. | 4 | FFMIN(in->height, ff_filter_get_nb_threads(ctx))); |
| 550 | 4/4 ✓ Branch 0 taken 32 times. ✓ Branch 1 taken 78 times. ✓ Branch 2 taken 12 times. ✓ Branch 3 taken 20 times. | 122 | } else if (s->is_rgb && !s->is_planar) { |
| 551 | /* packed 8 bits */ | ||
| 552 | 12 | PACKED_THREAD_DATA | |
| 553 | 12 | ff_filter_execute(ctx, lut_packed_8bits, &td, NULL, | |
| 554 | 1/2 ✓ Branch 0 taken 12 times. ✗ Branch 1 not taken. | 12 | FFMIN(in->height, ff_filter_get_nb_threads(ctx))); |
| 555 | 2/2 ✓ Branch 0 taken 66 times. ✓ Branch 1 taken 32 times. | 98 | } else if (s->is_16bit) { |
| 556 | /* planar >8 bit depth */ | ||
| 557 | 66 | PLANAR_THREAD_DATA | |
| 558 | 66 | ff_filter_execute(ctx, lut_planar_16bits, &td, NULL, | |
| 559 | 1/2 ✓ Branch 0 taken 66 times. ✗ Branch 1 not taken. | 66 | FFMIN(in->height, ff_filter_get_nb_threads(ctx))); |
| 560 | } else { | ||
| 561 | /* planar 8bit depth */ | ||
| 562 | 32 | PLANAR_THREAD_DATA | |
| 563 | 32 | ff_filter_execute(ctx, lut_planar_8bits, &td, NULL, | |
| 564 | 1/2 ✓ Branch 0 taken 32 times. ✗ Branch 1 not taken. | 32 | FFMIN(in->height, ff_filter_get_nb_threads(ctx))); |
| 565 | } | ||
| 566 | |||
| 567 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 114 times. | 114 | if (!direct) |
| 568 | ✗ | av_frame_free(&in); | |
| 569 | |||
| 570 | 114 | return ff_filter_frame(outlink, out); | |
| 571 | } | ||
| 572 | |||
| 573 | ✗ | static int process_command(AVFilterContext *ctx, const char *cmd, const char *args, | |
| 574 | char *res, int res_len, int flags) | ||
| 575 | { | ||
| 576 | ✗ | int ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags); | |
| 577 | |||
| 578 | ✗ | if (ret < 0) | |
| 579 | ✗ | return ret; | |
| 580 | |||
| 581 | ✗ | return config_props(ctx->inputs[0]); | |
| 582 | } | ||
| 583 | |||
| 584 | static const AVFilterPad inputs[] = { | ||
| 585 | { .name = "default", | ||
| 586 | .type = AVMEDIA_TYPE_VIDEO, | ||
| 587 | .filter_frame = filter_frame, | ||
| 588 | .config_props = config_props, | ||
| 589 | }, | ||
| 590 | }; | ||
| 591 | |||
| 592 | #define DEFINE_LUT_FILTER(name_, description_, priv_class_) \ | ||
| 593 | const FFFilter ff_vf_##name_ = { \ | ||
| 594 | .p.name = #name_, \ | ||
| 595 | .p.description = NULL_IF_CONFIG_SMALL(description_), \ | ||
| 596 | .p.priv_class = &priv_class_ ## _class, \ | ||
| 597 | .p.flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | \ | ||
| 598 | AVFILTER_FLAG_SLICE_THREADS, \ | ||
| 599 | .priv_size = sizeof(LutContext), \ | ||
| 600 | .init = name_##_init, \ | ||
| 601 | .uninit = uninit, \ | ||
| 602 | FILTER_INPUTS(inputs), \ | ||
| 603 | FILTER_OUTPUTS(ff_video_default_filterpad), \ | ||
| 604 | FILTER_QUERY_FUNC2(query_formats), \ | ||
| 605 | .process_command = process_command, \ | ||
| 606 | } | ||
| 607 | |||
| 608 | AVFILTER_DEFINE_CLASS_EXT(lut, "lut/lutyuv/lutrgb", options); | ||
| 609 | |||
| 610 | #if CONFIG_LUT_FILTER | ||
| 611 | |||
| 612 | #define lut_init NULL | ||
| 613 | DEFINE_LUT_FILTER(lut, "Compute and apply a lookup table to the RGB/YUV input video.", | ||
| 614 | lut); | ||
| 615 | #undef lut_init | ||
| 616 | #endif | ||
| 617 | |||
| 618 | #if CONFIG_LUTYUV_FILTER | ||
| 619 | |||
| 620 | ✗ | static av_cold int lutyuv_init(AVFilterContext *ctx) | |
| 621 | { | ||
| 622 | ✗ | LutContext *s = ctx->priv; | |
| 623 | |||
| 624 | ✗ | s->is_yuv = 1; | |
| 625 | |||
| 626 | ✗ | return 0; | |
| 627 | } | ||
| 628 | |||
| 629 | DEFINE_LUT_FILTER(lutyuv, "Compute and apply a lookup table to the YUV input video.", | ||
| 630 | lut); | ||
| 631 | #endif | ||
| 632 | |||
| 633 | #if CONFIG_LUTRGB_FILTER | ||
| 634 | |||
| 635 | ✗ | static av_cold int lutrgb_init(AVFilterContext *ctx) | |
| 636 | { | ||
| 637 | ✗ | LutContext *s = ctx->priv; | |
| 638 | |||
| 639 | ✗ | s->is_rgb = 1; | |
| 640 | |||
| 641 | ✗ | return 0; | |
| 642 | } | ||
| 643 | |||
| 644 | DEFINE_LUT_FILTER(lutrgb, "Compute and apply a lookup table to the RGB input video.", | ||
| 645 | lut); | ||
| 646 | #endif | ||
| 647 |
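
The mapping exercised by the listing above can be reproduced outside the filter. Below is a minimal standalone C sketch of the same idea as `compute_gammaval()` and `lut_planar_8bits()`: build a per-value table once, then apply it per pixel as a plain lookup. The 16-235 bounds match the limited-range Y limits set in `config_props()`; the gamma of 2.2 and the three sample pixel values are hypothetical test inputs, not taken from the listing.

```c
/* Standalone sketch of the LUT build/apply pattern used by vf_lut.c.
 * gammaval() mirrors compute_gammaval(); the table build mirrors the
 * per-value loop in config_props(); the row loop mirrors lut_planar_8bits(). */
#include <math.h>
#include <stdint.h>
#include <stdio.h>

static double gammaval(double val, double minval, double maxval, double gamma)
{
    /* normalize into [0,1], apply the power curve, rescale back */
    return pow((val - minval) / (maxval - minval), gamma) * (maxval - minval) + minval;
}

int main(void)
{
    uint16_t lut[256];
    const double minval = 16.0, maxval = 235.0; /* 8-bit limited-range Y bounds */

    /* build the table once for every possible input value */
    for (int val = 0; val < 256; val++) {
        double clipval = val < minval ? minval : (val > maxval ? maxval : val);
        lut[val] = (uint16_t)gammaval(clipval, minval, maxval, 1.0 / 2.2);
    }

    /* apply it per pixel, exactly as the planar 8-bit slice function does */
    uint8_t row[3] = { 16, 128, 235 };               /* hypothetical pixels */
    for (int j = 0; j < 3; j++)
        printf("%3u -> %3u\n", (unsigned)row[j], (unsigned)lut[row[j]]);
    return 0;
}
```

Precomputing every entry once in `config_props()` is what keeps the per-pixel work in the slice threads down to a single table lookup, which is why even the 16-bit paths can afford a 256*256-entry table per component.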