Line | Branch | Exec | Source |
---|---|---|---|
1 | /* | ||
2 | * Copyright (c) 2011 Stefano Sabatini | ||
3 | * | ||
4 | * This file is part of FFmpeg. | ||
5 | * | ||
6 | * FFmpeg is free software; you can redistribute it and/or | ||
7 | * modify it under the terms of the GNU Lesser General Public | ||
8 | * License as published by the Free Software Foundation; either | ||
9 | * version 2.1 of the License, or (at your option) any later version. | ||
10 | * | ||
11 | * FFmpeg is distributed in the hope that it will be useful, | ||
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
14 | * Lesser General Public License for more details. | ||
15 | * | ||
16 | * You should have received a copy of the GNU Lesser General Public | ||
17 | * License along with FFmpeg; if not, write to the Free Software | ||
18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
19 | */ | ||
20 | |||
21 | /** | ||
22 | * @file | ||
23 | * Compute a look-up table for binding the input value to the output | ||
24 | * value, and apply it to input video. | ||
25 | */ | ||
26 | |||
27 | #include "config_components.h" | ||
28 | |||
29 | #include "libavutil/attributes.h" | ||
30 | #include "libavutil/bswap.h" | ||
31 | #include "libavutil/common.h" | ||
32 | #include "libavutil/eval.h" | ||
33 | #include "libavutil/mem.h" | ||
34 | #include "libavutil/opt.h" | ||
35 | #include "libavutil/pixdesc.h" | ||
36 | #include "avfilter.h" | ||
37 | #include "drawutils.h" | ||
38 | #include "filters.h" | ||
39 | #include "formats.h" | ||
40 | #include "video.h" | ||
41 | |||
42 | static const char *const var_names[] = { | ||
43 | "w", ///< width of the input video | ||
44 | "h", ///< height of the input video | ||
45 | "val", ///< input value for the pixel | ||
46 | "maxval", ///< max value for the pixel | ||
47 | "minval", ///< min value for the pixel | ||
48 | "negval", ///< negated value | ||
49 | "clipval", | ||
50 | NULL | ||
51 | }; | ||
52 | |||
53 | enum var_name { | ||
54 | VAR_W, | ||
55 | VAR_H, | ||
56 | VAR_VAL, | ||
57 | VAR_MAXVAL, | ||
58 | VAR_MINVAL, | ||
59 | VAR_NEGVAL, | ||
60 | VAR_CLIPVAL, | ||
61 | VAR_VARS_NB | ||
62 | }; | ||
63 | |||
64 | typedef struct LutContext { | ||
65 | const AVClass *class; | ||
66 | uint16_t lut[4][256 * 256]; ///< lookup table for each component | ||
67 | char *comp_expr_str[4]; | ||
68 | AVExpr *comp_expr[4]; | ||
69 | int hsub, vsub; | ||
70 | double var_values[VAR_VARS_NB]; | ||
71 | int is_rgb, is_yuv; | ||
72 | int is_planar; | ||
73 | int is_16bit; | ||
74 | int step; | ||
75 | } LutContext; | ||
76 | |||
77 | #define Y 0 | ||
78 | #define U 1 | ||
79 | #define V 2 | ||
80 | #define R 0 | ||
81 | #define G 1 | ||
82 | #define B 2 | ||
83 | #define A 3 | ||
84 | |||
85 | #define OFFSET(x) offsetof(LutContext, x) | ||
86 | #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM | ||
87 | |||
88 | static const AVOption options[] = { | ||
89 | { "c0", "set component #0 expression", OFFSET(comp_expr_str[0]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
90 | { "c1", "set component #1 expression", OFFSET(comp_expr_str[1]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
91 | { "c2", "set component #2 expression", OFFSET(comp_expr_str[2]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
92 | { "c3", "set component #3 expression", OFFSET(comp_expr_str[3]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
93 | { "y", "set Y expression", OFFSET(comp_expr_str[Y]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
94 | { "u", "set U expression", OFFSET(comp_expr_str[U]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
95 | { "v", "set V expression", OFFSET(comp_expr_str[V]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
96 | { "r", "set R expression", OFFSET(comp_expr_str[R]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
97 | { "g", "set G expression", OFFSET(comp_expr_str[G]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
98 | { "b", "set B expression", OFFSET(comp_expr_str[B]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
99 | { "a", "set A expression", OFFSET(comp_expr_str[A]), AV_OPT_TYPE_STRING, { .str = "clipval" }, .flags = FLAGS }, | ||
100 | { NULL } | ||
101 | }; | ||
102 | |||
103 | 115 | static av_cold void uninit(AVFilterContext *ctx) | |
104 | { | ||
105 | 115 | LutContext *s = ctx->priv; | |
106 | int i; | ||
107 | |||
108 | 2/2 ✓ Branch 0 taken 460 times. ✓ Branch 1 taken 115 times. | 575 | for (i = 0; i < 4; i++) { |
109 | 460 | av_expr_free(s->comp_expr[i]); | |
110 | 460 | s->comp_expr[i] = NULL; | |
111 | 460 | av_freep(&s->comp_expr_str[i]); | |
112 | } | ||
113 | 115 | } | |
114 | |||
115 | #define YUV_FORMATS \ | ||
116 | AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P, \ | ||
117 | AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV440P, \ | ||
118 | AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P, \ | ||
119 | AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ420P, \ | ||
120 | AV_PIX_FMT_YUVJ440P, \ | ||
121 | AV_PIX_FMT_YUV444P9LE, AV_PIX_FMT_YUV422P9LE, AV_PIX_FMT_YUV420P9LE, \ | ||
122 | AV_PIX_FMT_YUV444P10LE, AV_PIX_FMT_YUV422P10LE, AV_PIX_FMT_YUV420P10LE, AV_PIX_FMT_YUV440P10LE, \ | ||
123 | AV_PIX_FMT_YUV444P12LE, AV_PIX_FMT_YUV422P12LE, AV_PIX_FMT_YUV420P12LE, AV_PIX_FMT_YUV440P12LE, \ | ||
124 | AV_PIX_FMT_YUV444P14LE, AV_PIX_FMT_YUV422P14LE, AV_PIX_FMT_YUV420P14LE, \ | ||
125 | AV_PIX_FMT_YUV444P16LE, AV_PIX_FMT_YUV422P16LE, AV_PIX_FMT_YUV420P16LE, \ | ||
126 | AV_PIX_FMT_YUVA444P16LE, AV_PIX_FMT_YUVA422P16LE, AV_PIX_FMT_YUVA420P16LE | ||
127 | |||
128 | #define RGB_FORMATS \ | ||
129 | AV_PIX_FMT_ARGB, AV_PIX_FMT_RGBA, \ | ||
130 | AV_PIX_FMT_ABGR, AV_PIX_FMT_BGRA, \ | ||
131 | AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24, \ | ||
132 | AV_PIX_FMT_RGB48LE, AV_PIX_FMT_RGBA64LE, \ | ||
133 | AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP, \ | ||
134 | AV_PIX_FMT_GBRP9LE, AV_PIX_FMT_GBRP10LE, \ | ||
135 | AV_PIX_FMT_GBRAP10LE, \ | ||
136 | AV_PIX_FMT_GBRP12LE, AV_PIX_FMT_GBRP14LE, \ | ||
137 | AV_PIX_FMT_GBRP16LE, AV_PIX_FMT_GBRAP12LE, \ | ||
138 | AV_PIX_FMT_GBRAP16LE | ||
139 | |||
140 | #define GRAY_FORMATS \ | ||
141 | AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY9LE, AV_PIX_FMT_GRAY10LE, \ | ||
142 | AV_PIX_FMT_GRAY12LE, AV_PIX_FMT_GRAY14LE, AV_PIX_FMT_GRAY16LE | ||
143 | |||
144 | static const enum AVPixelFormat yuv_pix_fmts[] = { YUV_FORMATS, AV_PIX_FMT_NONE }; | ||
145 | static const enum AVPixelFormat rgb_pix_fmts[] = { RGB_FORMATS, AV_PIX_FMT_NONE }; | ||
146 | static const enum AVPixelFormat all_pix_fmts[] = { RGB_FORMATS, YUV_FORMATS, GRAY_FORMATS, AV_PIX_FMT_NONE }; | ||
147 | |||
148 | 58 | static int query_formats(AVFilterContext *ctx) | |
149 | { | ||
150 | 58 | LutContext *s = ctx->priv; | |
151 | |||
152 | 1/2 ✓ Branch 0 taken 58 times. ✗ Branch 1 not taken. | 116 | const enum AVPixelFormat *pix_fmts = s->is_rgb ? rgb_pix_fmts : |
153 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 58 times. | 58 | s->is_yuv ? yuv_pix_fmts : |
154 | all_pix_fmts; | ||
155 | 58 | return ff_set_common_formats_from_list(ctx, pix_fmts); | |
156 | } | ||
157 | |||
158 | /** | ||
159 | * Clip value val in the minval - maxval range. | ||
160 | */ | ||
161 | ✗ | static double clip(void *opaque, double val) | |
162 | { | ||
163 | ✗ | LutContext *s = opaque; | |
164 | ✗ | double minval = s->var_values[VAR_MINVAL]; | |
165 | ✗ | double maxval = s->var_values[VAR_MAXVAL]; | |
166 | |||
167 | ✗ | return av_clip(val, minval, maxval); | |
168 | } | ||
169 | |||
170 | /** | ||
171 | * Compute gamma correction for value val, assuming the minval-maxval | ||
172 | * range; val is clipped to a value contained in the same interval. | ||
173 | */ | ||
174 | ✗ | static double compute_gammaval(void *opaque, double gamma) | |
175 | { | ||
176 | ✗ | LutContext *s = opaque; | |
177 | ✗ | double val = s->var_values[VAR_CLIPVAL]; | |
178 | ✗ | double minval = s->var_values[VAR_MINVAL]; | |
179 | ✗ | double maxval = s->var_values[VAR_MAXVAL]; | |
180 | |||
181 | ✗ | return pow((val-minval)/(maxval-minval), gamma) * (maxval-minval)+minval; | |
182 | } | ||
183 | |||
184 | /** | ||
185 | * Compute ITU Rec.709 gamma correction of value val. | ||
186 | */ | ||
187 | ✗ | static double compute_gammaval709(void *opaque, double gamma) | |
188 | { | ||
189 | ✗ | LutContext *s = opaque; | |
190 | ✗ | double val = s->var_values[VAR_CLIPVAL]; | |
191 | ✗ | double minval = s->var_values[VAR_MINVAL]; | |
192 | ✗ | double maxval = s->var_values[VAR_MAXVAL]; | |
193 | ✗ | double level = (val - minval) / (maxval - minval); | |
194 | ✗ | level = level < 0.018 ? 4.5 * level | |
195 | ✗ | : 1.099 * pow(level, 1.0 / gamma) - 0.099; | |
196 | ✗ | return level * (maxval - minval) + minval; | |
197 | } | ||
198 | |||
199 | static double (* const funcs1[])(void *, double) = { | ||
200 | clip, | ||
201 | compute_gammaval, | ||
202 | compute_gammaval709, | ||
203 | NULL | ||
204 | }; | ||
205 | |||
206 | static const char * const funcs1_names[] = { | ||
207 | "clip", | ||
208 | "gammaval", | ||
209 | "gammaval709", | ||
210 | NULL | ||
211 | }; | ||
212 | |||
213 | 57 | static int config_props(AVFilterLink *inlink) | |
214 | { | ||
215 | 57 | AVFilterContext *ctx = inlink->dst; | |
216 | 57 | LutContext *s = ctx->priv; | |
217 | 57 | const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format); | |
218 | uint8_t rgba_map[4]; /* component index -> RGBA color index map */ | ||
219 | int min[4], max[4]; | ||
220 | int val, color, ret; | ||
221 | |||
222 | 57 | s->hsub = desc->log2_chroma_w; | |
223 | 57 | s->vsub = desc->log2_chroma_h; | |
224 | |||
225 | 57 | s->var_values[VAR_W] = inlink->w; | |
226 | 57 | s->var_values[VAR_H] = inlink->h; | |
227 | 57 | s->is_16bit = desc->comp[0].depth > 8; | |
228 | |||
229 | 3/3 ✓ Branch 0 taken 29 times. ✓ Branch 1 taken 2 times. ✓ Branch 2 taken 26 times. | 57 | switch (inlink->format) { |
230 | 29 | case AV_PIX_FMT_YUV410P: | |
231 | case AV_PIX_FMT_YUV411P: | ||
232 | case AV_PIX_FMT_YUV420P: | ||
233 | case AV_PIX_FMT_YUV422P: | ||
234 | case AV_PIX_FMT_YUV440P: | ||
235 | case AV_PIX_FMT_YUV444P: | ||
236 | case AV_PIX_FMT_YUVA420P: | ||
237 | case AV_PIX_FMT_YUVA422P: | ||
238 | case AV_PIX_FMT_YUVA444P: | ||
239 | case AV_PIX_FMT_YUV420P9LE: | ||
240 | case AV_PIX_FMT_YUV422P9LE: | ||
241 | case AV_PIX_FMT_YUV444P9LE: | ||
242 | case AV_PIX_FMT_YUVA420P9LE: | ||
243 | case AV_PIX_FMT_YUVA422P9LE: | ||
244 | case AV_PIX_FMT_YUVA444P9LE: | ||
245 | case AV_PIX_FMT_YUV420P10LE: | ||
246 | case AV_PIX_FMT_YUV422P10LE: | ||
247 | case AV_PIX_FMT_YUV440P10LE: | ||
248 | case AV_PIX_FMT_YUV444P10LE: | ||
249 | case AV_PIX_FMT_YUVA420P10LE: | ||
250 | case AV_PIX_FMT_YUVA422P10LE: | ||
251 | case AV_PIX_FMT_YUVA444P10LE: | ||
252 | case AV_PIX_FMT_YUV420P12LE: | ||
253 | case AV_PIX_FMT_YUV422P12LE: | ||
254 | case AV_PIX_FMT_YUV440P12LE: | ||
255 | case AV_PIX_FMT_YUV444P12LE: | ||
256 | case AV_PIX_FMT_YUV420P14LE: | ||
257 | case AV_PIX_FMT_YUV422P14LE: | ||
258 | case AV_PIX_FMT_YUV444P14LE: | ||
259 | case AV_PIX_FMT_YUV420P16LE: | ||
260 | case AV_PIX_FMT_YUV422P16LE: | ||
261 | case AV_PIX_FMT_YUV444P16LE: | ||
262 | case AV_PIX_FMT_YUVA420P16LE: | ||
263 | case AV_PIX_FMT_YUVA422P16LE: | ||
264 | case AV_PIX_FMT_YUVA444P16LE: | ||
265 | 29 | min[Y] = 16 * (1 << (desc->comp[0].depth - 8)); | |
266 | 29 | min[U] = 16 * (1 << (desc->comp[1].depth - 8)); | |
267 | 29 | min[V] = 16 * (1 << (desc->comp[2].depth - 8)); | |
268 | 29 | min[A] = 0; | |
269 | 29 | max[Y] = 235 * (1 << (desc->comp[0].depth - 8)); | |
270 | 29 | max[U] = 240 * (1 << (desc->comp[1].depth - 8)); | |
271 | 29 | max[V] = 240 * (1 << (desc->comp[2].depth - 8)); | |
272 | 29 | max[A] = (1 << desc->comp[0].depth) - 1; | |
273 | 29 | break; | |
274 | 2 | case AV_PIX_FMT_RGB48LE: | |
275 | case AV_PIX_FMT_RGBA64LE: | ||
276 | 2 | min[0] = min[1] = min[2] = min[3] = 0; | |
277 | 2 | max[0] = max[1] = max[2] = max[3] = 65535; | |
278 | 2 | break; | |
279 | 26 | default: | |
280 | 26 | min[0] = min[1] = min[2] = min[3] = 0; | |
281 | 26 | max[0] = max[1] = max[2] = max[3] = 255 * (1 << (desc->comp[0].depth - 8)); | |
282 | } | ||
283 | |||
284 | 57 | s->is_yuv = s->is_rgb = 0; | |
285 | 57 | s->is_planar = desc->flags & AV_PIX_FMT_FLAG_PLANAR; | |
286 | 2/2 ✓ Branch 1 taken 33 times. ✓ Branch 2 taken 24 times. | 57 | if (ff_fmt_is_in(inlink->format, yuv_pix_fmts)) s->is_yuv = 1; |
287 | 2/2 ✓ Branch 1 taken 18 times. ✓ Branch 2 taken 6 times. | 24 | else if (ff_fmt_is_in(inlink->format, rgb_pix_fmts)) s->is_rgb = 1; |
288 | |||
289 | 2/2 ✓ Branch 0 taken 18 times. ✓ Branch 1 taken 39 times. | 57 | if (s->is_rgb) { |
290 | 18 | ff_fill_rgba_map(rgba_map, inlink->format); | |
291 | 18 | s->step = av_get_bits_per_pixel(desc) >> 3; | |
292 | 2/2 ✓ Branch 0 taken 10 times. ✓ Branch 1 taken 8 times. | 18 | if (s->is_16bit) { |
293 | 10 | s->step = s->step >> 1; | |
294 | } | ||
295 | } | ||
296 | |||
297 | 2/2 ✓ Branch 0 taken 174 times. ✓ Branch 1 taken 57 times. | 231 | for (color = 0; color < desc->nb_components; color++) { |
298 | double res; | ||
299 | 2/2 ✓ Branch 0 taken 63 times. ✓ Branch 1 taken 111 times. | 174 | int comp = s->is_rgb ? rgba_map[color] : color; |
300 | |||
301 | /* create the parsed expression */ | ||
302 | 174 | av_expr_free(s->comp_expr[color]); | |
303 | 174 | s->comp_expr[color] = NULL; | |
304 | 174 | ret = av_expr_parse(&s->comp_expr[color], s->comp_expr_str[color], | |
305 | var_names, funcs1_names, funcs1, NULL, NULL, 0, ctx); | ||
306 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 174 times. | 174 | if (ret < 0) { |
307 | ✗ | av_log(ctx, AV_LOG_ERROR, | |
308 | "Error when parsing the expression '%s' for the component %d and color %d.\n", | ||
309 | s->comp_expr_str[comp], comp, color); | ||
310 | ✗ | return AVERROR(EINVAL); | |
311 | } | ||
312 | |||
313 | /* compute the lut */ | ||
314 | 174 | s->var_values[VAR_MAXVAL] = max[color]; | |
315 | 174 | s->var_values[VAR_MINVAL] = min[color]; | |
316 | |||
317 | 2/2 ✓ Branch 0 taken 11403264 times. ✓ Branch 1 taken 174 times. | 11403438 | for (val = 0; val < FF_ARRAY_ELEMS(s->lut[comp]); val++) { |
318 | 11403264 | s->var_values[VAR_VAL] = val; | |
319 | 11403264 | s->var_values[VAR_CLIPVAL] = av_clip(val, min[color], max[color]); | |
320 | 11403264 | s->var_values[VAR_NEGVAL] = | |
321 | 11403264 | av_clip(min[color] + max[color] - s->var_values[VAR_VAL], | |
322 | min[color], max[color]); | ||
323 | |||
324 | 11403264 | res = av_expr_eval(s->comp_expr[color], s->var_values, s); | |
325 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 11403264 times. | 11403264 | if (isnan(res)) { |
326 | ✗ | av_log(ctx, AV_LOG_ERROR, | |
327 | "Error when evaluating the expression '%s' for the value %d for the component %d.\n", | ||
328 | s->comp_expr_str[color], val, comp); | ||
329 | ✗ | return AVERROR(EINVAL); | |
330 | } | ||
331 | 11403264 | s->lut[comp][val] = av_clip((int)res, 0, max[A]); | |
332 | 11403264 | av_log(ctx, AV_LOG_DEBUG, "val[%d][%d] = %d\n", comp, val, s->lut[comp][val]); | |
333 | } | ||
334 | } | ||
335 | |||
336 | 57 | return 0; | |
337 | } | ||
338 | |||
339 | struct thread_data { | ||
340 | AVFrame *in; | ||
341 | AVFrame *out; | ||
342 | |||
343 | int w; | ||
344 | int h; | ||
345 | }; | ||
346 | |||
347 | #define LOAD_PACKED_COMMON\ | ||
348 | LutContext *s = ctx->priv;\ | ||
349 | const struct thread_data *td = arg;\ | ||
350 | \ | ||
351 | int i, j;\ | ||
352 | const int w = td->w;\ | ||
353 | const int h = td->h;\ | ||
354 | AVFrame *in = td->in;\ | ||
355 | AVFrame *out = td->out;\ | ||
356 | const uint16_t (*tab)[256*256] = (const uint16_t (*)[256*256])s->lut;\ | ||
357 | const int step = s->step;\ | ||
358 | \ | ||
359 | const int slice_start = (h * jobnr ) / nb_jobs;\ | ||
360 | const int slice_end = (h * (jobnr+1)) / nb_jobs;\ | ||
361 | |||
362 | /* packed, 16-bit */ | ||
363 | 2 | static int lut_packed_16bits(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) | |
364 | { | ||
365 | 2 | LOAD_PACKED_COMMON | |
366 | |||
367 | uint16_t *inrow, *outrow, *inrow0, *outrow0; | ||
368 | 2 | const int in_linesize = in->linesize[0] / 2; | |
369 | 2 | const int out_linesize = out->linesize[0] / 2; | |
370 | 2 | inrow0 = (uint16_t *)in ->data[0]; | |
371 | 2 | outrow0 = (uint16_t *)out->data[0]; | |
372 | |||
373 | 2/2 ✓ Branch 0 taken 576 times. ✓ Branch 1 taken 2 times. | 578 | for (i = slice_start; i < slice_end; i++) { |
374 | 576 | inrow = inrow0 + i * in_linesize; | |
375 | 576 | outrow = outrow0 + i * out_linesize; | |
376 | 2/2 ✓ Branch 0 taken 202752 times. ✓ Branch 1 taken 576 times. | 203328 | for (j = 0; j < w; j++) { |
377 | |||
378 | 2/4 ✓ Branch 0 taken 101376 times. ✓ Branch 1 taken 101376 times. ✗ Branch 2 not taken. ✗ Branch 3 not taken. | 202752 | switch (step) { |
379 | #if HAVE_BIGENDIAN | ||
380 | case 4: outrow[3] = av_bswap16(tab[3][av_bswap16(inrow[3])]); // Fall-through | ||
381 | case 3: outrow[2] = av_bswap16(tab[2][av_bswap16(inrow[2])]); // Fall-through | ||
382 | case 2: outrow[1] = av_bswap16(tab[1][av_bswap16(inrow[1])]); // Fall-through | ||
383 | default: outrow[0] = av_bswap16(tab[0][av_bswap16(inrow[0])]); | ||
384 | #else | ||
385 | 101376 | case 4: outrow[3] = tab[3][inrow[3]]; // Fall-through | |
386 | 202752 | case 3: outrow[2] = tab[2][inrow[2]]; // Fall-through | |
387 | 202752 | case 2: outrow[1] = tab[1][inrow[1]]; // Fall-through | |
388 | 202752 | default: outrow[0] = tab[0][inrow[0]]; | |
389 | #endif | ||
390 | } | ||
391 | 202752 | outrow += step; | |
392 | 202752 | inrow += step; | |
393 | } | ||
394 | } | ||
395 | |||
396 | 2 | return 0; | |
397 | } | ||
398 | |||
399 | /* packed, 8-bit */ | ||
400 | 6 | static int lut_packed_8bits(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) | |
401 | { | ||
402 | 6 | LOAD_PACKED_COMMON | |
403 | |||
404 | uint8_t *inrow, *outrow, *inrow0, *outrow0; | ||
405 | 6 | const int in_linesize = in->linesize[0]; | |
406 | 6 | const int out_linesize = out->linesize[0]; | |
407 | 6 | inrow0 = in ->data[0]; | |
408 | 6 | outrow0 = out->data[0]; | |
409 | |||
410 | 2/2 ✓ Branch 0 taken 1728 times. ✓ Branch 1 taken 6 times. | 1734 | for (i = slice_start; i < slice_end; i++) { |
411 | 1728 | inrow = inrow0 + i * in_linesize; | |
412 | 1728 | outrow = outrow0 + i * out_linesize; | |
413 | 2/2 ✓ Branch 0 taken 608256 times. ✓ Branch 1 taken 1728 times. | 609984 | for (j = 0; j < w; j++) { |
414 | 2/4 ✓ Branch 0 taken 405504 times. ✓ Branch 1 taken 202752 times. ✗ Branch 2 not taken. ✗ Branch 3 not taken. | 608256 | switch (step) { |
415 | 405504 | case 4: outrow[3] = tab[3][inrow[3]]; // Fall-through | |
416 | 608256 | case 3: outrow[2] = tab[2][inrow[2]]; // Fall-through | |
417 | 608256 | case 2: outrow[1] = tab[1][inrow[1]]; // Fall-through | |
418 | 608256 | default: outrow[0] = tab[0][inrow[0]]; | |
419 | } | ||
420 | 608256 | outrow += step; | |
421 | 608256 | inrow += step; | |
422 | } | ||
423 | } | ||
424 | |||
425 | 6 | return 0; | |
426 | } | ||
427 | |||
428 | #define LOAD_PLANAR_COMMON\ | ||
429 | LutContext *s = ctx->priv;\ | ||
430 | const struct thread_data *td = arg;\ | ||
431 | int i, j, plane;\ | ||
432 | AVFrame *in = td->in;\ | ||
433 | AVFrame *out = td->out;\ | ||
434 | |||
435 | #define PLANAR_COMMON\ | ||
436 | int vsub = plane == 1 || plane == 2 ? s->vsub : 0;\ | ||
437 | int hsub = plane == 1 || plane == 2 ? s->hsub : 0;\ | ||
438 | int h = AV_CEIL_RSHIFT(td->h, vsub);\ | ||
439 | int w = AV_CEIL_RSHIFT(td->w, hsub);\ | ||
440 | const uint16_t *tab = s->lut[plane];\ | ||
441 | \ | ||
442 | const int slice_start = (h * jobnr ) / nb_jobs;\ | ||
443 | const int slice_end = (h * (jobnr+1)) / nb_jobs;\ | ||
444 | |||
445 | /* planar >8 bit depth */ | ||
446 | 33 | static int lut_planar_16bits(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) | |
447 | { | ||
448 | 33 | LOAD_PLANAR_COMMON | |
449 | |||
450 | uint16_t *inrow, *outrow; | ||
451 | |||
452 | 5/6 ✓ Branch 0 taken 122 times. ✓ Branch 1 taken 6 times. ✓ Branch 2 taken 95 times. ✓ Branch 3 taken 27 times. ✓ Branch 4 taken 95 times. ✗ Branch 5 not taken. | 128 | for (plane = 0; plane < 4 && in->data[plane] && in->linesize[plane]; plane++) { |
453 | 8/8 ✓ Branch 0 taken 67 times. ✓ Branch 1 taken 28 times. ✓ Branch 2 taken 28 times. ✓ Branch 3 taken 39 times. ✓ Branch 4 taken 67 times. ✓ Branch 5 taken 28 times. ✓ Branch 6 taken 28 times. ✓ Branch 7 taken 39 times. | 95 | PLANAR_COMMON |
454 | |||
455 | 95 | const int in_linesize = in->linesize[plane] / 2; | |
456 | 95 | const int out_linesize = out->linesize[plane] / 2; | |
457 | |||
458 | 95 | inrow = (uint16_t *)in ->data[plane] + slice_start * in_linesize; | |
459 | 95 | outrow = (uint16_t *)out->data[plane] + slice_start * out_linesize; | |
460 | |||
461 | 2/2 ✓ Branch 0 taken 25056 times. ✓ Branch 1 taken 95 times. | 25151 | for (i = slice_start; i < slice_end; i++) { |
462 | 2/2 ✓ Branch 0 taken 7907328 times. ✓ Branch 1 taken 25056 times. | 7932384 | for (j = 0; j < w; j++) { |
463 | #if HAVE_BIGENDIAN | ||
464 | outrow[j] = av_bswap16(tab[av_bswap16(inrow[j])]); | ||
465 | #else | ||
466 | 7907328 | outrow[j] = tab[inrow[j]]; | |
467 | #endif | ||
468 | } | ||
469 | 25056 | inrow += in_linesize; | |
470 | 25056 | outrow += out_linesize; | |
471 | } | ||
472 | } | ||
473 | |||
474 | 33 | return 0; | |
475 | } | ||
476 | |||
477 | /* planar 8bit depth */ | ||
478 | 16 | static int lut_planar_8bits(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) | |
479 | { | ||
480 | 16 | LOAD_PLANAR_COMMON | |
481 | |||
482 | uint8_t *inrow, *outrow; | ||
483 | |||
484 | 5/6 ✓ Branch 0 taken 62 times. ✓ Branch 1 taken 4 times. ✓ Branch 2 taken 50 times. ✓ Branch 3 taken 12 times. ✓ Branch 4 taken 50 times. ✗ Branch 5 not taken. | 66 | for (plane = 0; plane < 4 && in->data[plane] && in->linesize[plane]; plane++) { |
485 | 8/8 ✓ Branch 0 taken 35 times. ✓ Branch 1 taken 15 times. ✓ Branch 2 taken 15 times. ✓ Branch 3 taken 20 times. ✓ Branch 4 taken 35 times. ✓ Branch 5 taken 15 times. ✓ Branch 6 taken 15 times. ✓ Branch 7 taken 20 times. | 50 | PLANAR_COMMON |
486 | |||
487 | 50 | const int in_linesize = in->linesize[plane]; | |
488 | 50 | const int out_linesize = out->linesize[plane]; | |
489 | |||
490 | 50 | inrow = in ->data[plane] + slice_start * in_linesize; | |
491 | 50 | outrow = out->data[plane] + slice_start * out_linesize; | |
492 | |||
493 | 2/2 ✓ Branch 0 taken 12528 times. ✓ Branch 1 taken 50 times. | 12578 | for (i = slice_start; i < slice_end; i++) { |
494 | 2/2 ✓ Branch 0 taken 3763584 times. ✓ Branch 1 taken 12528 times. | 3776112 | for (j = 0; j < w; j++) |
495 | 3763584 | outrow[j] = tab[inrow[j]]; | |
496 | 12528 | inrow += in_linesize; | |
497 | 12528 | outrow += out_linesize; | |
498 | } | ||
499 | } | ||
500 | |||
501 | 16 | return 0; | |
502 | } | ||
503 | |||
504 | #define PACKED_THREAD_DATA\ | ||
505 | struct thread_data td = {\ | ||
506 | .in = in,\ | ||
507 | .out = out,\ | ||
508 | .w = inlink->w,\ | ||
509 | .h = in->height,\ | ||
510 | };\ | ||
511 | |||
512 | #define PLANAR_THREAD_DATA\ | ||
513 | struct thread_data td = {\ | ||
514 | .in = in,\ | ||
515 | .out = out,\ | ||
516 | .w = inlink->w,\ | ||
517 | .h = inlink->h,\ | ||
518 | };\ | ||
519 | |||
520 | 57 | static int filter_frame(AVFilterLink *inlink, AVFrame *in) | |
521 | { | ||
522 | 57 | AVFilterContext *ctx = inlink->dst; | |
523 | 57 | LutContext *s = ctx->priv; | |
524 | 57 | AVFilterLink *outlink = ctx->outputs[0]; | |
525 | AVFrame *out; | ||
526 | 57 | int direct = 0; | |
527 | |||
528 | 1/2 ✓ Branch 1 taken 57 times. ✗ Branch 2 not taken. | 57 | if (av_frame_is_writable(in)) { |
529 | 57 | direct = 1; | |
530 | 57 | out = in; | |
531 | } else { | ||
532 | ✗ | out = ff_get_video_buffer(outlink, outlink->w, outlink->h); | |
533 | ✗ | if (!out) { | |
534 | ✗ | av_frame_free(&in); | |
535 | ✗ | return AVERROR(ENOMEM); | |
536 | } | ||
537 | ✗ | av_frame_copy_props(out, in); | |
538 | } | ||
539 | |||
540 | 6/6 ✓ Branch 0 taken 18 times. ✓ Branch 1 taken 39 times. ✓ Branch 2 taken 10 times. ✓ Branch 3 taken 8 times. ✓ Branch 4 taken 2 times. ✓ Branch 5 taken 8 times. | 59 | if (s->is_rgb && s->is_16bit && !s->is_planar) { |
541 | /* packed, 16-bit */ | ||
542 | 2 | PACKED_THREAD_DATA | |
543 | 2 | ff_filter_execute(ctx, lut_packed_16bits, &td, NULL, | |
544 | 1/2 ✓ Branch 0 taken 2 times. ✗ Branch 1 not taken. | 2 | FFMIN(in->height, ff_filter_get_nb_threads(ctx))); |
545 | 4/4 ✓ Branch 0 taken 16 times. ✓ Branch 1 taken 39 times. ✓ Branch 2 taken 6 times. ✓ Branch 3 taken 10 times. | 61 | } else if (s->is_rgb && !s->is_planar) { |
546 | /* packed 8 bits */ | ||
547 | 6 | PACKED_THREAD_DATA | |
548 | 6 | ff_filter_execute(ctx, lut_packed_8bits, &td, NULL, | |
549 | 1/2 ✓ Branch 0 taken 6 times. ✗ Branch 1 not taken. | 6 | FFMIN(in->height, ff_filter_get_nb_threads(ctx))); |
550 | 2/2 ✓ Branch 0 taken 33 times. ✓ Branch 1 taken 16 times. | 49 | } else if (s->is_16bit) { |
551 | /* planar >8 bit depth */ | ||
552 | 33 | PLANAR_THREAD_DATA | |
553 | 33 | ff_filter_execute(ctx, lut_planar_16bits, &td, NULL, | |
554 | 1/2 ✓ Branch 0 taken 33 times. ✗ Branch 1 not taken. | 33 | FFMIN(in->height, ff_filter_get_nb_threads(ctx))); |
555 | } else { | ||
556 | /* planar 8bit depth */ | ||
557 | 16 | PLANAR_THREAD_DATA | |
558 | 16 | ff_filter_execute(ctx, lut_planar_8bits, &td, NULL, | |
559 | 1/2 ✓ Branch 0 taken 16 times. ✗ Branch 1 not taken. | 16 | FFMIN(in->height, ff_filter_get_nb_threads(ctx))); |
560 | } | ||
561 | |||
562 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 57 times. | 57 | if (!direct) |
563 | ✗ | av_frame_free(&in); | |
564 | |||
565 | 57 | return ff_filter_frame(outlink, out); | |
566 | } | ||
567 | |||
568 | ✗ | static int process_command(AVFilterContext *ctx, const char *cmd, const char *args, | |
569 | char *res, int res_len, int flags) | ||
570 | { | ||
571 | ✗ | int ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags); | |
572 | |||
573 | ✗ | if (ret < 0) | |
574 | ✗ | return ret; | |
575 | |||
576 | ✗ | return config_props(ctx->inputs[0]); | |
577 | } | ||
578 | |||
579 | static const AVFilterPad inputs[] = { | ||
580 | { .name = "default", | ||
581 | .type = AVMEDIA_TYPE_VIDEO, | ||
582 | .filter_frame = filter_frame, | ||
583 | .config_props = config_props, | ||
584 | }, | ||
585 | }; | ||
586 | |||
587 | #define DEFINE_LUT_FILTER(name_, description_, priv_class_) \ | ||
588 | const AVFilter ff_vf_##name_ = { \ | ||
589 | .name = #name_, \ | ||
590 | .description = NULL_IF_CONFIG_SMALL(description_), \ | ||
591 | .priv_class = &priv_class_ ## _class, \ | ||
592 | .priv_size = sizeof(LutContext), \ | ||
593 | .init = name_##_init, \ | ||
594 | .uninit = uninit, \ | ||
595 | FILTER_INPUTS(inputs), \ | ||
596 | FILTER_OUTPUTS(ff_video_default_filterpad), \ | ||
597 | FILTER_QUERY_FUNC(query_formats), \ | ||
598 | .flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | \ | ||
599 | AVFILTER_FLAG_SLICE_THREADS, \ | ||
600 | .process_command = process_command, \ | ||
601 | } | ||
602 | |||
603 | AVFILTER_DEFINE_CLASS_EXT(lut, "lut/lutyuv/lutrgb", options); | ||
604 | |||
605 | #if CONFIG_LUT_FILTER | ||
606 | |||
607 | #define lut_init NULL | ||
608 | DEFINE_LUT_FILTER(lut, "Compute and apply a lookup table to the RGB/YUV input video.", | ||
609 | lut); | ||
610 | #undef lut_init | ||
611 | #endif | ||
612 | |||
613 | #if CONFIG_LUTYUV_FILTER | ||
614 | |||
615 | ✗ | static av_cold int lutyuv_init(AVFilterContext *ctx) | |
616 | { | ||
617 | ✗ | LutContext *s = ctx->priv; | |
618 | |||
619 | ✗ | s->is_yuv = 1; | |
620 | |||
621 | ✗ | return 0; | |
622 | } | ||
623 | |||
624 | DEFINE_LUT_FILTER(lutyuv, "Compute and apply a lookup table to the YUV input video.", | ||
625 | lut); | ||
626 | #endif | ||
627 | |||
628 | #if CONFIG_LUTRGB_FILTER | ||
629 | |||
630 | ✗ | static av_cold int lutrgb_init(AVFilterContext *ctx) | |
631 | { | ||
632 | ✗ | LutContext *s = ctx->priv; | |
633 | |||
634 | ✗ | s->is_rgb = 1; | |
635 | |||
636 | ✗ | return 0; | |
637 | } | ||
638 | |||
639 | DEFINE_LUT_FILTER(lutrgb, "Compute and apply a lookup table to the RGB input video.", | ||
640 | lut); | ||
641 | #endif | ||
642 |
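
As a reading aid only (not part of the coverage listing above), the sketch below mirrors the formulas of `compute_gammaval()` and `compute_gammaval709()` from source lines 174–197, assuming a full-range 8-bit component (minval 0, maxval 255); in the filter itself minval and maxval come from the pixel-format ranges set up in `config_props()`.

```c
/* Illustrative standalone sketch, not part of vf_lut.c.
 * Assumes a full-range 8-bit component: minval = 0, maxval = 255. */
#include <math.h>
#include <stdio.h>

#define MINVAL 0.0
#define MAXVAL 255.0

/* Same mapping as compute_gammaval(): normalize to [0,1], apply the
 * power curve, rescale back into the component range. */
static double gammaval(double clipval, double gamma)
{
    return pow((clipval - MINVAL) / (MAXVAL - MINVAL), gamma)
           * (MAXVAL - MINVAL) + MINVAL;
}

/* Same mapping as compute_gammaval709(): linear segment below 0.018,
 * Rec.709 power curve with the 1.099/0.099 constants above it. */
static double gammaval709(double clipval, double gamma)
{
    double level = (clipval - MINVAL) / (MAXVAL - MINVAL);
    level = level < 0.018 ? 4.5 * level
                          : 1.099 * pow(level, 1.0 / gamma) - 0.099;
    return level * (MAXVAL - MINVAL) + MINVAL;
}

int main(void)
{
    /* config_props() fills one LUT entry per possible component value;
     * here we only print a few sample points of the two curves. */
    for (int v = 0; v <= 255; v += 51)
        printf("val %3d -> gammaval(0.45) = %3d, gammaval709(2.2) = %3d\n",
               v, (int)gammaval(v, 0.45), (int)gammaval709(v, 2.2));
    return 0;
}
```

In real use these helpers are reached through the per-component expressions, for example a `y` expression of `gammaval(0.45)` on the lutyuv filter; `config_props()` then evaluates the parsed expression once per possible input value to fill `s->lut` before any frame is filtered.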