FFmpeg coverage


Directory: ../../../ffmpeg/
File:      src/libavfilter/vf_chromakey.c
Date:      2024-04-19 17:50:32

             Exec    Total   Coverage
Lines:          0      145       0.0%
Functions:      0       12       0.0%
Branches:       0       76       0.0%

Source:
/*
 * Copyright (c) 2015 Timo Rothenpieler <timo@rothenpieler.org>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/opt.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "internal.h"

typedef struct ChromakeyContext {
    const AVClass *class;

    uint8_t chromakey_rgba[4];
    uint16_t chromakey_uv[2];

    float similarity;
    float blend;

    int is_yuv;
    int depth;
    int mid;
    int max;

    int hsub_log2;
    int vsub_log2;

    int (*do_slice)(AVFilterContext *ctx, void *arg,
                    int jobnr, int nb_jobs);
} ChromakeyContext;

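/*
 * Compute the alpha value for one output pixel from the 3x3 block of
 * chroma samples around it: average the normalized UV-plane distance to
 * the key colour, then either hard-threshold it against "similarity" or
 * ramp it over "blend". do_chromakey_pixel16() is the same computation
 * for formats deeper than 8 bits.
 */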
static uint8_t do_chromakey_pixel(ChromakeyContext *ctx, uint8_t u[9], uint8_t v[9])
{
    double diff = 0.0;
    int du, dv, i;

    for (i = 0; i < 9; ++i) {
        du = (int)u[i] - ctx->chromakey_uv[0];
        dv = (int)v[i] - ctx->chromakey_uv[1];

        diff += sqrt((du * du + dv * dv) / (255.0 * 255.0 * 2));
    }

    diff /= 9.0;

    if (ctx->blend > 0.0001) {
        return av_clipd((diff - ctx->similarity) / ctx->blend, 0.0, 1.0) * 255.0;
    } else {
        return (diff > ctx->similarity) ? 255 : 0;
    }
}

static uint16_t do_chromakey_pixel16(ChromakeyContext *ctx, uint16_t u[9], uint16_t v[9])
{
    double max = ctx->max;
    double diff = 0.0;
    int du, dv, i;

    for (i = 0; i < 9; ++i) {
        du = (int)u[i] - ctx->chromakey_uv[0];
        dv = (int)v[i] - ctx->chromakey_uv[1];

        diff += sqrt((du * du + dv * dv) / (max * max * 2));
    }

    diff /= 9.0;

    if (ctx->blend > 0.0001) {
        return av_clipd((diff - ctx->similarity) / ctx->blend, 0.0, 1.0) * max;
    } else {
        return (diff > ctx->similarity) ? max : 0;
    }
}

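/*
 * Fetch the chroma pair for image position (x, y), honouring chroma
 * subsampling. Out-of-frame positions leave *u and *v untouched, so the
 * caller's pre-filled key colour is used for border pixels.
 */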
static av_always_inline void get_pixel_uv(AVFrame *frame, int hsub_log2, int vsub_log2, int x, int y, uint8_t *u, uint8_t *v)
{
    if (x < 0 || x >= frame->width || y < 0 || y >= frame->height)
        return;

    x >>= hsub_log2;
    y >>= vsub_log2;

    *u = frame->data[1][frame->linesize[1] * y + x];
    *v = frame->data[2][frame->linesize[2] * y + x];
}

static av_always_inline void get_pixel16_uv(AVFrame *frame, int hsub_log2, int vsub_log2, int x, int y, uint16_t *u, uint16_t *v)
{
    if (x < 0 || x >= frame->width || y < 0 || y >= frame->height)
        return;

    x >>= hsub_log2;
    y >>= vsub_log2;

    *u = AV_RN16(&frame->data[1][frame->linesize[1] * y + 2 * x]);
    *v = AV_RN16(&frame->data[2][frame->linesize[2] * y + 2 * x]);
}

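/*
 * chromakey worker: each job fills its band of rows in the alpha plane
 * (frame->data[3]) in place, one pixel at a time, from the 3x3 chroma
 * neighbourhood. do_chromakey16_slice() is the >8-bit counterpart.
 */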
static int do_chromakey_slice(AVFilterContext *avctx, void *arg, int jobnr, int nb_jobs)
{
    AVFrame *frame = arg;

    const int slice_start = (frame->height * jobnr) / nb_jobs;
    const int slice_end = (frame->height * (jobnr + 1)) / nb_jobs;

    ChromakeyContext *ctx = avctx->priv;

    int x, y, xo, yo;
    uint8_t u[9], v[9];

    memset(u, ctx->chromakey_uv[0], sizeof(u));
    memset(v, ctx->chromakey_uv[1], sizeof(v));

    for (y = slice_start; y < slice_end; ++y) {
        for (x = 0; x < frame->width; ++x) {
            for (yo = 0; yo < 3; ++yo) {
                for (xo = 0; xo < 3; ++xo) {
                    get_pixel_uv(frame, ctx->hsub_log2, ctx->vsub_log2, x + xo - 1, y + yo - 1, &u[yo * 3 + xo], &v[yo * 3 + xo]);
                }
            }

            frame->data[3][frame->linesize[3] * y + x] = do_chromakey_pixel(ctx, u, v);
        }
    }

    return 0;
}

static int do_chromakey16_slice(AVFilterContext *avctx, void *arg, int jobnr, int nb_jobs)
{
    AVFrame *frame = arg;

    const int slice_start = (frame->height * jobnr) / nb_jobs;
    const int slice_end = (frame->height * (jobnr + 1)) / nb_jobs;

    ChromakeyContext *ctx = avctx->priv;

    int x, y, xo, yo;
    uint16_t u[9], v[9];

    for (int i = 0; i < 9; i++) {
        u[i] = ctx->chromakey_uv[0];
        v[i] = ctx->chromakey_uv[1];
    }

    for (y = slice_start; y < slice_end; ++y) {
        for (x = 0; x < frame->width; ++x) {
            uint16_t *dst = (uint16_t *)(frame->data[3] + frame->linesize[3] * y);

            for (yo = 0; yo < 3; ++yo) {
                for (xo = 0; xo < 3; ++xo) {
                    get_pixel16_uv(frame, ctx->hsub_log2, ctx->vsub_log2, x + xo - 1, y + yo - 1, &u[yo * 3 + xo], &v[yo * 3 + xo]);
                }
            }

            dst[x] = do_chromakey_pixel16(ctx, u, v);
        }
    }

    return 0;
}

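/*
 * chromahold worker: runs over the (subsampled) chroma planes and pulls
 * U/V towards neutral grey for pixels whose chroma is further than
 * "similarity" from the key colour; chroma close to the key is kept.
 * With "blend" set, the desaturation is faded in instead of switched.
 */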
static int do_chromahold_slice(AVFilterContext *avctx, void *arg, int jobnr, int nb_jobs)
{
    ChromakeyContext *ctx = avctx->priv;
    AVFrame *frame = arg;
    const int slice_start = ((frame->height >> ctx->vsub_log2) * jobnr) / nb_jobs;
    const int slice_end = ((frame->height >> ctx->vsub_log2) * (jobnr + 1)) / nb_jobs;

    int x, y, alpha;

    for (y = slice_start; y < slice_end; ++y) {
        for (x = 0; x < frame->width >> ctx->hsub_log2; ++x) {
            int u = frame->data[1][frame->linesize[1] * y + x];
            int v = frame->data[2][frame->linesize[2] * y + x];
            double diff;
            int du, dv;

            du = u - ctx->chromakey_uv[0];
            dv = v - ctx->chromakey_uv[1];

            diff = sqrt((du * du + dv * dv) / (255.0 * 255.0 * 2.0));

            alpha = diff > ctx->similarity;
            if (ctx->blend > 0.0001) {
                double f = 1. - av_clipd((diff - ctx->similarity) / ctx->blend, 0.0, 1.0);

                frame->data[1][frame->linesize[1] * y + x] = 128 + (u - 128) * f;
                frame->data[2][frame->linesize[2] * y + x] = 128 + (v - 128) * f;
            } else if (alpha) {
                frame->data[1][frame->linesize[1] * y + x] = 128;
                frame->data[2][frame->linesize[2] * y + x] = 128;
            }
        }
    }

    return 0;
}

static int do_chromahold16_slice(AVFilterContext *avctx, void *arg, int jobnr, int nb_jobs)
{
    ChromakeyContext *ctx = avctx->priv;
    AVFrame *frame = arg;
    const int slice_start = ((frame->height >> ctx->vsub_log2) * jobnr) / nb_jobs;
    const int slice_end = ((frame->height >> ctx->vsub_log2) * (jobnr + 1)) / nb_jobs;
    const int mid = ctx->mid;
    double max = ctx->max;

    int x, y, alpha;

    for (y = slice_start; y < slice_end; ++y) {
        for (x = 0; x < frame->width >> ctx->hsub_log2; ++x) {
            int u = AV_RN16(&frame->data[1][frame->linesize[1] * y + 2 * x]);
            int v = AV_RN16(&frame->data[2][frame->linesize[2] * y + 2 * x]);
            double diff;
            int du, dv;

            du = u - ctx->chromakey_uv[0];
            dv = v - ctx->chromakey_uv[1];

            diff = sqrt((du * du + dv * dv) / (max * max * 2.0));

            alpha = diff > ctx->similarity;
            if (ctx->blend > 0.0001) {
                double f = 1. - av_clipd((diff - ctx->similarity) / ctx->blend, 0.0, 1.0);

                AV_WN16(&frame->data[1][frame->linesize[1] * y + 2 * x], mid + (u - mid) * f);
                AV_WN16(&frame->data[2][frame->linesize[2] * y + 2 * x], mid + (v - mid) * f);
            } else if (alpha) {
                AV_WN16(&frame->data[1][frame->linesize[1] * y + 2 * x], mid);
                AV_WN16(&frame->data[2][frame->linesize[2] * y + 2 * x], mid);
            }
        }
    }

    return 0;
}

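/*
 * Per-frame entry point: fans the selected do_slice() callback out over
 * the filter's worker threads (at most one job per row), then passes the
 * modified frame on to the next filter.
 */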
static int filter_frame(AVFilterLink *link, AVFrame *frame)
{
    AVFilterContext *avctx = link->dst;
    ChromakeyContext *ctx = avctx->priv;
    int res;

    if (res = ff_filter_execute(avctx, ctx->do_slice, frame, NULL,
                                FFMIN(frame->height, ff_filter_get_nb_threads(avctx))))
        return res;

    return ff_filter_frame(avctx->outputs[0], frame);
}

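/*
 * 10-bit fixed-point RGB -> U/V conversion using the BT.601 coefficients,
 * with rounding and the usual +128 chroma bias.
 */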
#define FIXNUM(x) lrint((x) * (1 << 10))
#define RGB_TO_U(rgb) (((- FIXNUM(0.16874) * rgb[0] - FIXNUM(0.33126) * rgb[1] + FIXNUM(0.50000) * rgb[2] + (1 << 9) - 1) >> 10) + 128)
#define RGB_TO_V(rgb) ((( FIXNUM(0.50000) * rgb[0] - FIXNUM(0.41869) * rgb[1] - FIXNUM(0.08131) * rgb[2] + (1 << 9) - 1) >> 10) + 128)

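/*
 * (Re)initialise everything that depends on the negotiated pixel format
 * and on the user options: bit depth, the key colour converted to UV at
 * that depth, and which slice routine to run (chromakey vs. chromahold,
 * 8-bit vs. 16-bit).
 */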
static av_cold int config_output(AVFilterLink *outlink)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);
    AVFilterContext *avctx = outlink->src;
    ChromakeyContext *ctx = avctx->priv;
    int factor;

    ctx->depth = desc->comp[0].depth;
    ctx->mid = 1 << (ctx->depth - 1);
    ctx->max = (1 << ctx->depth) - 1;

    factor = 1 << (ctx->depth - 8);

    if (ctx->is_yuv) {
        ctx->chromakey_uv[0] = ctx->chromakey_rgba[1] * factor;
        ctx->chromakey_uv[1] = ctx->chromakey_rgba[2] * factor;
    } else {
        ctx->chromakey_uv[0] = RGB_TO_U(ctx->chromakey_rgba) * factor;
        ctx->chromakey_uv[1] = RGB_TO_V(ctx->chromakey_rgba) * factor;
    }

    if (!strcmp(avctx->filter->name, "chromakey")) {
        ctx->do_slice = ctx->depth <= 8 ? do_chromakey_slice : do_chromakey16_slice;
    } else {
        ctx->do_slice = ctx->depth <= 8 ? do_chromahold_slice : do_chromahold16_slice;
    }

    return 0;
}

static av_cold int config_input(AVFilterLink *inlink)
{
    AVFilterContext *avctx = inlink->dst;
    ChromakeyContext *ctx = avctx->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);

    ctx->hsub_log2 = desc->log2_chroma_w;
    ctx->vsub_log2 = desc->log2_chroma_h;

    return 0;
}

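/*
 * Handle runtime option changes (the options below are flagged
 * AV_OPT_FLAG_RUNTIME_PARAM): apply the new values, then rerun
 * config_output() so the key colour and slice function are refreshed.
 */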
static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
                           char *res, int res_len, int flags)
{
    int ret;

    ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags);
    if (ret < 0)
        return ret;

    return config_output(ctx->outputs[0]);
}

static const AVFilterPad inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .flags        = AVFILTERPAD_FLAG_NEEDS_WRITABLE,
        .filter_frame = filter_frame,
        .config_props = config_input,
    },
};

static const AVFilterPad outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
};

#define OFFSET(x) offsetof(ChromakeyContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM

static const AVOption chromakey_options[] = {
    { "color", "set the chromakey key color", OFFSET(chromakey_rgba), AV_OPT_TYPE_COLOR, { .str = "black" }, 0, 0, FLAGS },
    { "similarity", "set the chromakey similarity value", OFFSET(similarity), AV_OPT_TYPE_FLOAT, { .dbl = 0.01 }, 0.00001, 1.0, FLAGS },
    { "blend", "set the chromakey key blend value", OFFSET(blend), AV_OPT_TYPE_FLOAT, { .dbl = 0.0 }, 0.0, 1.0, FLAGS },
    { "yuv", "color parameter is in yuv instead of rgb", OFFSET(is_yuv), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
    { NULL }
};

static const enum AVPixelFormat chromakey_fmts[] = {
    AV_PIX_FMT_YUVA420P,
    AV_PIX_FMT_YUVA422P,
    AV_PIX_FMT_YUVA444P,
    AV_PIX_FMT_YUVA420P9,  AV_PIX_FMT_YUVA422P9,  AV_PIX_FMT_YUVA444P9,
    AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
    AV_PIX_FMT_YUVA422P12, AV_PIX_FMT_YUVA444P12,
    AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16,
    AV_PIX_FMT_NONE
};

AVFILTER_DEFINE_CLASS(chromakey);

const AVFilter ff_vf_chromakey = {
    .name            = "chromakey",
    .description     = NULL_IF_CONFIG_SMALL("Turns a certain color into transparency. Operates on YUV colors."),
    .priv_size       = sizeof(ChromakeyContext),
    .priv_class      = &chromakey_class,
    FILTER_INPUTS(inputs),
    FILTER_OUTPUTS(outputs),
    FILTER_PIXFMTS_ARRAY(chromakey_fmts),
    .flags           = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
    .process_command = process_command,
};
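
/*
 * Example invocation (illustrative only, not part of the original source;
 * the key colour and thresholds are placeholder values):
 *
 *   ffmpeg -i green_screen.mp4 -i background.mp4 -filter_complex \
 *       "[0:v]chromakey=color=0x00ff00:similarity=0.10:blend=0.05[keyed]; \
 *        [1:v][keyed]overlay" output.mp4
 */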

static const AVOption chromahold_options[] = {
    { "color", "set the chromahold key color", OFFSET(chromakey_rgba), AV_OPT_TYPE_COLOR, { .str = "black" }, 0, 0, FLAGS },
    { "similarity", "set the chromahold similarity value", OFFSET(similarity), AV_OPT_TYPE_FLOAT, { .dbl = 0.01 }, 0.00001, 1.0, FLAGS },
    { "blend", "set the chromahold blend value", OFFSET(blend), AV_OPT_TYPE_FLOAT, { .dbl = 0.0 }, 0.0, 1.0, FLAGS },
    { "yuv", "color parameter is in yuv instead of rgb", OFFSET(is_yuv), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
    { NULL }
};

static const enum AVPixelFormat hold_pixel_fmts[] = {
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_YUV422P,
    AV_PIX_FMT_YUV444P,
    AV_PIX_FMT_YUVA420P,
    AV_PIX_FMT_YUVA422P,
    AV_PIX_FMT_YUVA444P,
    AV_PIX_FMT_YUV420P9,   AV_PIX_FMT_YUV422P9,   AV_PIX_FMT_YUV444P9,
    AV_PIX_FMT_YUV420P10,  AV_PIX_FMT_YUV422P10,  AV_PIX_FMT_YUV444P10,
    AV_PIX_FMT_YUV444P12,  AV_PIX_FMT_YUV422P12,  AV_PIX_FMT_YUV420P12,
    AV_PIX_FMT_YUV444P14,  AV_PIX_FMT_YUV422P14,  AV_PIX_FMT_YUV420P14,
    AV_PIX_FMT_YUV420P16,  AV_PIX_FMT_YUV422P16,  AV_PIX_FMT_YUV444P16,
    AV_PIX_FMT_YUVA420P9,  AV_PIX_FMT_YUVA422P9,  AV_PIX_FMT_YUVA444P9,
    AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
    AV_PIX_FMT_YUVA422P12, AV_PIX_FMT_YUVA444P12,
    AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16,
    AV_PIX_FMT_NONE
};

AVFILTER_DEFINE_CLASS(chromahold);

const AVFilter ff_vf_chromahold = {
    .name            = "chromahold",
    .description     = NULL_IF_CONFIG_SMALL("Turns a certain color range into gray."),
    .priv_size       = sizeof(ChromakeyContext),
    .priv_class      = &chromahold_class,
    FILTER_INPUTS(inputs),
    FILTER_OUTPUTS(outputs),
    FILTER_PIXFMTS_ARRAY(hold_pixel_fmts),
    .flags           = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
    .process_command = process_command,
};

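/*
 * Example invocation (illustrative only, not part of the original source;
 * the colour and threshold are placeholder values):
 *
 *   ffmpeg -i input.mp4 -vf "chromahold=color=red:similarity=0.03" output.mp4
 *
 * keeps chroma close to red and desaturates everything else to grayscale.
 */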