FFmpeg coverage


Directory: ../../../ffmpeg/
File: src/libavfilter/vf_colordetect.c
Date: 2025-08-19 23:55:23
            Exec   Total   Coverage
Lines:         9      95       9.5%
Functions:     1       7      14.3%
Branches:     12      50      24.0%

Line Branch Exec Source
1 /*
2 * Copyright (c) 2025 Niklas Haas
3 *
4 * This file is part of FFmpeg.
5 *
6 * FFmpeg is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2.1 of the License, or (at your option) any later version.
10 *
11 * FFmpeg is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with FFmpeg; if not, write to the Free Software
18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 */
20
21 /**
22 * @file
23 * Video color space detector, tries to auto-detect YUV range and alpha mode.
24 */
25
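/* Illustrative usage note (not part of the original source): the filter is
 * controlled by the "mode" flags option declared below, so a filtergraph such
 * as "colordetect=mode=color_range+alpha_mode", or plain "colordetect" (all
 * properties, the default), runs the detection; the results are reported via
 * av_log() in uninit(). Exact option syntax may vary with the FFmpeg version.
 */
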
26 #include <stdbool.h>
27 #include <stdatomic.h>
28
29 #include "config.h"
30
31 #include "libavutil/mem.h"
32 #include "libavutil/opt.h"
33 #include "libavutil/pixdesc.h"
34
35 #include "avfilter.h"
36 #include "filters.h"
37 #include "formats.h"
38 #include "video.h"
39
40 #include "vf_colordetect.h"
41
42 enum ColorDetectMode {
43 COLOR_DETECT_COLOR_RANGE = 1 << 0,
44 COLOR_DETECT_ALPHA_MODE = 1 << 1,
45 };
46
47 typedef struct ColorDetectContext {
48 const AVClass *class;
49 FFColorDetectDSPContext dsp;
50 unsigned mode;
51
52 const AVPixFmtDescriptor *desc;
53 int nb_threads;
54 int depth;
55 int idx_a;
56 int mpeg_min;
57 int mpeg_max;
58
59 atomic_int detected_range; // enum AVColorRange
60 atomic_int detected_alpha; // enum FFAlphaDetect
61 } ColorDetectContext;
62
63 #define OFFSET(x) offsetof(ColorDetectContext, x)
64 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
65
66 static const AVOption colordetect_options[] = {
67 { "mode", "Image properties to detect", OFFSET(mode), AV_OPT_TYPE_FLAGS, {.i64 = -1}, 0, UINT_MAX, FLAGS, .unit = "mode" },
68 { "color_range", "Detect (YUV) color range", 0, AV_OPT_TYPE_CONST, {.i64 = COLOR_DETECT_COLOR_RANGE}, 0, 0, FLAGS, .unit = "mode" },
69 { "alpha_mode", "Detect alpha mode", 0, AV_OPT_TYPE_CONST, {.i64 = COLOR_DETECT_ALPHA_MODE }, 0, 0, FLAGS, .unit = "mode" },
70 { "all", "Detect all supported properties", 0, AV_OPT_TYPE_CONST, {.i64 = -1}, 0, 0, FLAGS, .unit = "mode" },
71 { NULL }
72 };
73
74 AVFILTER_DEFINE_CLASS(colordetect);
75
76 static int query_format(const AVFilterContext *ctx,
77 AVFilterFormatsConfig **cfg_in,
78 AVFilterFormatsConfig **cfg_out)
79 {
80 int want_flags = AV_PIX_FMT_FLAG_PLANAR;
81 int reject_flags = AV_PIX_FMT_FLAG_PAL | AV_PIX_FMT_FLAG_HWACCEL |
82 AV_PIX_FMT_FLAG_BITSTREAM | AV_PIX_FMT_FLAG_FLOAT |
83 AV_PIX_FMT_FLAG_BAYER | AV_PIX_FMT_FLAG_XYZ;
84
85 if (HAVE_BIGENDIAN) {
86 want_flags |= AV_PIX_FMT_FLAG_BE;
87 } else {
88 reject_flags |= AV_PIX_FMT_FLAG_BE;
89 }
90
91 AVFilterFormats *formats = ff_formats_pixdesc_filter(want_flags, reject_flags);
92 return ff_set_common_formats2(ctx, cfg_in, cfg_out, formats);
93 }
94
95 static int config_input(AVFilterLink *inlink)
96 {
97 AVFilterContext *ctx = inlink->dst;
98 ColorDetectContext *s = ctx->priv;
99 const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
100 const int depth = desc->comp[0].depth;
101 const int mpeg_min = 16 << (depth - 8);
102 const int mpeg_max = 235 << (depth - 8);
103 if (depth > 16) /* not currently possible; prevent future bugs */
104 return AVERROR(ENOTSUP);
105
106 s->desc = desc;
107 s->depth = depth;
108 s->mpeg_min = mpeg_min;
109 s->mpeg_max = mpeg_max;
110 s->nb_threads = ff_filter_get_nb_threads(ctx);
111
112 if (desc->flags & AV_PIX_FMT_FLAG_RGB) {
113 atomic_init(&s->detected_range, AVCOL_RANGE_JPEG);
114 } else {
115 atomic_init(&s->detected_range, AVCOL_RANGE_UNSPECIFIED);
116 }
117
118 if (desc->flags & AV_PIX_FMT_FLAG_ALPHA) {
119 s->idx_a = desc->comp[desc->nb_components - 1].plane;
120 atomic_init(&s->detected_alpha, FF_ALPHA_UNDETERMINED);
121 } else {
122 atomic_init(&s->detected_alpha, FF_ALPHA_NONE);
123 }
124
125 ff_color_detect_dsp_init(&s->dsp, depth, inlink->color_range);
126 return 0;
127 }
128
129 static int detect_range(AVFilterContext *ctx, void *arg,
130 int jobnr, int nb_jobs)
131 {
132 ColorDetectContext *s = ctx->priv;
133 const AVFrame *in = arg;
134 const ptrdiff_t stride = in->linesize[0];
135 const int y_start = (in->height * jobnr) / nb_jobs;
136 const int y_end = (in->height * (jobnr + 1)) / nb_jobs;
137 const int h_slice = y_end - y_start;
138
139 if (s->dsp.detect_range(in->data[0] + y_start * stride, stride,
140 in->width, h_slice, s->mpeg_min, s->mpeg_max))
141 atomic_store(&s->detected_range, AVCOL_RANGE_JPEG);
142
143 return 0;
144 }
145
146 static int detect_alpha(AVFilterContext *ctx, void *arg,
147 int jobnr, int nb_jobs)
148 {
149 ColorDetectContext *s = ctx->priv;
150 const AVFrame *in = arg;
151 const int w = in->width;
152 const int h = in->height;
153 const int y_start = (h * jobnr) / nb_jobs;
154 const int y_end = (h * (jobnr + 1)) / nb_jobs;
155 const int h_slice = y_end - y_start;
156
157 const int nb_planes = (s->desc->flags & AV_PIX_FMT_FLAG_RGB) ? 3 : 1;
158 const ptrdiff_t alpha_stride = in->linesize[s->idx_a];
159 const uint8_t *alpha = in->data[s->idx_a] + y_start * alpha_stride;
160
161 /**
162 * To check if a value is out of range, we need to compare the color value
163 * against the maximum possible color for a given alpha value.
164 * x > ((mpeg_max - mpeg_min) / pixel_max) * a + mpeg_min
165 *
166 * This simplifies to:
167 * (x - mpeg_min) * pixel_max > (mpeg_max - mpeg_min) * a
168 * = alpha_max * x - offset > mpeg_range * a in the below formula.
169 *
170 * We subtract an additional offset of (1 << (depth - 1)) to account for
171 * rounding errors in the value of `x`.
172 */
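/* Illustrative worked example (not part of the original source), assuming
 * 8-bit limited-range input: mpeg_min = 16, mpeg_max = 235, alpha_max = 255,
 * mpeg_range = 219, offset = 255 * 16 + 128 = 4208. A color sample x = 128
 * paired with alpha a = 100 gives
 *     alpha_max * x - offset = 255 * 128 - 4208 = 28432
 *     mpeg_range * a         = 219 * 100        = 21900
 * so 28432 > 21900: the sample exceeds the largest value a premultiplied
 * color could take at that alpha, i.e. it is "out of range" in this sense.
 */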
173 const int alpha_max = (1 << s->depth) - 1;
174 const int mpeg_range = s->mpeg_max - s->mpeg_min;
175 const int offset = alpha_max * s->mpeg_min + (1 << (s->depth - 1));
176
177 int ret = 0;
178 for (int i = 0; i < nb_planes; i++) {
179 const ptrdiff_t stride = in->linesize[i];
180 ret = s->dsp.detect_alpha(in->data[i] + y_start * stride, stride,
181 alpha, alpha_stride, w, h_slice, alpha_max,
182 mpeg_range, offset);
183 ret |= atomic_fetch_or_explicit(&s->detected_alpha, ret, memory_order_relaxed);
184 if (ret == FF_ALPHA_STRAIGHT)
185 break;
186 }
187
188 return 0;
189 }
190
191 static int filter_frame(AVFilterLink *inlink, AVFrame *in)
192 {
193 AVFilterContext *ctx = inlink->dst;
194 ColorDetectContext *s = ctx->priv;
195 const int nb_threads = FFMIN(inlink->h, s->nb_threads);
196
197 if (s->mode & COLOR_DETECT_COLOR_RANGE && s->detected_range == AVCOL_RANGE_UNSPECIFIED)
198 ff_filter_execute(ctx, detect_range, in, NULL, nb_threads);
199
200 if (s->mode & COLOR_DETECT_ALPHA_MODE && s->detected_alpha != FF_ALPHA_NONE &&
201 s->detected_alpha != FF_ALPHA_STRAIGHT)
202 ff_filter_execute(ctx, detect_alpha, in, NULL, nb_threads);
203
204 return ff_filter_frame(inlink->dst->outputs[0], in);
205 }
206
207 static av_cold void uninit(AVFilterContext *ctx)
208 {
209 ColorDetectContext *s = ctx->priv;
210 if (!s->mode)
211 return;
212
213 av_log(ctx, AV_LOG_INFO, "Detected color properties:\n");
214 if (s->mode & COLOR_DETECT_COLOR_RANGE) {
215 av_log(ctx, AV_LOG_INFO, " Color range: %s\n",
216 s->detected_range == AVCOL_RANGE_JPEG ? "JPEG / full range"
217 : "undetermined");
218 }
219
220 if (s->mode & COLOR_DETECT_ALPHA_MODE) {
221 av_log(ctx, AV_LOG_INFO, " Alpha mode: %s\n",
222 s->detected_alpha == FF_ALPHA_NONE ? "none" :
223 s->detected_alpha == FF_ALPHA_STRAIGHT ? "straight" :
224 s->detected_alpha == FF_ALPHA_TRANSPARENT ? "undetermined"
225 : "opaque");
226 }
227 }
228
229 78 av_cold void ff_color_detect_dsp_init(FFColorDetectDSPContext *dsp, int depth,
230 enum AVColorRange color_range)
231 {
232 #if ARCH_X86
233 78 ff_color_detect_dsp_init_x86(dsp, depth, color_range);
234 #endif
235
236 2/2 (✓ 72 / ✓ 6)   78 if (!dsp->detect_range)
237 2/2 (✓ 36 / ✓ 36)  72 dsp->detect_range = depth > 8 ? ff_detect_range16_c : ff_detect_range_c;
238 2/2 (✓ 72 / ✓ 6)   78 if (!dsp->detect_alpha) {
239 2/2 (✓ 24 / ✓ 48)  72 if (color_range == AVCOL_RANGE_JPEG) {
240 2/2 (✓ 12 / ✓ 12)  24 dsp->detect_alpha = depth > 8 ? ff_detect_alpha16_full_c : ff_detect_alpha_full_c;
241 } else {
242 2/2 (✓ 24 / ✓ 24)  48 dsp->detect_alpha = depth > 8 ? ff_detect_alpha16_limited_c : ff_detect_alpha_limited_c;
243 }
244 }
245 78 }
246
247 static const AVFilterPad colordetect_inputs[] = {
248 {
249 .name = "default",
250 .type = AVMEDIA_TYPE_VIDEO,
251 .config_props = config_input,
252 .filter_frame = filter_frame,
253 },
254 };
255
256 const FFFilter ff_vf_colordetect = {
257 .p.name = "colordetect",
258 .p.description = NULL_IF_CONFIG_SMALL("Detect video color properties."),
259 .p.priv_class = &colordetect_class,
260 .p.flags = AVFILTER_FLAG_SLICE_THREADS | AVFILTER_FLAG_METADATA_ONLY,
261 .priv_size = sizeof(ColorDetectContext),
262 FILTER_INPUTS(colordetect_inputs),
263 FILTER_OUTPUTS(ff_video_default_filterpad),
264 FILTER_QUERY_FUNC2(query_format),
265 .uninit = uninit,
266 };
267
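Note: only ff_color_detect_dsp_init() shows execution counts above; the pixel
loops (ff_detect_range_c, ff_detect_range16_c and the alpha variants) are
referenced here but their definitions are not part of this listing. As a rough
orientation, here is a minimal scalar sketch of an 8-bit range check with the
call shape used by detect_range() above; the name detect_range_sketch and the
parameter types are inferred from the call site, and the real implementation
may differ.

    #include <stddef.h>
    #include <stdint.h>

    /* Hypothetical reference sketch: return nonzero as soon as any 8-bit
     * sample falls outside the limited/MPEG range [mpeg_min, mpeg_max],
     * which is the condition the filter uses to promote detected_range
     * to AVCOL_RANGE_JPEG. */
    static int detect_range_sketch(const uint8_t *data, ptrdiff_t stride,
                                   ptrdiff_t width, ptrdiff_t height,
                                   int mpeg_min, int mpeg_max)
    {
        for (ptrdiff_t y = 0; y < height; y++) {
            for (ptrdiff_t x = 0; x < width; x++) {
                const int v = data[x];
                if (v < mpeg_min || v > mpeg_max)
                    return 1; /* out-of-range sample found */
            }
            data += stride; /* advance one row */
        }
        return 0; /* all samples within limited range */
    }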