FFmpeg coverage


Directory: ../../../ffmpeg/
File: src/libavfilter/vf_colordetect.c
Date: 2025-07-28 20:30:09
            Exec   Total   Coverage
Lines:         9      91       9.9%
Functions:     1       7      14.3%
Branches:     12      46      26.1%

Line Branch Exec Source
1 /*
2 * Copyright (c) 2025 Niklas Haas
3 *
4 * This file is part of FFmpeg.
5 *
6 * FFmpeg is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2.1 of the License, or (at your option) any later version.
10 *
11 * FFmpeg is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with FFmpeg; if not, write to the Free Software
18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 */
20
21 /**
22 * @file
23 * Video color space detector, tries to auto-detect YUV range and alpha mode.
24 */
25
26 #include <stdbool.h>
27 #include <stdatomic.h>
28
29 #include "config.h"
30
31 #include "libavutil/mem.h"
32 #include "libavutil/opt.h"
33 #include "libavutil/pixdesc.h"
34
35 #include "avfilter.h"
36 #include "filters.h"
37 #include "formats.h"
38 #include "video.h"
39
40 #include "vf_colordetect.h"
41
42 enum AlphaMode {
43 ALPHA_NONE = -1,
44 ALPHA_UNDETERMINED = 0,
45 ALPHA_STRAIGHT,
46 /* No way to positively identify premultiplied alpha */
47 };
48
49 enum ColorDetectMode {
50 COLOR_DETECT_COLOR_RANGE = 1 << 0,
51 COLOR_DETECT_ALPHA_MODE = 1 << 1,
52 };
53
54 typedef struct ColorDetectContext {
55 const AVClass *class;
56 FFColorDetectDSPContext dsp;
57 unsigned mode;
58
59 const AVPixFmtDescriptor *desc;
60 int nb_threads;
61 int depth;
62 int idx_a;
63 int mpeg_min;
64 int mpeg_max;
65
66 atomic_int detected_range; // enum AVColorRange
67 atomic_int detected_alpha; // enum AlphaMode
68 } ColorDetectContext;
69
70 #define OFFSET(x) offsetof(ColorDetectContext, x)
71 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
72
73 static const AVOption colordetect_options[] = {
74 { "mode", "Image properties to detect", OFFSET(mode), AV_OPT_TYPE_FLAGS, {.i64 = -1}, 0, UINT_MAX, FLAGS, .unit = "mode" },
75 { "color_range", "Detect (YUV) color range", 0, AV_OPT_TYPE_CONST, {.i64 = COLOR_DETECT_COLOR_RANGE}, 0, 0, FLAGS, .unit = "mode" },
76 { "alpha_mode", "Detect alpha mode", 0, AV_OPT_TYPE_CONST, {.i64 = COLOR_DETECT_ALPHA_MODE }, 0, 0, FLAGS, .unit = "mode" },
77 { "all", "Detect all supported properties", 0, AV_OPT_TYPE_CONST, {.i64 = -1}, 0, 0, FLAGS, .unit = "mode" },
78 { NULL }
79 };
80
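Editorial note (usage illustration, not part of the source file): mode defaults to -1, i.e. all flag bits set, so a bare colordetect in a filtergraph probes every supported property and logs the findings when the filter is torn down (see uninit() further below). To restrict detection, the named constants above can be combined, e.g. something like -vf colordetect=mode=color_range+alpha_mode; the flag-combining syntax is assumed from the usual AV_OPT_TYPE_FLAGS handling rather than taken from this file.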
81 AVFILTER_DEFINE_CLASS(colordetect);
82
83 static int query_format(const AVFilterContext *ctx,
84 AVFilterFormatsConfig **cfg_in,
85 AVFilterFormatsConfig **cfg_out)
86 {
87 int want_flags = AV_PIX_FMT_FLAG_PLANAR;
88 int reject_flags = AV_PIX_FMT_FLAG_PAL | AV_PIX_FMT_FLAG_HWACCEL |
89 AV_PIX_FMT_FLAG_BITSTREAM | AV_PIX_FMT_FLAG_FLOAT |
90 AV_PIX_FMT_FLAG_BAYER | AV_PIX_FMT_FLAG_XYZ;
91
92 if (HAVE_BIGENDIAN) {
93 want_flags |= AV_PIX_FMT_FLAG_BE;
94 } else {
95 reject_flags |= AV_PIX_FMT_FLAG_BE;
96 }
97
98 AVFilterFormats *formats = ff_formats_pixdesc_filter(want_flags, reject_flags);
99 return ff_set_common_formats2(ctx, cfg_in, cfg_out, formats);
100 }
101
102 static int config_input(AVFilterLink *inlink)
103 {
104 AVFilterContext *ctx = inlink->dst;
105 ColorDetectContext *s = ctx->priv;
106 const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
107 const int depth = desc->comp[0].depth;
108 const int mpeg_min = 16 << (depth - 8);
109 const int mpeg_max = 235 << (depth - 8);
110 if (depth > 16) /* not currently possible; prevent future bugs */
111 return AVERROR(ENOTSUP);
112
113 s->desc = desc;
114 s->depth = depth;
115 s->mpeg_min = mpeg_min;
116 s->mpeg_max = mpeg_max;
117 s->nb_threads = ff_filter_get_nb_threads(ctx);
118
119 if (desc->flags & AV_PIX_FMT_FLAG_RGB) {
120 atomic_init(&s->detected_range, AVCOL_RANGE_JPEG);
121 } else {
122 atomic_init(&s->detected_range, AVCOL_RANGE_UNSPECIFIED);
123 }
124
125 if (desc->flags & AV_PIX_FMT_FLAG_ALPHA) {
126 s->idx_a = desc->comp[desc->nb_components - 1].plane;
127 atomic_init(&s->detected_alpha, ALPHA_UNDETERMINED);
128 } else {
129 atomic_init(&s->detected_alpha, ALPHA_NONE);
130 }
131
132 ff_color_detect_dsp_init(&s->dsp, depth, inlink->color_range);
133 return 0;
134 }
135
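Editorial note (illustrative, not part of the source file): the shifts in config_input() scale the nominal 8-bit limited-range bounds 16..235 up to the component bit depth. A minimal, self-contained sketch of that arithmetic:

    #include <stdio.h>

    int main(void)
    {
        /* Mirror the bound computation in config_input(): the 8-bit
         * limited-range ("MPEG") bounds 16..235 are shifted up to the
         * component bit depth. */
        for (int depth = 8; depth <= 16; depth += 2) {
            int mpeg_min = 16  << (depth - 8);
            int mpeg_max = 235 << (depth - 8);
            printf("depth %2d: mpeg_min = %5d, mpeg_max = %6d\n",
                   depth, mpeg_min, mpeg_max);
        }
        return 0;
    }

For 10-bit input this yields 64..940, for 12-bit 256..3760, matching the usual limited-range definitions.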
136 static int detect_range(AVFilterContext *ctx, void *arg,
137 int jobnr, int nb_jobs)
138 {
139 ColorDetectContext *s = ctx->priv;
140 const AVFrame *in = arg;
141 const ptrdiff_t stride = in->linesize[0];
142 const int y_start = (in->height * jobnr) / nb_jobs;
143 const int y_end = (in->height * (jobnr + 1)) / nb_jobs;
144 const int h_slice = y_end - y_start;
145
146 if (s->dsp.detect_range(in->data[0] + y_start * stride, stride,
147 in->width, h_slice, s->mpeg_min, s->mpeg_max))
148 atomic_store(&s->detected_range, AVCOL_RANGE_JPEG);
149
150 return 0;
151 }
152
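Editorial note (illustrative, not part of the source file): detect_range() and detect_alpha() below are slice-threaded; each job scans a contiguous band of rows derived from jobnr/nb_jobs. A small sketch of that partitioning, using the same integer arithmetic:

    #include <stdio.h>

    int main(void)
    {
        /* How the row range [y_start, y_end) is derived per slice job,
         * as in detect_range()/detect_alpha(). */
        const int height = 1080, nb_jobs = 8;
        for (int jobnr = 0; jobnr < nb_jobs; jobnr++) {
            int y_start = (height * jobnr) / nb_jobs;
            int y_end   = (height * (jobnr + 1)) / nb_jobs;
            printf("job %d: rows %4d..%4d (%3d rows)\n",
                   jobnr, y_start, y_end - 1, y_end - y_start);
        }
        return 0;
    }

The height and job count here are arbitrary example values.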
153 static int detect_alpha(AVFilterContext *ctx, void *arg,
154 int jobnr, int nb_jobs)
155 {
156 ColorDetectContext *s = ctx->priv;
157 const AVFrame *in = arg;
158 const int w = in->width;
159 const int h = in->height;
160 const int y_start = (h * jobnr) / nb_jobs;
161 const int y_end = (h * (jobnr + 1)) / nb_jobs;
162 const int h_slice = y_end - y_start;
163
164 const int nb_planes = (s->desc->flags & AV_PIX_FMT_FLAG_RGB) ? 3 : 1;
165 const ptrdiff_t alpha_stride = in->linesize[s->idx_a];
166 const uint8_t *alpha = in->data[s->idx_a] + y_start * alpha_stride;
167
168 /**
169 * To check if a value is out of range, we need to compare the color value
170 * against the maximum possible color for a given alpha value.
171 * x > ((mpeg_max - mpeg_min) / pixel_max) * a + mpeg_min
172 *
173 * This simplifies to:
174 * (x - mpeg_min) * pixel_max > (mpeg_max - mpeg_min) * a
175 * = P * x - K > Q * a in the below formula.
176 *
177 * We subtract an additional offset of (1 << (depth - 1)) to account for
178 * rounding errors in the value of `x`, and an extra safety margin of
179 * Q because vf_premultiply.c et al. add an offset of (a >> 1) & 1.
180 */
181 const int p = (1 << s->depth) - 1;
182 const int q = s->mpeg_max - s->mpeg_min;
183 const int k = p * s->mpeg_min + q + (1 << (s->depth - 1));
184
185 for (int i = 0; i < nb_planes; i++) {
186 const ptrdiff_t stride = in->linesize[i];
187 if (s->dsp.detect_alpha(in->data[i] + y_start * stride, stride,
188 alpha, alpha_stride, w, h_slice, p, q, k)) {
189 atomic_store(&s->detected_alpha, ALPHA_STRAIGHT);
190 return 0;
191 }
192 }
193
194 return 0;
195 }
196
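Editorial note (worked example, not part of the source file): plugging 8-bit limited-range numbers into the constants computed in detect_alpha() gives p = 255, q = 219 and k = 255*16 + 219 + 128 = 4427. The sketch below evaluates the straight-alpha test p*x - k > q*a for a few (color value, alpha value) pairs; the sample values are illustrative.

    #include <stdio.h>

    int main(void)
    {
        /* The detect_alpha() constants for 8-bit limited-range input. */
        const int depth    = 8;
        const int mpeg_min = 16, mpeg_max = 235;
        const int p = (1 << depth) - 1;                       /* 255  */
        const int q = mpeg_max - mpeg_min;                    /* 219  */
        const int k = p * mpeg_min + q + (1 << (depth - 1));  /* 4427 */

        /* Sample (color value, alpha value) pairs. */
        const int samples[][2] = {
            { 235, 255 },  /* within range even for an opaque pixel          */
            { 240, 255 },  /* above mpeg_max: flagged as straight alpha      */
            { 200, 128 },  /* too bright for a premultiplied pixel at a=128  */
            { 120, 128 },  /* consistent with premultiplied alpha            */
        };

        for (int i = 0; i < 4; i++) {
            const int x = samples[i][0], a = samples[i][1];
            printf("x=%3d a=%3d -> %s\n", x, a,
                   p * x - k > q * a ? "flags straight alpha"
                                     : "consistent with premultiplied");
        }
        return 0;
    }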
197 static int filter_frame(AVFilterLink *inlink, AVFrame *in)
198 {
199 AVFilterContext *ctx = inlink->dst;
200 ColorDetectContext *s = ctx->priv;
201 const int nb_threads = FFMIN(inlink->h, s->nb_threads);
202
203 if (s->mode & COLOR_DETECT_COLOR_RANGE && s->detected_range == AVCOL_RANGE_UNSPECIFIED)
204 ff_filter_execute(ctx, detect_range, in, NULL, nb_threads);
205 if (s->mode & COLOR_DETECT_ALPHA_MODE && s->detected_alpha == ALPHA_UNDETERMINED)
206 ff_filter_execute(ctx, detect_alpha, in, NULL, nb_threads);
207
208 return ff_filter_frame(inlink->dst->outputs[0], in);
209 }
210
211 static av_cold void uninit(AVFilterContext *ctx)
212 {
213 ColorDetectContext *s = ctx->priv;
214 if (!s->mode)
215 return;
216
217 av_log(ctx, AV_LOG_INFO, "Detected color properties:\n");
218 if (s->mode & COLOR_DETECT_COLOR_RANGE) {
219 av_log(ctx, AV_LOG_INFO, " Color range: %s\n",
220 s->detected_range == AVCOL_RANGE_JPEG ? "JPEG / full range"
221 : "undetermined");
222 }
223
224 if (s->mode & COLOR_DETECT_ALPHA_MODE) {
225 av_log(ctx, AV_LOG_INFO, " Alpha mode: %s\n",
226 s->detected_alpha == ALPHA_NONE ? "none" :
227 s->detected_alpha == ALPHA_STRAIGHT ? "straight / independent"
228 : "undetermined");
229 }
230 }
231
232 78 av_cold void ff_color_detect_dsp_init(FFColorDetectDSPContext *dsp, int depth,
233 enum AVColorRange color_range)
234 {
235 #if ARCH_X86
236 78 ff_color_detect_dsp_init_x86(dsp, depth, color_range);
237 #endif
238
239 2/2 (✓72, ✓6) 78 if (!dsp->detect_range)
240 2/2 (✓36, ✓36) 72 dsp->detect_range = depth > 8 ? ff_detect_range16_c : ff_detect_range_c;
241 2/2 (✓72, ✓6) 78 if (!dsp->detect_alpha) {
242 2/2 (✓24, ✓48) 72 if (color_range == AVCOL_RANGE_JPEG) {
243 2/2 (✓12, ✓12) 24 dsp->detect_alpha = depth > 8 ? ff_detect_alpha16_full_c : ff_detect_alpha_full_c;
244 } else {
245 2/2 (✓24, ✓24) 48 dsp->detect_alpha = depth > 8 ? ff_detect_alpha16_limited_c : ff_detect_alpha_limited_c;
246 }
247 }
248 78 }
249
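Editorial note (a sketch only, under stated assumptions): the C fallbacks selected above (ff_detect_range_c and friends) are defined in vf_colordetect.h, which is not part of this listing. Judging from the call site in detect_range(), an 8-bit range probe with this calling convention scans a plane and reports whether any sample falls outside [mpeg_min, mpeg_max]; the function name and body below are illustrative, not the actual implementation.

    #include <stdint.h>
    #include <stddef.h>

    /* Hypothetical stand-in for an 8-bit C range probe matching the call in
     * detect_range(): return nonzero as soon as any sample lies outside the
     * limited-range interval [mpeg_min, mpeg_max], which the caller then
     * interprets as full (JPEG) range. */
    static int detect_range8_sketch(const uint8_t *data, ptrdiff_t stride,
                                    int width, int height,
                                    int mpeg_min, int mpeg_max)
    {
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                const int v = data[x];
                if (v < mpeg_min || v > mpeg_max)
                    return 1;
            }
            data += stride;
        }
        return 0;
    }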
250 static const AVFilterPad colordetect_inputs[] = {
251 {
252 .name = "default",
253 .type = AVMEDIA_TYPE_VIDEO,
254 .config_props = config_input,
255 .filter_frame = filter_frame,
256 },
257 };
258
259 const FFFilter ff_vf_colordetect = {
260 .p.name = "colordetect",
261 .p.description = NULL_IF_CONFIG_SMALL("Detect video color properties."),
262 .p.priv_class = &colordetect_class,
263 .p.flags = AVFILTER_FLAG_SLICE_THREADS | AVFILTER_FLAG_METADATA_ONLY,
264 .priv_size = sizeof(ColorDetectContext),
265 FILTER_INPUTS(colordetect_inputs),
266 FILTER_OUTPUTS(ff_video_default_filterpad),
267 FILTER_QUERY_FUNC2(query_format),
268 .uninit = uninit,
269 };
270