GCC Code Coverage Report | |||||||||||||||||||||
|
|||||||||||||||||||||
Line | Branch | Exec | Source |
1 |
/* |
||
2 |
* Copyright (c) 2013 Paul B Mahol |
||
3 |
* |
||
4 |
* This file is part of FFmpeg. |
||
5 |
* |
||
6 |
* FFmpeg is free software; you can redistribute it and/or |
||
7 |
* modify it under the terms of the GNU Lesser General Public |
||
8 |
* License as published by the Free Software Foundation; either |
||
9 |
* version 2.1 of the License, or (at your option) any later version. |
||
10 |
* |
||
11 |
* FFmpeg is distributed in the hope that it will be useful, |
||
12 |
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||
13 |
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
||
14 |
* Lesser General Public License for more details. |
||
15 |
* |
||
16 |
* You should have received a copy of the GNU Lesser General Public |
||
17 |
* License along with FFmpeg; if not, write to the Free Software |
||
18 |
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
||
19 |
*/ |
||
20 |
|||
21 |
#include "libavutil/opt.h" |
||
22 |
#include "libavutil/pixdesc.h" |
||
23 |
#include "avfilter.h" |
||
24 |
#include "drawutils.h" |
||
25 |
#include "formats.h" |
||
26 |
#include "internal.h" |
||
27 |
#include "video.h" |
||
28 |
|||
29 |
#define R 0 |
||
30 |
#define G 1 |
||
31 |
#define B 2 |
||
32 |
#define A 3 |
||
33 |
|||
34 |
typedef struct ThreadData { |
||
35 |
AVFrame *in, *out; |
||
36 |
} ThreadData; |
||
37 |
|||
38 |
typedef struct Range { |
||
39 |
float shadows; |
||
40 |
float midtones; |
||
41 |
float highlights; |
||
42 |
} Range; |
||
43 |
|||
44 |
typedef struct ColorBalanceContext { |
||
45 |
const AVClass *class; |
||
46 |
Range cyan_red; |
||
47 |
Range magenta_green; |
||
48 |
Range yellow_blue; |
||
49 |
int preserve_lightness; |
||
50 |
|||
51 |
uint8_t rgba_map[4]; |
||
52 |
int depth; |
||
53 |
int max; |
||
54 |
int step; |
||
55 |
|||
56 |
int (*color_balance)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs); |
||
57 |
} ColorBalanceContext; |
||
58 |
|||
59 |
#define OFFSET(x) offsetof(ColorBalanceContext, x) |
||
60 |
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM |
||
61 |
static const AVOption colorbalance_options[] = { |
||
62 |
{ "rs", "set red shadows", OFFSET(cyan_red.shadows), AV_OPT_TYPE_FLOAT, {.dbl=0}, -1, 1, FLAGS }, |
||
63 |
{ "gs", "set green shadows", OFFSET(magenta_green.shadows), AV_OPT_TYPE_FLOAT, {.dbl=0}, -1, 1, FLAGS }, |
||
64 |
{ "bs", "set blue shadows", OFFSET(yellow_blue.shadows), AV_OPT_TYPE_FLOAT, {.dbl=0}, -1, 1, FLAGS }, |
||
65 |
{ "rm", "set red midtones", OFFSET(cyan_red.midtones), AV_OPT_TYPE_FLOAT, {.dbl=0}, -1, 1, FLAGS }, |
||
66 |
{ "gm", "set green midtones", OFFSET(magenta_green.midtones), AV_OPT_TYPE_FLOAT, {.dbl=0}, -1, 1, FLAGS }, |
||
67 |
{ "bm", "set blue midtones", OFFSET(yellow_blue.midtones), AV_OPT_TYPE_FLOAT, {.dbl=0}, -1, 1, FLAGS }, |
||
68 |
{ "rh", "set red highlights", OFFSET(cyan_red.highlights), AV_OPT_TYPE_FLOAT, {.dbl=0}, -1, 1, FLAGS }, |
||
69 |
{ "gh", "set green highlights", OFFSET(magenta_green.highlights), AV_OPT_TYPE_FLOAT, {.dbl=0}, -1, 1, FLAGS }, |
||
70 |
{ "bh", "set blue highlights", OFFSET(yellow_blue.highlights), AV_OPT_TYPE_FLOAT, {.dbl=0}, -1, 1, FLAGS }, |
||
71 |
{ "pl", "preserve lightness", OFFSET(preserve_lightness), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS }, |
||
72 |
{ NULL } |
||
73 |
}; |
||
74 |
|||
75 |
AVFILTER_DEFINE_CLASS(colorbalance); |
||
76 |
|||
77 |
4 |
static int query_formats(AVFilterContext *ctx) |
|
78 |
{ |
||
79 |
static const enum AVPixelFormat pix_fmts[] = { |
||
80 |
AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24, |
||
81 |
AV_PIX_FMT_RGBA, AV_PIX_FMT_BGRA, |
||
82 |
AV_PIX_FMT_ABGR, AV_PIX_FMT_ARGB, |
||
83 |
AV_PIX_FMT_0BGR, AV_PIX_FMT_0RGB, |
||
84 |
AV_PIX_FMT_RGB0, AV_PIX_FMT_BGR0, |
||
85 |
AV_PIX_FMT_RGB48, AV_PIX_FMT_BGR48, |
||
86 |
AV_PIX_FMT_RGBA64, AV_PIX_FMT_BGRA64, |
||
87 |
AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP, |
||
88 |
AV_PIX_FMT_GBRP9, |
||
89 |
AV_PIX_FMT_GBRP10, AV_PIX_FMT_GBRAP10, |
||
90 |
AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRAP12, |
||
91 |
AV_PIX_FMT_GBRP14, |
||
92 |
AV_PIX_FMT_GBRP16, AV_PIX_FMT_GBRAP16, |
||
93 |
AV_PIX_FMT_NONE |
||
94 |
}; |
||
95 |
4 |
AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts); |
|
96 |
✗✓ | 4 |
if (!fmts_list) |
97 |
return AVERROR(ENOMEM); |
||
98 |
4 |
return ff_set_common_formats(ctx, fmts_list); |
|
99 |
} |
||
100 |
|||
101 |
3649536 |
static float get_component(float v, float l, |
|
102 |
float s, float m, float h) |
||
103 |
{ |
||
104 |
3649536 |
const float a = 4.f, b = 0.333f, scale = 0.7f; |
|
105 |
|||
106 |
3649536 |
s *= av_clipf((b - l) * a + 0.5f, 0, 1) * scale; |
|
107 |
3649536 |
m *= av_clipf((l - b) * a + 0.5f, 0, 1) * av_clipf((1.0 - l - b) * a + 0.5f, 0, 1) * scale; |
|
108 |
3649536 |
h *= av_clipf((l + b - 1) * a + 0.5f, 0, 1) * scale; |
|
109 |
|||
110 |
3649536 |
v += s; |
|
111 |
3649536 |
v += m; |
|
112 |
3649536 |
v += h; |
|
113 |
|||
114 |
3649536 |
return av_clipf(v, 0, 1); |
|
115 |
} |
||
116 |
|||
117 |
static float hfun(float n, float h, float s, float l) |
||
118 |
{ |
||
119 |
float a = s * FFMIN(l, 1. - l); |
||
120 |
float k = fmodf(n + h / 30.f, 12.f); |
||
121 |
|||
122 |
return av_clipf(l - a * FFMAX(FFMIN3(k - 3.f, 9.f - k, 1), -1.f), 0, 1); |
||
123 |
} |
||
124 |
|||
125 |
static void preservel(float *r, float *g, float *b, float l) |
||
126 |
{ |
||
127 |
float max = FFMAX3(*r, *g, *b); |
||
128 |
float min = FFMIN3(*r, *g, *b); |
||
129 |
float h, s; |
||
130 |
|||
131 |
l *= 0.5; |
||
132 |
|||
133 |
if (*r == *g && *g == *b) { |
||
134 |
h = 0.; |
||
135 |
} else if (max == *r) { |
||
136 |
h = 60. * (0. + (*g - *b) / (max - min)); |
||
137 |
} else if (max == *g) { |
||
138 |
h = 60. * (2. + (*b - *r) / (max - min)); |
||
139 |
} else if (max == *b) { |
||
140 |
h = 60. * (4. + (*r - *g) / (max - min)); |
||
141 |
} else { |
||
142 |
h = 0.; |
||
143 |
} |
||
144 |
if (h < 0.) |
||
145 |
h += 360.; |
||
146 |
|||
147 |
if (max == 0. || min == 1.) { |
||
148 |
s = 0.; |
||
149 |
} else { |
||
150 |
s = (max - min) / (1. - FFABS(2. * l - 1)); |
||
151 |
} |
||
152 |
|||
153 |
*r = hfun(0, h, s, l); |
||
154 |
*g = hfun(8, h, s, l); |
||
155 |
*b = hfun(4, h, s, l); |
||
156 |
} |
||
157 |
|||
158 |
54 |
static int color_balance8_p(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) |
|
159 |
{ |
||
160 |
54 |
ColorBalanceContext *s = ctx->priv; |
|
161 |
54 |
ThreadData *td = arg; |
|
162 |
54 |
AVFrame *in = td->in; |
|
163 |
54 |
AVFrame *out = td->out; |
|
164 |
54 |
const int slice_start = (out->height * jobnr) / nb_jobs; |
|
165 |
54 |
const int slice_end = (out->height * (jobnr+1)) / nb_jobs; |
|
166 |
54 |
const uint8_t *srcg = in->data[0] + slice_start * in->linesize[0]; |
|
167 |
54 |
const uint8_t *srcb = in->data[1] + slice_start * in->linesize[1]; |
|
168 |
54 |
const uint8_t *srcr = in->data[2] + slice_start * in->linesize[2]; |
|
169 |
54 |
const uint8_t *srca = in->data[3] + slice_start * in->linesize[3]; |
|
170 |
54 |
uint8_t *dstg = out->data[0] + slice_start * out->linesize[0]; |
|
171 |
54 |
uint8_t *dstb = out->data[1] + slice_start * out->linesize[1]; |
|
172 |
54 |
uint8_t *dstr = out->data[2] + slice_start * out->linesize[2]; |
|
173 |
54 |
uint8_t *dsta = out->data[3] + slice_start * out->linesize[3]; |
|
174 |
54 |
const float max = s->max; |
|
175 |
int i, j; |
||
176 |
|||
177 |
✓✓ | 1782 |
for (i = slice_start; i < slice_end; i++) { |
178 |
✓✓ | 609984 |
for (j = 0; j < out->width; j++) { |
179 |
608256 |
float r = srcr[j] / max; |
|
180 |
608256 |
float g = srcg[j] / max; |
|
181 |
608256 |
float b = srcb[j] / max; |
|
182 |
✓✓✓✓ ✓✓✓✓ ✓✓✓✓ |
608256 |
const float l = FFMAX3(r, g, b) + FFMIN3(r, g, b); |
183 |
|||
184 |
608256 |
r = get_component(r, l, s->cyan_red.shadows, s->cyan_red.midtones, s->cyan_red.highlights); |
|
185 |
608256 |
g = get_component(g, l, s->magenta_green.shadows, s->magenta_green.midtones, s->magenta_green.highlights); |
|
186 |
608256 |
b = get_component(b, l, s->yellow_blue.shadows, s->yellow_blue.midtones, s->yellow_blue.highlights); |
|
187 |
|||
188 |
✗✓ | 608256 |
if (s->preserve_lightness) |
189 |
preservel(&r, &g, &b, l); |
||
190 |
|||
191 |
608256 |
dstr[j] = av_clip_uint8(lrintf(r * max)); |
|
192 |
608256 |
dstg[j] = av_clip_uint8(lrintf(g * max)); |
|
193 |
608256 |
dstb[j] = av_clip_uint8(lrintf(b * max)); |
|
194 |
✗✓✗✗ |
608256 |
if (in != out && out->linesize[3]) |
195 |
dsta[j] = srca[j]; |
||
196 |
} |
||
197 |
|||
198 |
1728 |
srcg += in->linesize[0]; |
|
199 |
1728 |
srcb += in->linesize[1]; |
|
200 |
1728 |
srcr += in->linesize[2]; |
|
201 |
1728 |
srca += in->linesize[3]; |
|
202 |
1728 |
dstg += out->linesize[0]; |
|
203 |
1728 |
dstb += out->linesize[1]; |
|
204 |
1728 |
dstr += out->linesize[2]; |
|
205 |
1728 |
dsta += out->linesize[3]; |
|
206 |
} |
||
207 |
|||
208 |
54 |
return 0; |
|
209 |
} |
||
210 |
|||
211 |
static int color_balance16_p(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) |
||
212 |
{ |
||
213 |
ColorBalanceContext *s = ctx->priv; |
||
214 |
ThreadData *td = arg; |
||
215 |
AVFrame *in = td->in; |
||
216 |
AVFrame *out = td->out; |
||
217 |
const int slice_start = (out->height * jobnr) / nb_jobs; |
||
218 |
const int slice_end = (out->height * (jobnr+1)) / nb_jobs; |
||
219 |
const uint16_t *srcg = (const uint16_t *)in->data[0] + slice_start * in->linesize[0] / 2; |
||
220 |
const uint16_t *srcb = (const uint16_t *)in->data[1] + slice_start * in->linesize[1] / 2; |
||
221 |
const uint16_t *srcr = (const uint16_t *)in->data[2] + slice_start * in->linesize[2] / 2; |
||
222 |
const uint16_t *srca = (const uint16_t *)in->data[3] + slice_start * in->linesize[3] / 2; |
||
223 |
uint16_t *dstg = (uint16_t *)out->data[0] + slice_start * out->linesize[0] / 2; |
||
224 |
uint16_t *dstb = (uint16_t *)out->data[1] + slice_start * out->linesize[1] / 2; |
||
225 |
uint16_t *dstr = (uint16_t *)out->data[2] + slice_start * out->linesize[2] / 2; |
||
226 |
uint16_t *dsta = (uint16_t *)out->data[3] + slice_start * out->linesize[3] / 2; |
||
227 |
const int depth = s->depth; |
||
228 |
const float max = s->max; |
||
229 |
int i, j; |
||
230 |
|||
231 |
for (i = slice_start; i < slice_end; i++) { |
||
232 |
for (j = 0; j < out->width; j++) { |
||
233 |
float r = srcr[j] / max; |
||
234 |
float g = srcg[j] / max; |
||
235 |
float b = srcb[j] / max; |
||
236 |
const float l = (FFMAX3(r, g, b) + FFMIN3(r, g, b)); |
||
237 |
|||
238 |
r = get_component(r, l, s->cyan_red.shadows, s->cyan_red.midtones, s->cyan_red.highlights); |
||
239 |
g = get_component(g, l, s->magenta_green.shadows, s->magenta_green.midtones, s->magenta_green.highlights); |
||
240 |
b = get_component(b, l, s->yellow_blue.shadows, s->yellow_blue.midtones, s->yellow_blue.highlights); |
||
241 |
|||
242 |
if (s->preserve_lightness) |
||
243 |
preservel(&r, &g, &b, l); |
||
244 |
|||
245 |
dstr[j] = av_clip_uintp2_c(lrintf(r * max), depth); |
||
246 |
dstg[j] = av_clip_uintp2_c(lrintf(g * max), depth); |
||
247 |
dstb[j] = av_clip_uintp2_c(lrintf(b * max), depth); |
||
248 |
if (in != out && out->linesize[3]) |
||
249 |
dsta[j] = srca[j]; |
||
250 |
} |
||
251 |
|||
252 |
srcg += in->linesize[0] / 2; |
||
253 |
srcb += in->linesize[1] / 2; |
||
254 |
srcr += in->linesize[2] / 2; |
||
255 |
srca += in->linesize[3] / 2; |
||
256 |
dstg += out->linesize[0] / 2; |
||
257 |
dstb += out->linesize[1] / 2; |
||
258 |
dstr += out->linesize[2] / 2; |
||
259 |
dsta += out->linesize[3] / 2; |
||
260 |
} |
||
261 |
|||
262 |
return 0; |
||
263 |
} |
||
264 |
|||
265 |
27 |
static int color_balance8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) |
|
266 |
{ |
||
267 |
27 |
ColorBalanceContext *s = ctx->priv; |
|
268 |
27 |
ThreadData *td = arg; |
|
269 |
27 |
AVFrame *in = td->in; |
|
270 |
27 |
AVFrame *out = td->out; |
|
271 |
27 |
AVFilterLink *outlink = ctx->outputs[0]; |
|
272 |
27 |
const int slice_start = (out->height * jobnr) / nb_jobs; |
|
273 |
27 |
const int slice_end = (out->height * (jobnr+1)) / nb_jobs; |
|
274 |
27 |
const uint8_t *srcrow = in->data[0] + slice_start * in->linesize[0]; |
|
275 |
27 |
const uint8_t roffset = s->rgba_map[R]; |
|
276 |
27 |
const uint8_t goffset = s->rgba_map[G]; |
|
277 |
27 |
const uint8_t boffset = s->rgba_map[B]; |
|
278 |
27 |
const uint8_t aoffset = s->rgba_map[A]; |
|
279 |
27 |
const float max = s->max; |
|
280 |
27 |
const int step = s->step; |
|
281 |
uint8_t *dstrow; |
||
282 |
int i, j; |
||
283 |
|||
284 |
27 |
dstrow = out->data[0] + slice_start * out->linesize[0]; |
|
285 |
✓✓ | 891 |
for (i = slice_start; i < slice_end; i++) { |
286 |
864 |
const uint8_t *src = srcrow; |
|
287 |
864 |
uint8_t *dst = dstrow; |
|
288 |
|||
289 |
✓✓ | 304992 |
for (j = 0; j < outlink->w * step; j += step) { |
290 |
304128 |
float r = src[j + roffset] / max; |
|
291 |
304128 |
float g = src[j + goffset] / max; |
|
292 |
304128 |
float b = src[j + boffset] / max; |
|
293 |
✓✓✓✓ ✓✓✓✓ ✓✓✓✓ |
304128 |
const float l = (FFMAX3(r, g, b) + FFMIN3(r, g, b)); |
294 |
|||
295 |
304128 |
r = get_component(r, l, s->cyan_red.shadows, s->cyan_red.midtones, s->cyan_red.highlights); |
|
296 |
304128 |
g = get_component(g, l, s->magenta_green.shadows, s->magenta_green.midtones, s->magenta_green.highlights); |
|
297 |
304128 |
b = get_component(b, l, s->yellow_blue.shadows, s->yellow_blue.midtones, s->yellow_blue.highlights); |
|
298 |
|||
299 |
✗✓ | 304128 |
if (s->preserve_lightness) |
300 |
preservel(&r, &g, &b, l); |
||
301 |
|||
302 |
304128 |
dst[j + roffset] = av_clip_uint8(lrintf(r * max)); |
|
303 |
304128 |
dst[j + goffset] = av_clip_uint8(lrintf(g * max)); |
|
304 |
304128 |
dst[j + boffset] = av_clip_uint8(lrintf(b * max)); |
|
305 |
✗✓✗✗ |
304128 |
if (in != out && step == 4) |
306 |
dst[j + aoffset] = src[j + aoffset]; |
||
307 |
} |
||
308 |
|||
309 |
864 |
srcrow += in->linesize[0]; |
|
310 |
864 |
dstrow += out->linesize[0]; |
|
311 |
} |
||
312 |
|||
313 |
27 |
return 0; |
|
314 |
} |
||
315 |
|||
316 |
27 |
static int color_balance16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) |
|
317 |
{ |
||
318 |
27 |
ColorBalanceContext *s = ctx->priv; |
|
319 |
27 |
ThreadData *td = arg; |
|
320 |
27 |
AVFrame *in = td->in; |
|
321 |
27 |
AVFrame *out = td->out; |
|
322 |
27 |
AVFilterLink *outlink = ctx->outputs[0]; |
|
323 |
27 |
const int slice_start = (out->height * jobnr) / nb_jobs; |
|
324 |
27 |
const int slice_end = (out->height * (jobnr+1)) / nb_jobs; |
|
325 |
27 |
const uint16_t *srcrow = (const uint16_t *)in->data[0] + slice_start * in->linesize[0] / 2; |
|
326 |
27 |
const uint8_t roffset = s->rgba_map[R]; |
|
327 |
27 |
const uint8_t goffset = s->rgba_map[G]; |
|
328 |
27 |
const uint8_t boffset = s->rgba_map[B]; |
|
329 |
27 |
const uint8_t aoffset = s->rgba_map[A]; |
|
330 |
27 |
const int step = s->step / 2; |
|
331 |
27 |
const int depth = s->depth; |
|
332 |
27 |
const float max = s->max; |
|
333 |
uint16_t *dstrow; |
||
334 |
int i, j; |
||
335 |
|||
336 |
27 |
dstrow = (uint16_t *)out->data[0] + slice_start * out->linesize[0] / 2; |
|
337 |
✓✓ | 891 |
for (i = slice_start; i < slice_end; i++) { |
338 |
864 |
const uint16_t *src = srcrow; |
|
339 |
864 |
uint16_t *dst = dstrow; |
|
340 |
|||
341 |
✓✓ | 304992 |
for (j = 0; j < outlink->w * step; j += step) { |
342 |
304128 |
float r = src[j + roffset] / max; |
|
343 |
304128 |
float g = src[j + goffset] / max; |
|
344 |
304128 |
float b = src[j + boffset] / max; |
|
345 |
✓✓✓✓ ✓✓✓✓ ✓✓✓✓ |
304128 |
const float l = (FFMAX3(r, g, b) + FFMIN3(r, g, b)); |
346 |
|||
347 |
304128 |
r = get_component(r, l, s->cyan_red.shadows, s->cyan_red.midtones, s->cyan_red.highlights); |
|
348 |
304128 |
g = get_component(g, l, s->magenta_green.shadows, s->magenta_green.midtones, s->magenta_green.highlights); |
|
349 |
304128 |
b = get_component(b, l, s->yellow_blue.shadows, s->yellow_blue.midtones, s->yellow_blue.highlights); |
|
350 |
|||
351 |
✗✓ | 304128 |
if (s->preserve_lightness) |
352 |
preservel(&r, &g, &b, l); |
||
353 |
|||
354 |
304128 |
dst[j + roffset] = av_clip_uintp2_c(lrintf(r * max), depth); |
|
355 |
304128 |
dst[j + goffset] = av_clip_uintp2_c(lrintf(g * max), depth); |
|
356 |
304128 |
dst[j + boffset] = av_clip_uintp2_c(lrintf(b * max), depth); |
|
357 |
✗✓✗✗ |
304128 |
if (in != out && step == 4) |
358 |
dst[j + aoffset] = src[j + aoffset]; |
||
359 |
} |
||
360 |
|||
361 |
864 |
srcrow += in->linesize[0] / 2; |
|
362 |
864 |
dstrow += out->linesize[0] / 2; |
|
363 |
} |
||
364 |
|||
365 |
27 |
return 0; |
|
366 |
} |
||
367 |
|||
368 |
4 |
static int config_output(AVFilterLink *outlink) |
|
369 |
{ |
||
370 |
4 |
AVFilterContext *ctx = outlink->src; |
|
371 |
4 |
ColorBalanceContext *s = ctx->priv; |
|
372 |
4 |
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format); |
|
373 |
4 |
const int depth = desc->comp[0].depth; |
|
374 |
4 |
const int max = (1 << depth) - 1; |
|
375 |
4 |
const int planar = av_pix_fmt_count_planes(outlink->format) > 1; |
|
376 |
|||
377 |
4 |
s->depth = depth; |
|
378 |
4 |
s->max = max; |
|
379 |
|||
380 |
✓✓✓✓ |
4 |
if (max == 255 && planar) { |
381 |
2 |
s->color_balance = color_balance8_p; |
|
382 |
✗✓ | 2 |
} else if (planar) { |
383 |
s->color_balance = color_balance16_p; |
||
384 |
✓✓ | 2 |
} else if (max == 255) { |
385 |
1 |
s->color_balance = color_balance8; |
|
386 |
} else { |
||
387 |
1 |
s->color_balance = color_balance16; |
|
388 |
} |
||
389 |
|||
390 |
4 |
ff_fill_rgba_map(s->rgba_map, outlink->format); |
|
391 |
4 |
s->step = av_get_padded_bits_per_pixel(desc) >> 3; |
|
392 |
|||
393 |
4 |
return 0; |
|
394 |
} |
||
395 |
|||
396 |
12 |
static int filter_frame(AVFilterLink *inlink, AVFrame *in) |
|
397 |
{ |
||
398 |
12 |
AVFilterContext *ctx = inlink->dst; |
|
399 |
12 |
ColorBalanceContext *s = ctx->priv; |
|
400 |
12 |
AVFilterLink *outlink = ctx->outputs[0]; |
|
401 |
ThreadData td; |
||
402 |
AVFrame *out; |
||
403 |
|||
404 |
✓✗ | 12 |
if (av_frame_is_writable(in)) { |
405 |
12 |
out = in; |
|
406 |
} else { |
||
407 |
out = ff_get_video_buffer(outlink, outlink->w, outlink->h); |
||
408 |
if (!out) { |
||
409 |
av_frame_free(&in); |
||
410 |
return AVERROR(ENOMEM); |
||
411 |
} |
||
412 |
av_frame_copy_props(out, in); |
||
413 |
} |
||
414 |
|||
415 |
12 |
td.in = in; |
|
416 |
12 |
td.out = out; |
|
417 |
✓✗ | 12 |
ctx->internal->execute(ctx, s->color_balance, &td, NULL, FFMIN(outlink->h, ff_filter_get_nb_threads(ctx))); |
418 |
|||
419 |
✗✓ | 12 |
if (in != out) |
420 |
av_frame_free(&in); |
||
421 |
12 |
return ff_filter_frame(outlink, out); |
|
422 |
} |
||
423 |
|||
424 |
static const AVFilterPad colorbalance_inputs[] = { |
||
425 |
{ |
||
426 |
.name = "default", |
||
427 |
.type = AVMEDIA_TYPE_VIDEO, |
||
428 |
.filter_frame = filter_frame, |
||
429 |
}, |
||
430 |
{ NULL } |
||
431 |
}; |
||
432 |
|||
433 |
static const AVFilterPad colorbalance_outputs[] = { |
||
434 |
{ |
||
435 |
.name = "default", |
||
436 |
.type = AVMEDIA_TYPE_VIDEO, |
||
437 |
.config_props = config_output, |
||
438 |
}, |
||
439 |
{ NULL } |
||
440 |
}; |
||
441 |
|||
442 |
AVFilter ff_vf_colorbalance = { |
||
443 |
.name = "colorbalance", |
||
444 |
.description = NULL_IF_CONFIG_SMALL("Adjust the color balance."), |
||
445 |
.priv_size = sizeof(ColorBalanceContext), |
||
446 |
.priv_class = &colorbalance_class, |
||
447 |
.query_formats = query_formats, |
||
448 |
.inputs = colorbalance_inputs, |
||
449 |
.outputs = colorbalance_outputs, |
||
450 |
.flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS, |
||
451 |
.process_command = ff_filter_process_command, |
||
452 |
}; |
Generated by: GCOVR (Version 4.2) |