Line | Branch | Exec | Source |
---|---|---|---|
1 | /* | ||
2 | * Copyright (c) 2018 Paul B Mahol | ||
3 | * | ||
4 | * This file is part of FFmpeg. | ||
5 | * | ||
6 | * FFmpeg is free software; you can redistribute it and/or | ||
7 | * modify it under the terms of the GNU Lesser General Public | ||
8 | * License as published by the Free Software Foundation; either | ||
9 | * version 2.1 of the License, or (at your option) any later version. | ||
10 | * | ||
11 | * FFmpeg is distributed in the hope that it will be useful, | ||
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
14 | * Lesser General Public License for more details. | ||
15 | * | ||
16 | * You should have received a copy of the GNU Lesser General Public | ||
17 | * License along with FFmpeg; if not, write to the Free Software | ||
18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
19 | */ | ||
20 | |||
21 | #include "libavutil/imgutils.h" | ||
22 | #include "libavutil/opt.h" | ||
23 | #include "libavutil/pixdesc.h" | ||
24 | |||
25 | #include "avfilter.h" | ||
26 | #include "filters.h" | ||
27 | #include "video.h" | ||
28 | |||
29 | typedef struct ChromaShiftContext { | ||
30 | const AVClass *class; | ||
31 | int cbh, cbv; | ||
32 | int crh, crv; | ||
33 | int rh, rv; | ||
34 | int gh, gv; | ||
35 | int bh, bv; | ||
36 | int ah, av; | ||
37 | int edge; | ||
38 | |||
39 | int nb_planes; | ||
40 | int depth; | ||
41 | int height[4]; | ||
42 | int width[4]; | ||
43 | int linesize[4]; | ||
44 | |||
45 | AVFrame *in; | ||
46 | |||
47 | int is_rgbashift; | ||
48 | int (*filter_slice[2])(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs); | ||
49 | } ChromaShiftContext; | ||
50 | |||
51 | #define DEFINE_SMEAR(depth, type, div) \ | ||
52 | static int smear_slice ## depth(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) \ | ||
53 | { \ | ||
54 | ChromaShiftContext *s = ctx->priv; \ | ||
55 | AVFrame *in = s->in; \ | ||
56 | AVFrame *out = arg; \ | ||
57 | const int sulinesize = in->linesize[1] / div; \ | ||
58 | const int svlinesize = in->linesize[2] / div; \ | ||
59 | const int ulinesize = out->linesize[1] / div; \ | ||
60 | const int vlinesize = out->linesize[2] / div; \ | ||
61 | const int cbh = s->cbh; \ | ||
62 | const int cbv = s->cbv; \ | ||
63 | const int crh = s->crh; \ | ||
64 | const int crv = s->crv; \ | ||
65 | const int h = s->height[1]; \ | ||
66 | const int w = s->width[1]; \ | ||
67 | const int slice_start = (h * jobnr) / nb_jobs; \ | ||
68 | const int slice_end = (h * (jobnr+1)) / nb_jobs; \ | ||
69 | const type *su = (const type *)in->data[1]; \ | ||
70 | const type *sv = (const type *)in->data[2]; \ | ||
71 | type *du = (type *)out->data[1] + slice_start * ulinesize; \ | ||
72 | type *dv = (type *)out->data[2] + slice_start * vlinesize; \ | ||
73 | \ | ||
74 | for (int y = slice_start; y < slice_end; y++) { \ | ||
75 | const int duy = av_clip(y - cbv, 0, h-1) * sulinesize; \ | ||
76 | const int dvy = av_clip(y - crv, 0, h-1) * svlinesize; \ | ||
77 | \ | ||
78 | for (int x = 0; x < w; x++) { \ | ||
79 | du[x] = su[av_clip(x - cbh, 0, w - 1) + duy]; \ | ||
80 | dv[x] = sv[av_clip(x - crh, 0, w - 1) + dvy]; \ | ||
81 | } \ | ||
82 | \ | ||
83 | du += ulinesize; \ | ||
84 | dv += vlinesize; \ | ||
85 | } \ | ||
86 | \ | ||
87 | return 0; \ | ||
88 | } | ||
89 | |||
90 | 4/4 | 96645 | DEFINE_SMEAR(8, uint8_t, 1) |
91 | ✗ | DEFINE_SMEAR(16, uint16_t, 2) | |
92 | |||
93 | #define DEFINE_WRAP(depth, type, div) \ | ||
94 | static int wrap_slice ## depth(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) \ | ||
95 | { \ | ||
96 | ChromaShiftContext *s = ctx->priv; \ | ||
97 | AVFrame *in = s->in; \ | ||
98 | AVFrame *out = arg; \ | ||
99 | const int sulinesize = in->linesize[1] / div; \ | ||
100 | const int svlinesize = in->linesize[2] / div; \ | ||
101 | const int ulinesize = out->linesize[1] / div; \ | ||
102 | const int vlinesize = out->linesize[2] / div; \ | ||
103 | const int cbh = s->cbh; \ | ||
104 | const int cbv = s->cbv; \ | ||
105 | const int crh = s->crh; \ | ||
106 | const int crv = s->crv; \ | ||
107 | const int h = s->height[1]; \ | ||
108 | const int w = s->width[1]; \ | ||
109 | const int slice_start = (h * jobnr) / nb_jobs; \ | ||
110 | const int slice_end = (h * (jobnr+1)) / nb_jobs; \ | ||
111 | const type *su = (const type *)in->data[1]; \ | ||
112 | const type *sv = (const type *)in->data[2]; \ | ||
113 | type *du = (type *)out->data[1] + slice_start * ulinesize; \ | ||
114 | type *dv = (type *)out->data[2] + slice_start * vlinesize; \ | ||
115 | \ | ||
116 | for (int y = slice_start; y < slice_end; y++) { \ | ||
117 | int uy = (y - cbv) % h; \ | ||
118 | int vy = (y - crv) % h; \ | ||
119 | \ | ||
120 | if (uy < 0) \ | ||
121 | uy += h; \ | ||
122 | if (vy < 0) \ | ||
123 | vy += h; \ | ||
124 | \ | ||
125 | for (int x = 0; x < w; x++) { \ | ||
126 | int ux = (x - cbh) % w; \ | ||
127 | int vx = (x - crh) % w; \ | ||
128 | \ | ||
129 | if (ux < 0) \ | ||
130 | ux += w; \ | ||
131 | if (vx < 0) \ | ||
132 | vx += w; \ | ||
133 | \ | ||
134 | du[x] = su[ux + uy * sulinesize]; \ | ||
135 | dv[x] = sv[vx + vy * svlinesize]; \ | ||
136 | } \ | ||
137 | \ | ||
138 | du += ulinesize; \ | ||
139 | dv += vlinesize; \ | ||
140 | } \ | ||
141 | \ | ||
142 | return 0; \ | ||
143 | } | ||
144 | |||
145 | 10/12 | 96645 | DEFINE_WRAP(8, uint8_t, 1) |
146 | ✗ | DEFINE_WRAP(16, uint16_t, 2) | |
147 | |||
148 | #define DEFINE_RGBASMEAR(depth, type, div) \ | ||
149 | static int rgbasmear_slice ## depth(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) \ | ||
150 | { \ | ||
151 | ChromaShiftContext *s = ctx->priv; \ | ||
152 | AVFrame *in = s->in; \ | ||
153 | AVFrame *out = arg; \ | ||
154 | const int srlinesize = in->linesize[2] / div; \ | ||
155 | const int sglinesize = in->linesize[0] / div; \ | ||
156 | const int sblinesize = in->linesize[1] / div; \ | ||
157 | const int salinesize = in->linesize[3] / div; \ | ||
158 | const int rlinesize = out->linesize[2] / div; \ | ||
159 | const int glinesize = out->linesize[0] / div; \ | ||
160 | const int blinesize = out->linesize[1] / div; \ | ||
161 | const int alinesize = out->linesize[3] / div; \ | ||
162 | const int rh = s->rh; \ | ||
163 | const int rv = s->rv; \ | ||
164 | const int gh = s->gh; \ | ||
165 | const int gv = s->gv; \ | ||
166 | const int bh = s->bh; \ | ||
167 | const int bv = s->bv; \ | ||
168 | const int ah = s->ah; \ | ||
169 | const int av = s->av; \ | ||
170 | const int h = s->height[1]; \ | ||
171 | const int w = s->width[1]; \ | ||
172 | const int slice_start = (h * jobnr) / nb_jobs; \ | ||
173 | const int slice_end = (h * (jobnr+1)) / nb_jobs; \ | ||
174 | const type *sr = (const type *)in->data[2]; \ | ||
175 | const type *sg = (const type *)in->data[0]; \ | ||
176 | const type *sb = (const type *)in->data[1]; \ | ||
177 | const type *sa = (const type *)in->data[3]; \ | ||
178 | type *dr = (type *)out->data[2] + slice_start * rlinesize; \ | ||
179 | type *dg = (type *)out->data[0] + slice_start * glinesize; \ | ||
180 | type *db = (type *)out->data[1] + slice_start * blinesize; \ | ||
181 | type *da = (type *)out->data[3] + slice_start * alinesize; \ | ||
182 | \ | ||
183 | for (int y = slice_start; y < slice_end; y++) { \ | ||
184 | const int ry = av_clip(y - rv, 0, h-1) * srlinesize; \ | ||
185 | const int gy = av_clip(y - gv, 0, h-1) * sglinesize; \ | ||
186 | const int by = av_clip(y - bv, 0, h-1) * sblinesize; \ | ||
187 | int ay; \ | ||
188 | \ | ||
189 | for (int x = 0; x < w; x++) { \ | ||
190 | dr[x] = sr[av_clip(x - rh, 0, w - 1) + ry]; \ | ||
191 | dg[x] = sg[av_clip(x - gh, 0, w - 1) + gy]; \ | ||
192 | db[x] = sb[av_clip(x - bh, 0, w - 1) + by]; \ | ||
193 | } \ | ||
194 | \ | ||
195 | dr += rlinesize; \ | ||
196 | dg += glinesize; \ | ||
197 | db += blinesize; \ | ||
198 | \ | ||
199 | if (s->nb_planes < 4) \ | ||
200 | continue; \ | ||
201 | ay = av_clip(y - av, 0, h-1) * salinesize; \ | ||
202 | for (int x = 0; x < w; x++) { \ | ||
203 | da[x] = sa[av_clip(x - ah, 0, w - 1) + ay]; \ | ||
204 | } \ | ||
205 | \ | ||
206 | da += alinesize; \ | ||
207 | } \ | ||
208 | \ | ||
209 | return 0; \ | ||
210 | } | ||
211 | |||
212 | ✗ | DEFINE_RGBASMEAR(8, uint8_t, 1) | |
213 | ✗ | DEFINE_RGBASMEAR(16, uint16_t, 2) | |
214 | |||
215 | #define DEFINE_RGBAWRAP(depth, type, div) \ | ||
216 | static int rgbawrap_slice ## depth(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) \ | ||
217 | { \ | ||
218 | ChromaShiftContext *s = ctx->priv; \ | ||
219 | AVFrame *in = s->in; \ | ||
220 | AVFrame *out = arg; \ | ||
221 | const int srlinesize = in->linesize[2] / div; \ | ||
222 | const int sglinesize = in->linesize[0] / div; \ | ||
223 | const int sblinesize = in->linesize[1] / div; \ | ||
224 | const int salinesize = in->linesize[3] / div; \ | ||
225 | const int rlinesize = out->linesize[2] / div; \ | ||
226 | const int glinesize = out->linesize[0] / div; \ | ||
227 | const int blinesize = out->linesize[1] / div; \ | ||
228 | const int alinesize = out->linesize[3] / div; \ | ||
229 | const int rh = s->rh; \ | ||
230 | const int rv = s->rv; \ | ||
231 | const int gh = s->gh; \ | ||
232 | const int gv = s->gv; \ | ||
233 | const int bh = s->bh; \ | ||
234 | const int bv = s->bv; \ | ||
235 | const int ah = s->ah; \ | ||
236 | const int av = s->av; \ | ||
237 | const int h = s->height[1]; \ | ||
238 | const int w = s->width[1]; \ | ||
239 | const int slice_start = (h * jobnr) / nb_jobs; \ | ||
240 | const int slice_end = (h * (jobnr+1)) / nb_jobs; \ | ||
241 | const type *sr = (const type *)in->data[2]; \ | ||
242 | const type *sg = (const type *)in->data[0]; \ | ||
243 | const type *sb = (const type *)in->data[1]; \ | ||
244 | const type *sa = (const type *)in->data[3]; \ | ||
245 | type *dr = (type *)out->data[2] + slice_start * rlinesize; \ | ||
246 | type *dg = (type *)out->data[0] + slice_start * glinesize; \ | ||
247 | type *db = (type *)out->data[1] + slice_start * blinesize; \ | ||
248 | type *da = (type *)out->data[3] + slice_start * alinesize; \ | ||
249 | \ | ||
250 | for (int y = slice_start; y < slice_end; y++) { \ | ||
251 | int ry = (y - rv) % h; \ | ||
252 | int gy = (y - gv) % h; \ | ||
253 | int by = (y - bv) % h; \ | ||
254 | \ | ||
255 | if (ry < 0) \ | ||
256 | ry += h; \ | ||
257 | if (gy < 0) \ | ||
258 | gy += h; \ | ||
259 | if (by < 0) \ | ||
260 | by += h; \ | ||
261 | \ | ||
262 | for (int x = 0; x < w; x++) { \ | ||
263 | int rx = (x - rh) % w; \ | ||
264 | int gx = (x - gh) % w; \ | ||
265 | int bx = (x - bh) % w; \ | ||
266 | \ | ||
267 | if (rx < 0) \ | ||
268 | rx += w; \ | ||
269 | if (gx < 0) \ | ||
270 | gx += w; \ | ||
271 | if (bx < 0) \ | ||
272 | bx += w; \ | ||
273 | dr[x] = sr[rx + ry * srlinesize]; \ | ||
274 | dg[x] = sg[gx + gy * sglinesize]; \ | ||
275 | db[x] = sb[bx + by * sblinesize]; \ | ||
276 | } \ | ||
277 | \ | ||
278 | dr += rlinesize; \ | ||
279 | dg += glinesize; \ | ||
280 | db += blinesize; \ | ||
281 | \ | ||
282 | if (s->nb_planes < 4) \ | ||
283 | continue; \ | ||
284 | for (int x = 0; x < w; x++) { \ | ||
285 | int ax = (x - ah) % w; \ | ||
286 | int ay = (y - av) % h; \ | ||
287 | \ | ||
288 | if (ax < 0) \ | ||
289 | ax += w; \ | ||
290 | if (ay < 0) \ | ||
291 | ay += h; \ | ||
292 | da[x] = sa[ax + ay * salinesize]; \ | ||
293 | } \ | ||
294 | \ | ||
295 | da += alinesize; \ | ||
296 | } \ | ||
297 | \ | ||
298 | return 0; \ | ||
299 | } | ||
300 | |||
301 | ✗ | DEFINE_RGBAWRAP(8, uint8_t, 1) | |
302 | ✗ | DEFINE_RGBAWRAP(16, uint16_t, 2) | |
303 | |||
304 | 10 | static int filter_frame(AVFilterLink *inlink, AVFrame *in) | |
305 | { | ||
306 | 10 | AVFilterContext *ctx = inlink->dst; | |
307 | 10 | AVFilterLink *outlink = ctx->outputs[0]; | |
308 | 10 | ChromaShiftContext *s = ctx->priv; | |
309 | AVFrame *out; | ||
310 | |||
311 | 10 | out = ff_get_video_buffer(outlink, outlink->w, outlink->h); | |
312 | 1/2 | 10 | if (!out) { |
313 | ✗ | av_frame_free(&in); | |
314 | ✗ | return AVERROR(ENOMEM); | |
315 | } | ||
316 | 10 | av_frame_copy_props(out, in); | |
317 | |||
318 | 10 | s->in = in; | |
319 | 1/2 | 10 | if (!s->is_rgbashift) { |
320 | 10 | av_image_copy_plane(out->data[0], | |
321 | out->linesize[0], | ||
322 | 10 | in->data[0], in->linesize[0], | |
323 | s->linesize[0], s->height[0]); | ||
324 | } | ||
325 | 10 | ff_filter_execute(ctx, s->filter_slice[s->edge], out, NULL, | |
326 | 1/2 | 10 | FFMIN3(s->height[1], |
327 | s->height[2], | ||
328 | ff_filter_get_nb_threads(ctx))); | ||
329 | 10 | s->in = NULL; | |
330 | 10 | av_frame_free(&in); | |
331 | 10 | return ff_filter_frame(outlink, out); | |
332 | } | ||
333 | |||
334 | 2 | static int config_input(AVFilterLink *inlink) | |
335 | { | ||
336 | 2 | AVFilterContext *ctx = inlink->dst; | |
337 | 2 | ChromaShiftContext *s = ctx->priv; | |
338 | 2 | const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format); | |
339 | |||
340 | 2 | s->is_rgbashift = !strcmp(ctx->filter->name, "rgbashift"); | |
341 | 2 | s->depth = desc->comp[0].depth; | |
342 | 2 | s->nb_planes = desc->nb_components; | |
343 | 1/2 | 2 | if (s->is_rgbashift) { |
344 | ✗ | s->filter_slice[1] = s->depth > 8 ? rgbawrap_slice16 : rgbawrap_slice8; | |
345 | ✗ | s->filter_slice[0] = s->depth > 8 ? rgbasmear_slice16 : rgbasmear_slice8; | |
346 | } else { | ||
347 | 1/2 | 2 | s->filter_slice[1] = s->depth > 8 ? wrap_slice16 : wrap_slice8; |
348 | 1/2 | 2 | s->filter_slice[0] = s->depth > 8 ? smear_slice16 : smear_slice8; |
349 | } | ||
350 | 2 | s->height[1] = s->height[2] = AV_CEIL_RSHIFT(inlink->h, desc->log2_chroma_h); | |
351 | 2 | s->height[0] = s->height[3] = inlink->h; | |
352 | 2 | s->width[1] = s->width[2] = AV_CEIL_RSHIFT(inlink->w, desc->log2_chroma_w); | |
353 | 2 | s->width[0] = s->width[3] = inlink->w; | |
354 | |||
355 | 2 | return av_image_fill_linesizes(s->linesize, inlink->format, inlink->w); | |
356 | } | ||
357 | |||
358 | #define OFFSET(x) offsetof(ChromaShiftContext, x) | ||
359 | #define VFR AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_RUNTIME_PARAM | ||
360 | |||
361 | static const AVOption chromashift_options[] = { | ||
362 | { "cbh", "shift chroma-blue horizontally", OFFSET(cbh), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
363 | { "cbv", "shift chroma-blue vertically", OFFSET(cbv), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
364 | { "crh", "shift chroma-red horizontally", OFFSET(crh), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
365 | { "crv", "shift chroma-red vertically", OFFSET(crv), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
366 | { "edge", "set edge operation", OFFSET(edge), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, .flags = VFR, .unit = "edge" }, | ||
367 | { "smear", 0, 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, .flags = VFR, .unit = "edge" }, | ||
368 | { "wrap", 0, 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, .flags = VFR, .unit = "edge" }, | ||
369 | { NULL }, | ||
370 | }; | ||
371 | |||
372 | static const AVFilterPad inputs[] = { | ||
373 | { | ||
374 | .name = "default", | ||
375 | .type = AVMEDIA_TYPE_VIDEO, | ||
376 | .filter_frame = filter_frame, | ||
377 | .config_props = config_input, | ||
378 | }, | ||
379 | }; | ||
380 | |||
381 | static const enum AVPixelFormat yuv_pix_fmts[] = { | ||
382 | AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA420P, | ||
383 | AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ440P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ411P, | ||
384 | AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P, | ||
385 | AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV444P9, | ||
386 | AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV440P10, | ||
387 | AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10, | ||
388 | AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV440P12, | ||
389 | AV_PIX_FMT_YUVA422P12, AV_PIX_FMT_YUVA444P12, | ||
390 | AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV420P14, | ||
391 | AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16, | ||
392 | AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16, | ||
393 | AV_PIX_FMT_NONE | ||
394 | }; | ||
395 | |||
396 | AVFILTER_DEFINE_CLASS(chromashift); | ||
397 | |||
398 | const AVFilter ff_vf_chromashift = { | ||
399 | .name = "chromashift", | ||
400 | .description = NULL_IF_CONFIG_SMALL("Shift chroma."), | ||
401 | .priv_size = sizeof(ChromaShiftContext), | ||
402 | .priv_class = &chromashift_class, | ||
403 | FILTER_OUTPUTS(ff_video_default_filterpad), | ||
404 | FILTER_INPUTS(inputs), | ||
405 | FILTER_PIXFMTS_ARRAY(yuv_pix_fmts), | ||
406 | .flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS, | ||
407 | .process_command = ff_filter_process_command, | ||
408 | }; | ||
409 | |||
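
The `DEFINE_SMEAR` macros in the listing handle the `edge=smear` mode: every shifted read position is clamped to the plane bounds with `av_clip`, so samples pushed past an edge simply repeat the nearest edge sample. The standalone sketch below is not FFmpeg code; the plane size and shift values are arbitrary assumptions chosen only to make the clamping visible.

```c
/* Standalone sketch of the "smear" edge mode: clamp the shifted coordinates
 * to the plane, so out-of-range reads repeat the edge sample. */
#include <stdio.h>

static int clip(int v, int lo, int hi)
{
    return v < lo ? lo : (v > hi ? hi : v);
}

int main(void)
{
    enum { W = 8, H = 4 };
    unsigned char src[H][W], dst[H][W];
    const int shift_h = 3, shift_v = 1;   /* plays the role of cbh/cbv */

    /* fill the source with a gradient so the smeared border stands out */
    for (int y = 0; y < H; y++)
        for (int x = 0; x < W; x++)
            src[y][x] = (unsigned char)(y * 10 + x);

    for (int y = 0; y < H; y++) {
        const int sy = clip(y - shift_v, 0, H - 1);            /* clamp row */
        for (int x = 0; x < W; x++)
            dst[y][x] = src[sy][clip(x - shift_h, 0, W - 1)];  /* clamp col */
    }

    for (int y = 0; y < H; y++) {
        for (int x = 0; x < W; x++)
            printf("%3d ", dst[y][x]);
        printf("\n");
    }
    return 0;
}
```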
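`DEFINE_WRAP` implements `edge=wrap` with the remainder operator. Because C's `%` truncates toward zero, `(x - shift) % w` can be negative, which is why the macro adds `w` (or `h`) back before indexing. A minimal sketch of that index calculation; `wrap_index` is a hypothetical helper name used only for illustration.

```c
/* Standalone sketch of the "wrap" edge mode's index math. */
#include <assert.h>

static int wrap_index(int pos, int shift, int size)
{
    int i = (pos - shift) % size;
    if (i < 0)          /* e.g. -3 % 8 == -3 in C, so map it to size - 3 */
        i += size;
    return i;
}

int main(void)
{
    assert(wrap_index(0,  3, 8) == 5);  /* reads wrap around to the far edge */
    assert(wrap_index(5,  3, 8) == 2);  /* in-range shifts are plain offsets */
    assert(wrap_index(0, -2, 8) == 2);  /* negative shifts move the other way */
    return 0;
}
```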
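`filter_frame` runs the selected `filter_slice` callback through `ff_filter_execute`, capping the job count at `FFMIN3(s->height[1], s->height[2], ff_filter_get_nb_threads(ctx))` so no job ends up with zero rows. Each job then derives its own row range from `jobnr`/`nb_jobs`. The standalone sketch below, with illustrative values for `h` and `nb_jobs`, shows that this integer split tiles the rows exactly once.

```c
/* Standalone sketch of the slice partitioning used by the *_slice jobs. */
#include <stdio.h>

int main(void)
{
    const int h = 135, nb_jobs = 4;   /* arbitrary plane height and thread count */
    int covered = 0;

    for (int jobnr = 0; jobnr < nb_jobs; jobnr++) {
        const int slice_start = (h * jobnr) / nb_jobs;
        const int slice_end   = (h * (jobnr + 1)) / nb_jobs;

        printf("job %d: rows %d..%d\n", jobnr, slice_start, slice_end - 1);
        covered += slice_end - slice_start;
    }
    printf("rows covered: %d of %d\n", covered, h);  /* prints 135 of 135 */
    return 0;
}
```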
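`config_input` sizes the chroma planes with `AV_CEIL_RSHIFT`, a right shift that rounds up so odd luma dimensions still yield a chroma plane large enough to cover the image. The sketch below is standalone; the macro body mirrors libavutil's non-constant path and the 1919x1081 dimensions are just an example.

```c
/* Standalone sketch of the rounding-up right shift used to size chroma planes. */
#include <stdio.h>

#define CEIL_RSHIFT(a, shift) (-((-(a)) >> (shift)))

int main(void)
{
    const int w = 1919, h = 1081;                    /* odd dimensions on purpose */
    const int log2_chroma_w = 1, log2_chroma_h = 1;  /* 4:2:0 subsampling */

    printf("chroma plane: %dx%d\n",
           CEIL_RSHIFT(w, log2_chroma_w),   /* 960, not 959 */
           CEIL_RSHIFT(h, log2_chroma_h));  /* 541, not 540 */
    return 0;
}
```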
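Both filters are driven entirely by their `AVOption` tables, so they can be instantiated by name through the public libavfilter API. A minimal sketch, assuming FFmpeg development headers and libraries are installed; the option string and the instance name "shift" are arbitrary, and the graph is not connected to a source or sink here.

```c
/* Build with something like:
 *   cc demo.c $(pkg-config --cflags --libs libavfilter libavutil)
 * Creates a chromashift instance from an option string, then frees it. */
#include <stdio.h>
#include <libavfilter/avfilter.h>

int main(void)
{
    AVFilterGraph *graph = avfilter_graph_alloc();
    const AVFilter *flt  = avfilter_get_by_name("chromashift");
    AVFilterContext *ctx = NULL;
    int ret;

    if (!graph || !flt) {
        fprintf(stderr, "allocation failed or filter not found\n");
        avfilter_graph_free(&graph);
        return 1;
    }

    /* Option names match the chromashift_options table; values are arbitrary. */
    ret = avfilter_graph_create_filter(&ctx, flt, "shift",
                                       "cbh=4:cbv=-4:edge=wrap", NULL, graph);
    if (ret < 0)
        fprintf(stderr, "could not create filter: %d\n", ret);
    else
        printf("created %s\n", ctx->filter->name);

    avfilter_graph_free(&graph);  /* also frees the filter context */
    return ret < 0;
}
```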
410 | static const enum AVPixelFormat rgb_pix_fmts[] = { | ||
411 | AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP, AV_PIX_FMT_GBRP9, | ||
412 | AV_PIX_FMT_GBRP10, AV_PIX_FMT_GBRP12, | ||
413 | AV_PIX_FMT_GBRP14, AV_PIX_FMT_GBRP16, | ||
414 | AV_PIX_FMT_GBRAP10, AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRAP16, | ||
415 | AV_PIX_FMT_NONE | ||
416 | }; | ||
417 | |||
418 | static const AVOption rgbashift_options[] = { | ||
419 | { "rh", "shift red horizontally", OFFSET(rh), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
420 | { "rv", "shift red vertically", OFFSET(rv), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
421 | { "gh", "shift green horizontally", OFFSET(gh), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
422 | { "gv", "shift green vertically", OFFSET(gv), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
423 | { "bh", "shift blue horizontally", OFFSET(bh), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
424 | { "bv", "shift blue vertically", OFFSET(bv), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
425 | { "ah", "shift alpha horizontally", OFFSET(ah), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
426 | { "av", "shift alpha vertically", OFFSET(av), AV_OPT_TYPE_INT, {.i64=0}, -255, 255, .flags = VFR }, | ||
427 | { "edge", "set edge operation", OFFSET(edge), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, .flags = VFR, .unit = "edge" }, | ||
428 | { "smear", 0, 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, .flags = VFR, .unit = "edge" }, | ||
429 | { "wrap", 0, 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, .flags = VFR, .unit = "edge" }, | ||
430 | { NULL }, | ||
431 | }; | ||
432 | |||
433 | AVFILTER_DEFINE_CLASS(rgbashift); | ||
434 | |||
435 | const AVFilter ff_vf_rgbashift = { | ||
436 | .name = "rgbashift", | ||
437 | .description = NULL_IF_CONFIG_SMALL("Shift RGBA."), | ||
438 | .priv_size = sizeof(ChromaShiftContext), | ||
439 | .priv_class = &rgbashift_class, | ||
440 | FILTER_OUTPUTS(ff_video_default_filterpad), | ||
441 | FILTER_INPUTS(inputs), | ||
442 | FILTER_PIXFMTS_ARRAY(rgb_pix_fmts), | ||
443 | .flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS, | ||
444 | .process_command = ff_filter_process_command, | ||
445 | }; | ||
446 |