| Line | Branch | Exec | Source |
|---|---|---|---|
| 1 | /* | ||
| 2 | * Copyright (c) 2016 Floris Sluiter | ||
| 3 | * | ||
| 4 | * This file is part of FFmpeg. | ||
| 5 | * | ||
| 6 | * FFmpeg is free software; you can redistribute it and/or | ||
| 7 | * modify it under the terms of the GNU Lesser General Public | ||
| 8 | * License as published by the Free Software Foundation; either | ||
| 9 | * version 2.1 of the License, or (at your option) any later version. | ||
| 10 | * | ||
| 11 | * FFmpeg is distributed in the hope that it will be useful, | ||
| 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 14 | * Lesser General Public License for more details. | ||
| 15 | * | ||
| 16 | * You should have received a copy of the GNU Lesser General Public | ||
| 17 | * License along with FFmpeg; if not, write to the Free Software | ||
| 18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
| 19 | */ | ||
| 20 | |||
| 21 | /** | ||
| 22 | * @file | ||
| 23 | * Pixel remap filter | ||
| 24 | * This filter copies a source frame to a target frame pixel by pixel. | ||
| 25 | * It remaps each pixel to a new x,y destination based on two map files, xmap and ymap. | ||
| 26 | * The map files are passed as parameters and are in PGM format (P2 or P5), | ||
| 27 | * where the values are the y (row) / x (column) coordinates in the source frame. | ||
| 28 | * The *target* frame dimensions are taken from the map file dimensions: they are | ||
| 29 | * specified in the map file header and reflected in the number of data values. | ||
| 30 | * The dimensions of ymap and xmap must be equal. Data values must be zero or positive. | ||
| 31 | * Any data value in the ymap or xmap that is higher | ||
| 32 | * than the *source* frame height or width is silently ignored, leaving a | ||
| 33 | * blank/chromakey pixel. This can safely be used as a feature to create overlays. | ||
| 34 | * | ||
| 35 | * Algorithm digest: | ||
| 36 | * Target_frame[y][x] = Source_frame[ ymap[y][x] ][ xmap[y][x] ]; | ||
| 37 | */ | ||
| 38 | |||
| 39 | #include "libavutil/colorspace.h" | ||
| 40 | #include "libavutil/imgutils.h" | ||
| 41 | #include "libavutil/pixdesc.h" | ||
| 42 | #include "libavutil/opt.h" | ||
| 43 | #include "avfilter.h" | ||
| 44 | #include "drawutils.h" | ||
| 45 | #include "filters.h" | ||
| 46 | #include "formats.h" | ||
| 47 | #include "framesync.h" | ||
| 48 | #include "video.h" | ||
| 49 | |||
| 50 | typedef struct RemapContext { | ||
| 51 | const AVClass *class; | ||
| 52 | int format; | ||
| 53 | |||
| 54 | int nb_planes; | ||
| 55 | int nb_components; | ||
| 56 | int step; | ||
| 57 | uint8_t fill_rgba[4]; | ||
| 58 | int fill_color[4]; | ||
| 59 | |||
| 60 | FFFrameSync fs; | ||
| 61 | |||
| 62 | int (*remap_slice)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs); | ||
| 63 | } RemapContext; | ||
| 64 | |||
| 65 | #define OFFSET(x) offsetof(RemapContext, x) | ||
| 66 | #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM | ||
| 67 | |||
| 68 | static const AVOption remap_options[] = { | ||
| 69 | { "format", "set output format", OFFSET(format), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, FLAGS, .unit = "format" }, | ||
| 70 | { "color", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, .flags = FLAGS, .unit = "format" }, | ||
| 71 | { "gray", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, .flags = FLAGS, .unit = "format" }, | ||
| 72 | { "fill", "set the color of the unmapped pixels", OFFSET(fill_rgba), AV_OPT_TYPE_COLOR, {.str="black"}, .flags = FLAGS }, | ||
| 73 | { NULL } | ||
| 74 | }; | ||
| 75 | |||
| 76 | AVFILTER_DEFINE_CLASS(remap); | ||
| 77 | |||
| 78 | typedef struct ThreadData { | ||
| 79 | AVFrame *in, *xin, *yin, *out; | ||
| 80 | int nb_planes; | ||
| 81 | int nb_components; | ||
| 82 | int step; | ||
| 83 | } ThreadData; | ||
| 84 | |||
| 85 | ✗ | static int query_formats(const AVFilterContext *ctx, | |
| 86 | AVFilterFormatsConfig **cfg_in, | ||
| 87 | AVFilterFormatsConfig **cfg_out) | ||
| 88 | { | ||
| 89 | ✗ | const RemapContext *s = ctx->priv; | |
| 90 | static const enum AVPixelFormat pix_fmts[] = { | ||
| 91 | AV_PIX_FMT_YUVA444P, | ||
| 92 | AV_PIX_FMT_YUV444P, | ||
| 93 | AV_PIX_FMT_YUVJ444P, | ||
| 94 | AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24, | ||
| 95 | AV_PIX_FMT_ARGB, AV_PIX_FMT_ABGR, AV_PIX_FMT_RGBA, AV_PIX_FMT_BGRA, | ||
| 96 | AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP, | ||
| 97 | AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV444P12, | ||
| 98 | AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV444P16, | ||
| 99 | AV_PIX_FMT_YUVA444P9, AV_PIX_FMT_YUVA444P10, AV_PIX_FMT_YUVA444P12, AV_PIX_FMT_YUVA444P16, | ||
| 100 | AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10, AV_PIX_FMT_GBRP12, | ||
| 101 | AV_PIX_FMT_GBRP14, AV_PIX_FMT_GBRP16, | ||
| 102 | AV_PIX_FMT_GBRAP10, AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRAP16, | ||
| 103 | AV_PIX_FMT_RGB48, AV_PIX_FMT_BGR48, | ||
| 104 | AV_PIX_FMT_RGBA64, AV_PIX_FMT_BGRA64, | ||
| 105 | AV_PIX_FMT_NONE | ||
| 106 | }; | ||
| 107 | static const enum AVPixelFormat gray_pix_fmts[] = { | ||
| 108 | AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY9, | ||
| 109 | AV_PIX_FMT_GRAY10, AV_PIX_FMT_GRAY12, | ||
| 110 | AV_PIX_FMT_GRAY14, AV_PIX_FMT_GRAY16, | ||
| 111 | AV_PIX_FMT_NONE | ||
| 112 | }; | ||
| 113 | static const enum AVPixelFormat map_fmts[] = { | ||
| 114 | AV_PIX_FMT_GRAY16, | ||
| 115 | AV_PIX_FMT_NONE | ||
| 116 | }; | ||
| 117 | ✗ | AVFilterFormats *pix_formats = NULL, *map_formats = NULL; | |
| 118 | int ret; | ||
| 119 | |||
| 120 | ✗ | pix_formats = ff_make_format_list(s->format ? gray_pix_fmts : pix_fmts); | |
| 121 | ✗ | if ((ret = ff_formats_ref(pix_formats, &cfg_in[0]->formats)) < 0 || | |
| 122 | ✗ | (ret = ff_formats_ref(pix_formats, &cfg_out[0]->formats)) < 0) | |
| 123 | ✗ | return ret; | |
| 124 | |||
| 125 | ✗ | map_formats = ff_make_format_list(map_fmts); | |
| 126 | ✗ | if ((ret = ff_formats_ref(map_formats, &cfg_in[1]->formats)) < 0) | |
| 127 | ✗ | return ret; | |
| 128 | ✗ | return ff_formats_ref(map_formats, &cfg_in[2]->formats); | |
| 129 | } | ||
| 130 | |||
| 131 | /** | ||
| 132 | * The remap_planar algorithm expects planes of the same size. | ||
| 133 | * Pixels are copied from source to target using: | ||
| 134 | * Target_frame[y][x] = Source_frame[ ymap[y][x] ][ xmap[y][x] ]; | ||
| 135 | */ | ||
| 136 | #define DEFINE_REMAP_PLANAR_FUNC(name, bits, div) \ | ||
| 137 | static int remap_planar##bits##_##name##_slice(AVFilterContext *ctx, void *arg, \ | ||
| 138 | int jobnr, int nb_jobs) \ | ||
| 139 | { \ | ||
| 140 | RemapContext *s = ctx->priv; \ | ||
| 141 | const ThreadData *td = arg; \ | ||
| 142 | const AVFrame *in = td->in; \ | ||
| 143 | const AVFrame *xin = td->xin; \ | ||
| 144 | const AVFrame *yin = td->yin; \ | ||
| 145 | const AVFrame *out = td->out; \ | ||
| 146 | const int slice_start = (out->height * jobnr ) / nb_jobs; \ | ||
| 147 | const int slice_end = (out->height * (jobnr+1)) / nb_jobs; \ | ||
| 148 | const int xlinesize = xin->linesize[0] / 2; \ | ||
| 149 | const int ylinesize = yin->linesize[0] / 2; \ | ||
| 150 | int x , y, plane; \ | ||
| 151 | \ | ||
| 152 | for (plane = 0; plane < td->nb_planes ; plane++) { \ | ||
| 153 | const int dlinesize = out->linesize[plane] / div; \ | ||
| 154 | const uint##bits##_t *src = (const uint##bits##_t *)in->data[plane]; \ | ||
| 155 | uint##bits##_t *dst = (uint##bits##_t *)out->data[plane] + slice_start * dlinesize; \ | ||
| 156 | const int slinesize = in->linesize[plane] / div; \ | ||
| 157 | const uint16_t *xmap = (const uint16_t *)xin->data[0] + slice_start * xlinesize; \ | ||
| 158 | const uint16_t *ymap = (const uint16_t *)yin->data[0] + slice_start * ylinesize; \ | ||
| 159 | const int color = s->fill_color[plane]; \ | ||
| 160 | \ | ||
| 161 | for (y = slice_start; y < slice_end; y++) { \ | ||
| 162 | for (x = 0; x < out->width; x++) { \ | ||
| 163 | if (ymap[x] < in->height && xmap[x] < in->width) { \ | ||
| 164 | dst[x] = src[ymap[x] * slinesize + xmap[x]]; \ | ||
| 165 | } else { \ | ||
| 166 | dst[x] = color; \ | ||
| 167 | } \ | ||
| 168 | } \ | ||
| 169 | dst += dlinesize; \ | ||
| 170 | xmap += xlinesize; \ | ||
| 171 | ymap += ylinesize; \ | ||
| 172 | } \ | ||
| 173 | } \ | ||
| 174 | \ | ||
| 175 | return 0; \ | ||
| 176 | } | ||
| 177 | |||
| 178 | ✗ | DEFINE_REMAP_PLANAR_FUNC(nearest, 8, 1) | |
| 179 | ✗ | DEFINE_REMAP_PLANAR_FUNC(nearest, 16, 2) | |
| 180 | |||
| 181 | /** | ||
| 182 | * The remap_packed algorithm expects pixels with both padded bits (step) and | ||
| 183 | * the number of components correctly set. | ||
| 184 | * Pixels are copied from source to target using: | ||
| 185 | * Target_frame[y][x] = Source_frame[ ymap[y][x] ][ xmap[y][x] ]; | ||
| 186 | */ | ||
| 187 | #define DEFINE_REMAP_PACKED_FUNC(name, bits, div) \ | ||
| 188 | static int remap_packed##bits##_##name##_slice(AVFilterContext *ctx, void *arg, \ | ||
| 189 | int jobnr, int nb_jobs) \ | ||
| 190 | { \ | ||
| 191 | RemapContext *s = ctx->priv; \ | ||
| 192 | const ThreadData *td = arg; \ | ||
| 193 | const AVFrame *in = td->in; \ | ||
| 194 | const AVFrame *xin = td->xin; \ | ||
| 195 | const AVFrame *yin = td->yin; \ | ||
| 196 | const AVFrame *out = td->out; \ | ||
| 197 | const int slice_start = (out->height * jobnr ) / nb_jobs; \ | ||
| 198 | const int slice_end = (out->height * (jobnr+1)) / nb_jobs; \ | ||
| 199 | const int dlinesize = out->linesize[0] / div; \ | ||
| 200 | const int slinesize = in->linesize[0] / div; \ | ||
| 201 | const int xlinesize = xin->linesize[0] / 2; \ | ||
| 202 | const int ylinesize = yin->linesize[0] / 2; \ | ||
| 203 | const uint##bits##_t *src = (const uint##bits##_t *)in->data[0]; \ | ||
| 204 | uint##bits##_t *dst = (uint##bits##_t *)out->data[0] + slice_start * dlinesize; \ | ||
| 205 | const uint16_t *xmap = (const uint16_t *)xin->data[0] + slice_start * xlinesize; \ | ||
| 206 | const uint16_t *ymap = (const uint16_t *)yin->data[0] + slice_start * ylinesize; \ | ||
| 207 | const int step = td->step / div; \ | ||
| 208 | int c, x, y; \ | ||
| 209 | \ | ||
| 210 | for (y = slice_start; y < slice_end; y++) { \ | ||
| 211 | for (x = 0; x < out->width; x++) { \ | ||
| 212 | for (c = 0; c < td->nb_components; c++) { \ | ||
| 213 | if (ymap[x] < in->height && xmap[x] < in->width) { \ | ||
| 214 | dst[x * step + c] = src[ymap[x] * slinesize + xmap[x] * step + c]; \ | ||
| 215 | } else { \ | ||
| 216 | dst[x * step + c] = s->fill_color[c]; \ | ||
| 217 | } \ | ||
| 218 | } \ | ||
| 219 | } \ | ||
| 220 | dst += dlinesize; \ | ||
| 221 | xmap += xlinesize; \ | ||
| 222 | ymap += ylinesize; \ | ||
| 223 | } \ | ||
| 224 | \ | ||
| 225 | return 0; \ | ||
| 226 | } | ||
| 227 | |||
| 228 | ✗ | DEFINE_REMAP_PACKED_FUNC(nearest, 8, 1) | |
| 229 | ✗ | DEFINE_REMAP_PACKED_FUNC(nearest, 16, 2) | |
| 230 | |||
| 231 | ✗ | static int config_input(AVFilterLink *inlink) | |
| 232 | { | ||
| 233 | ✗ | AVFilterContext *ctx = inlink->dst; | |
| 234 | ✗ | RemapContext *s = ctx->priv; | |
| 235 | ✗ | const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format); | |
| 236 | ✗ | int depth = desc->comp[0].depth; | |
| 237 | ✗ | int is_rgb = !!(desc->flags & AV_PIX_FMT_FLAG_RGB); | |
| 238 | ✗ | int factor = 1 << (depth - 8); | |
| 239 | uint8_t rgba_map[4]; | ||
| 240 | |||
| 241 | ✗ | ff_fill_rgba_map(rgba_map, inlink->format); | |
| 242 | ✗ | s->nb_planes = av_pix_fmt_count_planes(inlink->format); | |
| 243 | ✗ | s->nb_components = desc->nb_components; | |
| 244 | |||
| 245 | ✗ | if (is_rgb) { | |
| 246 | ✗ | s->fill_color[rgba_map[0]] = s->fill_rgba[0] * factor; | |
| 247 | ✗ | s->fill_color[rgba_map[1]] = s->fill_rgba[1] * factor; | |
| 248 | ✗ | s->fill_color[rgba_map[2]] = s->fill_rgba[2] * factor; | |
| 249 | ✗ | s->fill_color[rgba_map[3]] = s->fill_rgba[3] * factor; | |
| 250 | } else { | ||
| 251 | ✗ | s->fill_color[0] = RGB_TO_Y_BT709(s->fill_rgba[0], s->fill_rgba[1], s->fill_rgba[2]) * factor; | |
| 252 | ✗ | s->fill_color[1] = RGB_TO_U_BT709(s->fill_rgba[0], s->fill_rgba[1], s->fill_rgba[2], 0) * factor; | |
| 253 | ✗ | s->fill_color[2] = RGB_TO_V_BT709(s->fill_rgba[0], s->fill_rgba[1], s->fill_rgba[2], 0) * factor; | |
| 254 | ✗ | s->fill_color[3] = s->fill_rgba[3] * factor; | |
| 255 | } | ||
| 256 | |||
| 257 | ✗ | if (depth == 8) { | |
| 258 | ✗ | if (s->nb_planes > 1 || s->nb_components == 1) { | |
| 259 | ✗ | s->remap_slice = remap_planar8_nearest_slice; | |
| 260 | } else { | ||
| 261 | ✗ | s->remap_slice = remap_packed8_nearest_slice; | |
| 262 | } | ||
| 263 | } else { | ||
| 264 | ✗ | if (s->nb_planes > 1 || s->nb_components == 1) { | |
| 265 | ✗ | s->remap_slice = remap_planar16_nearest_slice; | |
| 266 | } else { | ||
| 267 | ✗ | s->remap_slice = remap_packed16_nearest_slice; | |
| 268 | } | ||
| 269 | } | ||
| 270 | |||
| 271 | ✗ | s->step = av_get_padded_bits_per_pixel(desc) >> 3; | |
| 272 | ✗ | return 0; | |
| 273 | } | ||
| 274 | |||
| 275 | ✗ | static int process_frame(FFFrameSync *fs) | |
| 276 | { | ||
| 277 | ✗ | AVFilterContext *ctx = fs->parent; | |
| 278 | ✗ | RemapContext *s = fs->opaque; | |
| 279 | ✗ | AVFilterLink *outlink = ctx->outputs[0]; | |
| 280 | AVFrame *out, *in, *xpic, *ypic; | ||
| 281 | int ret; | ||
| 282 | |||
| 283 | ✗ | if ((ret = ff_framesync_get_frame(&s->fs, 0, &in, 0)) < 0 || | |
| 284 | ✗ | (ret = ff_framesync_get_frame(&s->fs, 1, &xpic, 0)) < 0 || | |
| 285 | ✗ | (ret = ff_framesync_get_frame(&s->fs, 2, &ypic, 0)) < 0) | |
| 286 | ✗ | return ret; | |
| 287 | |||
| 288 | { | ||
| 289 | ThreadData td; | ||
| 290 | |||
| 291 | ✗ | out = ff_get_video_buffer(outlink, outlink->w, outlink->h); | |
| 292 | ✗ | if (!out) | |
| 293 | ✗ | return AVERROR(ENOMEM); | |
| 294 | ✗ | av_frame_copy_props(out, in); | |
| 295 | |||
| 296 | ✗ | td.in = in; | |
| 297 | ✗ | td.xin = xpic; | |
| 298 | ✗ | td.yin = ypic; | |
| 299 | ✗ | td.out = out; | |
| 300 | ✗ | td.nb_planes = s->nb_planes; | |
| 301 | ✗ | td.nb_components = s->nb_components; | |
| 302 | ✗ | td.step = s->step; | |
| 303 | ✗ | ff_filter_execute(ctx, s->remap_slice, &td, NULL, | |
| 304 | ✗ | FFMIN(outlink->h, ff_filter_get_nb_threads(ctx))); | |
| 305 | } | ||
| 306 | ✗ | out->pts = av_rescale_q(s->fs.pts, s->fs.time_base, outlink->time_base); | |
| 307 | |||
| 308 | ✗ | return ff_filter_frame(outlink, out); | |
| 309 | } | ||
| 310 | |||
| 311 | ✗ | static int config_output(AVFilterLink *outlink) | |
| 312 | { | ||
| 313 | ✗ | AVFilterContext *ctx = outlink->src; | |
| 314 | ✗ | RemapContext *s = ctx->priv; | |
| 315 | ✗ | AVFilterLink *srclink = ctx->inputs[0]; | |
| 316 | ✗ | AVFilterLink *xlink = ctx->inputs[1]; | |
| 317 | ✗ | AVFilterLink *ylink = ctx->inputs[2]; | |
| 318 | ✗ | FilterLink *il = ff_filter_link(srclink); | |
| 319 | ✗ | FilterLink *ol = ff_filter_link(outlink); | |
| 320 | FFFrameSyncIn *in; | ||
| 321 | int ret; | ||
| 322 | |||
| 323 | ✗ | if (xlink->w != ylink->w || xlink->h != ylink->h) { | |
| 324 | ✗ | av_log(ctx, AV_LOG_ERROR, "Second input link %s parameters " | |
| 325 | "(size %dx%d) do not match the corresponding " | ||
| 326 | "third input link %s parameters (%dx%d)\n", | ||
| 327 | ✗ | ctx->input_pads[1].name, xlink->w, xlink->h, | |
| 328 | ✗ | ctx->input_pads[2].name, ylink->w, ylink->h); | |
| 329 | ✗ | return AVERROR(EINVAL); | |
| 330 | } | ||
| 331 | |||
| 332 | ✗ | outlink->w = xlink->w; | |
| 333 | ✗ | outlink->h = xlink->h; | |
| 334 | ✗ | outlink->sample_aspect_ratio = srclink->sample_aspect_ratio; | |
| 335 | ✗ | ol->frame_rate = il->frame_rate; | |
| 336 | |||
| 337 | ✗ | ret = ff_framesync_init(&s->fs, ctx, 3); | |
| 338 | ✗ | if (ret < 0) | |
| 339 | ✗ | return ret; | |
| 340 | |||
| 341 | ✗ | in = s->fs.in; | |
| 342 | ✗ | in[0].time_base = srclink->time_base; | |
| 343 | ✗ | in[1].time_base = xlink->time_base; | |
| 344 | ✗ | in[2].time_base = ylink->time_base; | |
| 345 | ✗ | in[0].sync = 2; | |
| 346 | ✗ | in[0].before = EXT_STOP; | |
| 347 | ✗ | in[0].after = EXT_STOP; | |
| 348 | ✗ | in[1].sync = 1; | |
| 349 | ✗ | in[1].before = EXT_NULL; | |
| 350 | ✗ | in[1].after = EXT_INFINITY; | |
| 351 | ✗ | in[2].sync = 1; | |
| 352 | ✗ | in[2].before = EXT_NULL; | |
| 353 | ✗ | in[2].after = EXT_INFINITY; | |
| 354 | ✗ | s->fs.opaque = s; | |
| 355 | ✗ | s->fs.on_event = process_frame; | |
| 356 | |||
| 357 | ✗ | ret = ff_framesync_configure(&s->fs); | |
| 358 | ✗ | outlink->time_base = s->fs.time_base; | |
| 359 | |||
| 360 | ✗ | return ret; | |
| 361 | } | ||
| 362 | |||
| 363 | ✗ | static int activate(AVFilterContext *ctx) | |
| 364 | { | ||
| 365 | ✗ | RemapContext *s = ctx->priv; | |
| 366 | ✗ | return ff_framesync_activate(&s->fs); | |
| 367 | } | ||
| 368 | |||
| 369 | ✗ | static av_cold void uninit(AVFilterContext *ctx) | |
| 370 | { | ||
| 371 | ✗ | RemapContext *s = ctx->priv; | |
| 372 | |||
| 373 | ✗ | ff_framesync_uninit(&s->fs); | |
| 374 | ✗ | } | |
| 375 | |||
| 376 | static const AVFilterPad remap_inputs[] = { | ||
| 377 | { | ||
| 378 | .name = "source", | ||
| 379 | .type = AVMEDIA_TYPE_VIDEO, | ||
| 380 | .config_props = config_input, | ||
| 381 | }, | ||
| 382 | { | ||
| 383 | .name = "xmap", | ||
| 384 | .type = AVMEDIA_TYPE_VIDEO, | ||
| 385 | }, | ||
| 386 | { | ||
| 387 | .name = "ymap", | ||
| 388 | .type = AVMEDIA_TYPE_VIDEO, | ||
| 389 | }, | ||
| 390 | }; | ||
| 391 | |||
| 392 | static const AVFilterPad remap_outputs[] = { | ||
| 393 | { | ||
| 394 | .name = "default", | ||
| 395 | .type = AVMEDIA_TYPE_VIDEO, | ||
| 396 | .config_props = config_output, | ||
| 397 | }, | ||
| 398 | }; | ||
| 399 | |||
| 400 | const FFFilter ff_vf_remap = { | ||
| 401 | .p.name = "remap", | ||
| 402 | .p.description = NULL_IF_CONFIG_SMALL("Remap pixels."), | ||
| 403 | .p.priv_class = &remap_class, | ||
| 404 | .p.flags = AVFILTER_FLAG_SLICE_THREADS, | ||
| 405 | .priv_size = sizeof(RemapContext), | ||
| 406 | .uninit = uninit, | ||
| 407 | .activate = activate, | ||
| 408 | FILTER_INPUTS(remap_inputs), | ||
| 409 | FILTER_OUTPUTS(remap_outputs), | ||
| 410 | FILTER_QUERY_FUNC2(query_formats), | ||
| 411 | }; | ||
| 412 |||
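
For orientation, the algorithm digest repeated in the comments above reduces to a per-plane nearest-neighbour copy with a bounds check. The sketch below is a minimal single-plane, 8-bit reduction of what `remap_planar8_nearest_slice` does, without the slice threading and FFFrameSync plumbing; the function name `remap_plane8` and its parameter layout are illustrative assumptions for this example, not part of the filter's API.

```c
#include <stdint.h>
#include <stddef.h>

/* Illustrative sketch: nearest-neighbour remap of one 8-bit plane.
 * dst[y][x] = src[ ymap[y][x] ][ xmap[y][x] ]; map entries that point
 * outside the source plane fall back to `fill`, mirroring the filter's
 * handling of unmapped pixels. All linesizes are in elements. */
static void remap_plane8(uint8_t *dst, ptrdiff_t dst_linesize,
                         const uint8_t *src, ptrdiff_t src_linesize,
                         int src_w, int src_h,
                         const uint16_t *xmap, const uint16_t *ymap,
                         ptrdiff_t map_linesize,
                         int dst_w, int dst_h, uint8_t fill)
{
    for (int y = 0; y < dst_h; y++) {
        for (int x = 0; x < dst_w; x++) {
            if (ymap[x] < src_h && xmap[x] < src_w)
                dst[x] = src[ymap[x] * src_linesize + xmap[x]];
            else
                dst[x] = fill; /* out-of-range map value: blank/fill pixel */
        }
        dst  += dst_linesize;
        xmap += map_linesize;
        ymap += map_linesize;
    }
}
```

In the filter itself, the three inputs (source, xmap, ymap) are synchronized through FFFrameSync, and the packed variants walk `td->nb_components` interleaved components per pixel instead of separate planes. A typical invocation would look something like `ffmpeg -i in.mp4 -i xmap.pgm -i ymap.pgm -lavfi '[0][1][2]remap' out.mp4`, with both maps sized to the desired output.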