LCOV - code coverage report
Current view: top level - libavfilter - vf_overlay.c (source / functions)
Test: coverage.info
Date: 2017-12-15 11:05:35
                 Hit    Total    Coverage
Lines:           341      414      82.4 %
Functions:        28       30      93.3 %

          Line data    Source code
       1             : /*
       2             :  * Copyright (c) 2010 Stefano Sabatini
       3             :  * Copyright (c) 2010 Baptiste Coudurier
       4             :  * Copyright (c) 2007 Bobby Bingham
       5             :  *
       6             :  * This file is part of FFmpeg.
       7             :  *
       8             :  * FFmpeg is free software; you can redistribute it and/or
       9             :  * modify it under the terms of the GNU Lesser General Public
      10             :  * License as published by the Free Software Foundation; either
      11             :  * version 2.1 of the License, or (at your option) any later version.
      12             :  *
      13             :  * FFmpeg is distributed in the hope that it will be useful,
      14             :  * but WITHOUT ANY WARRANTY; without even the implied warranty of
      15             :  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
      16             :  * Lesser General Public License for more details.
      17             :  *
      18             :  * You should have received a copy of the GNU Lesser General Public
      19             :  * License along with FFmpeg; if not, write to the Free Software
      20             :  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
      21             :  */
      22             : 
      23             : /**
      24             :  * @file
      25             :  * overlay one video on top of another
      26             :  */
      27             : 
      28             : #include "avfilter.h"
      29             : #include "formats.h"
      30             : #include "libavutil/common.h"
      31             : #include "libavutil/eval.h"
      32             : #include "libavutil/avstring.h"
      33             : #include "libavutil/pixdesc.h"
      34             : #include "libavutil/imgutils.h"
      35             : #include "libavutil/mathematics.h"
      36             : #include "libavutil/opt.h"
      37             : #include "libavutil/timestamp.h"
      38             : #include "internal.h"
      39             : #include "drawutils.h"
      40             : #include "framesync.h"
      41             : #include "video.h"
      42             : 
      43             : static const char *const var_names[] = {
      44             :     "main_w",    "W", ///< width  of the main    video
      45             :     "main_h",    "H", ///< height of the main    video
      46             :     "overlay_w", "w", ///< width  of the overlay video
      47             :     "overlay_h", "h", ///< height of the overlay video
      48             :     "hsub",
      49             :     "vsub",
      50             :     "x",
      51             :     "y",
       52             :     "n",            ///< frame number
      53             :     "pos",          ///< position in the file
      54             :     "t",            ///< timestamp expressed in seconds
      55             :     NULL
      56             : };
      57             : 
      58             : enum var_name {
      59             :     VAR_MAIN_W,    VAR_MW,
      60             :     VAR_MAIN_H,    VAR_MH,
      61             :     VAR_OVERLAY_W, VAR_OW,
      62             :     VAR_OVERLAY_H, VAR_OH,
      63             :     VAR_HSUB,
      64             :     VAR_VSUB,
      65             :     VAR_X,
      66             :     VAR_Y,
      67             :     VAR_N,
      68             :     VAR_POS,
      69             :     VAR_T,
      70             :     VAR_VARS_NB
      71             : };
      72             : 
      73             : #define MAIN    0
      74             : #define OVERLAY 1
      75             : 
      76             : #define R 0
      77             : #define G 1
      78             : #define B 2
      79             : #define A 3
      80             : 
      81             : #define Y 0
      82             : #define U 1
      83             : #define V 2
      84             : 
      85             : enum EvalMode {
      86             :     EVAL_MODE_INIT,
      87             :     EVAL_MODE_FRAME,
      88             :     EVAL_MODE_NB
      89             : };
      90             : 
      91             : enum OverlayFormat {
      92             :     OVERLAY_FORMAT_YUV420,
      93             :     OVERLAY_FORMAT_YUV422,
      94             :     OVERLAY_FORMAT_YUV444,
      95             :     OVERLAY_FORMAT_RGB,
      96             :     OVERLAY_FORMAT_GBRP,
      97             :     OVERLAY_FORMAT_AUTO,
      98             :     OVERLAY_FORMAT_NB
      99             : };
     100             : 
     101             : typedef struct OverlayContext {
     102             :     const AVClass *class;
     103             :     int x, y;                   ///< position of overlaid picture
     104             : 
     105             :     uint8_t main_is_packed_rgb;
     106             :     uint8_t main_rgba_map[4];
     107             :     uint8_t main_has_alpha;
     108             :     uint8_t overlay_is_packed_rgb;
     109             :     uint8_t overlay_rgba_map[4];
     110             :     uint8_t overlay_has_alpha;
     111             :     int format;                 ///< OverlayFormat
     112             :     int eval_mode;              ///< EvalMode
     113             : 
     114             :     FFFrameSync fs;
     115             : 
     116             :     int main_pix_step[4];       ///< steps per pixel for each plane of the main output
     117             :     int overlay_pix_step[4];    ///< steps per pixel for each plane of the overlay
     118             :     int hsub, vsub;             ///< chroma subsampling values
     119             :     const AVPixFmtDescriptor *main_desc; ///< format descriptor for main input
     120             : 
     121             :     double var_values[VAR_VARS_NB];
     122             :     char *x_expr, *y_expr;
     123             : 
     124             :     AVExpr *x_pexpr, *y_pexpr;
     125             : 
     126             :     void (*blend_image)(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y);
     127             : } OverlayContext;
     128             : 
     129          30 : static av_cold void uninit(AVFilterContext *ctx)
     130             : {
     131          30 :     OverlayContext *s = ctx->priv;
     132             : 
     133          30 :     ff_framesync_uninit(&s->fs);
     134          30 :     av_expr_free(s->x_pexpr); s->x_pexpr = NULL;
     135          30 :     av_expr_free(s->y_pexpr); s->y_pexpr = NULL;
     136          30 : }
     137             : 
     138        1446 : static inline int normalize_xy(double d, int chroma_sub)
     139             : {
     140        1446 :     if (isnan(d))
     141           0 :         return INT_MAX;
     142        1446 :     return (int)d & ~((1 << chroma_sub) - 1);
     143             : }
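
As an illustration of what the masking above does (a standalone editorial sketch, not part of this report; the sample values are assumptions chosen for the demo):

    #include <limits.h>
    #include <math.h>
    #include <stdio.h>

    /* copy of normalize_xy() above, so the demo is self-contained */
    static int normalize_xy(double d, int chroma_sub)
    {
        if (isnan(d))
            return INT_MAX;
        return (int)d & ~((1 << chroma_sub) - 1);
    }

    int main(void)
    {
        /* with a 4:2:0 main input, chroma_sub is 1, so the low bit is cleared
           and the overlay origin stays aligned to a chroma sample */
        printf("%d\n", normalize_xy(13.7, 1)); /* prints 12 */
        /* a NaN expression result maps to INT_MAX, i.e. the overlay lands off-frame */
        printf("%d\n", normalize_xy(NAN, 0));  /* prints INT_MAX */
        return 0;
    }
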
     144             : 
     145         723 : static void eval_expr(AVFilterContext *ctx)
     146             : {
     147         723 :     OverlayContext *s = ctx->priv;
     148             : 
     149         723 :     s->var_values[VAR_X] = av_expr_eval(s->x_pexpr, s->var_values, NULL);
     150         723 :     s->var_values[VAR_Y] = av_expr_eval(s->y_pexpr, s->var_values, NULL);
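                     :     /* note: x is evaluated a second time so that an x expression which
                     :        references y sees the y value computed on the line above */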
     151         723 :     s->var_values[VAR_X] = av_expr_eval(s->x_pexpr, s->var_values, NULL);
     152         723 :     s->x = normalize_xy(s->var_values[VAR_X], s->hsub);
     153         723 :     s->y = normalize_xy(s->var_values[VAR_Y], s->vsub);
     154         723 : }
     155             : 
     156          40 : static int set_expr(AVExpr **pexpr, const char *expr, const char *option, void *log_ctx)
     157             : {
     158             :     int ret;
     159          40 :     AVExpr *old = NULL;
     160             : 
     161          40 :     if (*pexpr)
     162           0 :         old = *pexpr;
     163          40 :     ret = av_expr_parse(pexpr, expr, var_names,
     164             :                         NULL, NULL, NULL, NULL, 0, log_ctx);
     165          40 :     if (ret < 0) {
     166           0 :         av_log(log_ctx, AV_LOG_ERROR,
     167             :                "Error when evaluating the expression '%s' for %s\n",
     168             :                expr, option);
     169           0 :         *pexpr = old;
     170           0 :         return ret;
     171             :     }
     172             : 
     173          40 :     av_expr_free(old);
     174          40 :     return 0;
     175             : }
     176             : 
     177           0 : static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
     178             :                            char *res, int res_len, int flags)
     179             : {
     180           0 :     OverlayContext *s = ctx->priv;
     181             :     int ret;
     182             : 
     183           0 :     if      (!strcmp(cmd, "x"))
     184           0 :         ret = set_expr(&s->x_pexpr, args, cmd, ctx);
     185           0 :     else if (!strcmp(cmd, "y"))
     186           0 :         ret = set_expr(&s->y_pexpr, args, cmd, ctx);
     187             :     else
     188           0 :         ret = AVERROR(ENOSYS);
     189             : 
     190           0 :     if (ret < 0)
     191           0 :         return ret;
     192             : 
     193           0 :     if (s->eval_mode == EVAL_MODE_INIT) {
     194           0 :         eval_expr(ctx);
     195           0 :         av_log(ctx, AV_LOG_VERBOSE, "x:%f xi:%d y:%f yi:%d\n",
     196             :                s->var_values[VAR_X], s->x,
     197             :                s->var_values[VAR_Y], s->y);
     198             :     }
     199           0 :     return ret;
     200             : }
     201             : 
     202             : static const enum AVPixelFormat alpha_pix_fmts[] = {
     203             :     AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P,
     204             :     AV_PIX_FMT_ARGB, AV_PIX_FMT_ABGR, AV_PIX_FMT_RGBA,
     205             :     AV_PIX_FMT_BGRA, AV_PIX_FMT_GBRAP, AV_PIX_FMT_NONE
     206             : };
     207             : 
     208          20 : static int query_formats(AVFilterContext *ctx)
     209             : {
     210          20 :     OverlayContext *s = ctx->priv;
     211             : 
     212             :     /* overlay formats contains alpha, for avoiding conversion with alpha information loss */
     213             :     static const enum AVPixelFormat main_pix_fmts_yuv420[] = {
     214             :         AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVA420P,
     215             :         AV_PIX_FMT_NV12, AV_PIX_FMT_NV21,
     216             :         AV_PIX_FMT_NONE
     217             :     };
     218             :     static const enum AVPixelFormat overlay_pix_fmts_yuv420[] = {
     219             :         AV_PIX_FMT_YUVA420P, AV_PIX_FMT_NONE
     220             :     };
     221             : 
     222             :     static const enum AVPixelFormat main_pix_fmts_yuv422[] = {
     223             :         AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_NONE
     224             :     };
     225             :     static const enum AVPixelFormat overlay_pix_fmts_yuv422[] = {
     226             :         AV_PIX_FMT_YUVA422P, AV_PIX_FMT_NONE
     227             :     };
     228             : 
     229             :     static const enum AVPixelFormat main_pix_fmts_yuv444[] = {
     230             :         AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVA444P, AV_PIX_FMT_NONE
     231             :     };
     232             :     static const enum AVPixelFormat overlay_pix_fmts_yuv444[] = {
     233             :         AV_PIX_FMT_YUVA444P, AV_PIX_FMT_NONE
     234             :     };
     235             : 
     236             :     static const enum AVPixelFormat main_pix_fmts_gbrp[] = {
     237             :         AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP, AV_PIX_FMT_NONE
     238             :     };
     239             :     static const enum AVPixelFormat overlay_pix_fmts_gbrp[] = {
     240             :         AV_PIX_FMT_GBRAP, AV_PIX_FMT_NONE
     241             :     };
     242             : 
     243             :     static const enum AVPixelFormat main_pix_fmts_rgb[] = {
     244             :         AV_PIX_FMT_ARGB,  AV_PIX_FMT_RGBA,
     245             :         AV_PIX_FMT_ABGR,  AV_PIX_FMT_BGRA,
     246             :         AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24,
     247             :         AV_PIX_FMT_NONE
     248             :     };
     249             :     static const enum AVPixelFormat overlay_pix_fmts_rgb[] = {
     250             :         AV_PIX_FMT_ARGB,  AV_PIX_FMT_RGBA,
     251             :         AV_PIX_FMT_ABGR,  AV_PIX_FMT_BGRA,
     252             :         AV_PIX_FMT_NONE
     253             :     };
     254             : 
     255          20 :     AVFilterFormats *main_formats = NULL;
     256          20 :     AVFilterFormats *overlay_formats = NULL;
     257             :     int ret;
     258             : 
     259          20 :     switch (s->format) {
     260           9 :     case OVERLAY_FORMAT_YUV420:
     261          18 :         if (!(main_formats    = ff_make_format_list(main_pix_fmts_yuv420)) ||
     262           9 :             !(overlay_formats = ff_make_format_list(overlay_pix_fmts_yuv420))) {
     263           0 :                 ret = AVERROR(ENOMEM);
     264           0 :                 goto fail;
     265             :             }
     266           9 :         break;
     267           3 :     case OVERLAY_FORMAT_YUV422:
     268           6 :         if (!(main_formats    = ff_make_format_list(main_pix_fmts_yuv422)) ||
     269           3 :             !(overlay_formats = ff_make_format_list(overlay_pix_fmts_yuv422))) {
     270           0 :                 ret = AVERROR(ENOMEM);
     271           0 :                 goto fail;
     272             :             }
     273           3 :         break;
     274           3 :     case OVERLAY_FORMAT_YUV444:
     275           6 :         if (!(main_formats    = ff_make_format_list(main_pix_fmts_yuv444)) ||
     276           3 :             !(overlay_formats = ff_make_format_list(overlay_pix_fmts_yuv444))) {
     277           0 :                 ret = AVERROR(ENOMEM);
     278           0 :                 goto fail;
     279             :             }
     280           3 :         break;
     281           3 :     case OVERLAY_FORMAT_RGB:
     282           6 :         if (!(main_formats    = ff_make_format_list(main_pix_fmts_rgb)) ||
     283           3 :             !(overlay_formats = ff_make_format_list(overlay_pix_fmts_rgb))) {
     284           0 :                 ret = AVERROR(ENOMEM);
     285           0 :                 goto fail;
     286             :             }
     287           3 :         break;
     288           2 :     case OVERLAY_FORMAT_GBRP:
     289           4 :         if (!(main_formats    = ff_make_format_list(main_pix_fmts_gbrp)) ||
     290           2 :             !(overlay_formats = ff_make_format_list(overlay_pix_fmts_gbrp))) {
     291           0 :                 ret = AVERROR(ENOMEM);
     292           0 :                 goto fail;
     293             :             }
     294           2 :         break;
     295           0 :     case OVERLAY_FORMAT_AUTO:
     296           0 :         if (!(main_formats    = ff_make_format_list(alpha_pix_fmts))) {
     297           0 :                 ret = AVERROR(ENOMEM);
     298           0 :                 goto fail;
     299             :             }
     300           0 :         break;
     301           0 :     default:
     302           0 :         av_assert0(0);
     303             :     }
     304             : 
     305          20 :     if (s->format == OVERLAY_FORMAT_AUTO) {
     306           0 :         ret = ff_set_common_formats(ctx, main_formats);
     307           0 :         if (ret < 0)
     308           0 :             goto fail;
     309             :     } else {
     310          40 :         if ((ret = ff_formats_ref(main_formats   , &ctx->inputs[MAIN]->out_formats   )) < 0 ||
     311          40 :             (ret = ff_formats_ref(overlay_formats, &ctx->inputs[OVERLAY]->out_formats)) < 0 ||
     312          20 :             (ret = ff_formats_ref(main_formats   , &ctx->outputs[MAIN]->in_formats   )) < 0)
     313             :                 goto fail;
     314             :     }
     315             : 
     316          20 :     return 0;
     317           0 : fail:
     318           0 :     if (main_formats)
     319           0 :         av_freep(&main_formats->formats);
     320           0 :     av_freep(&main_formats);
     321           0 :     if (overlay_formats)
     322           0 :         av_freep(&overlay_formats->formats);
     323           0 :     av_freep(&overlay_formats);
     324           0 :     return ret;
     325             : }
     326             : 
     327          20 : static int config_input_overlay(AVFilterLink *inlink)
     328             : {
     329          20 :     AVFilterContext *ctx  = inlink->dst;
     330          20 :     OverlayContext  *s = inlink->dst->priv;
     331             :     int ret;
     332          20 :     const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(inlink->format);
     333             : 
     334          20 :     av_image_fill_max_pixsteps(s->overlay_pix_step, NULL, pix_desc);
     335             : 
     336             :     /* Finish the configuration by evaluating the expressions
      337             :        now that both inputs are configured. */
     338          20 :     s->var_values[VAR_MAIN_W   ] = s->var_values[VAR_MW] = ctx->inputs[MAIN   ]->w;
     339          20 :     s->var_values[VAR_MAIN_H   ] = s->var_values[VAR_MH] = ctx->inputs[MAIN   ]->h;
     340          20 :     s->var_values[VAR_OVERLAY_W] = s->var_values[VAR_OW] = ctx->inputs[OVERLAY]->w;
     341          20 :     s->var_values[VAR_OVERLAY_H] = s->var_values[VAR_OH] = ctx->inputs[OVERLAY]->h;
     342          20 :     s->var_values[VAR_HSUB]  = 1<<pix_desc->log2_chroma_w;
     343          20 :     s->var_values[VAR_VSUB]  = 1<<pix_desc->log2_chroma_h;
     344          20 :     s->var_values[VAR_X]     = NAN;
     345          20 :     s->var_values[VAR_Y]     = NAN;
     346          20 :     s->var_values[VAR_N]     = 0;
     347          20 :     s->var_values[VAR_T]     = NAN;
     348          20 :     s->var_values[VAR_POS]   = NAN;
     349             : 
     350          40 :     if ((ret = set_expr(&s->x_pexpr,      s->x_expr,      "x",      ctx)) < 0 ||
     351          20 :         (ret = set_expr(&s->y_pexpr,      s->y_expr,      "y",      ctx)) < 0)
     352           0 :         return ret;
     353             : 
     354          20 :     s->overlay_is_packed_rgb =
     355          20 :         ff_fill_rgba_map(s->overlay_rgba_map, inlink->format) >= 0;
     356          20 :     s->overlay_has_alpha = ff_fmt_is_in(inlink->format, alpha_pix_fmts);
     357             : 
     358          20 :     if (s->eval_mode == EVAL_MODE_INIT) {
     359           0 :         eval_expr(ctx);
     360           0 :         av_log(ctx, AV_LOG_VERBOSE, "x:%f xi:%d y:%f yi:%d\n",
     361             :                s->var_values[VAR_X], s->x,
     362             :                s->var_values[VAR_Y], s->y);
     363             :     }
     364             : 
     365         120 :     av_log(ctx, AV_LOG_VERBOSE,
     366             :            "main w:%d h:%d fmt:%s overlay w:%d h:%d fmt:%s\n",
     367          40 :            ctx->inputs[MAIN]->w, ctx->inputs[MAIN]->h,
     368          20 :            av_get_pix_fmt_name(ctx->inputs[MAIN]->format),
     369          40 :            ctx->inputs[OVERLAY]->w, ctx->inputs[OVERLAY]->h,
     370          20 :            av_get_pix_fmt_name(ctx->inputs[OVERLAY]->format));
     371          20 :     return 0;
     372             : }
     373             : 
     374          20 : static int config_output(AVFilterLink *outlink)
     375             : {
     376          20 :     AVFilterContext *ctx = outlink->src;
     377          20 :     OverlayContext *s = ctx->priv;
     378             :     int ret;
     379             : 
     380          20 :     if ((ret = ff_framesync_init_dualinput(&s->fs, ctx)) < 0)
     381           0 :         return ret;
     382             : 
     383          20 :     outlink->w = ctx->inputs[MAIN]->w;
     384          20 :     outlink->h = ctx->inputs[MAIN]->h;
     385          20 :     outlink->time_base = ctx->inputs[MAIN]->time_base;
     386             : 
     387          20 :     return ff_framesync_configure(&s->fs);
     388             : }
     389             : 
     390             : // divide by 255 and round to nearest
     391             : // apply a fast variant: (X+127)/255 = ((X+127)*257+257)>>16 = ((X+128)*257)>>16
     392             : #define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)
     393             : 
     394             : // calculate the unpremultiplied alpha, applying the general equation:
     395             : // alpha = alpha_overlay / ( (alpha_main + alpha_overlay) - (alpha_main * alpha_overlay) )
     396             : // (((x) << 16) - ((x) << 9) + (x)) is a faster version of: 255 * 255 * x
      397             : // ((((x) + (y)) << 8) - ((x) + (y)) - (y) * (x)) is a faster version of: 255 * (x + y) - x * y
     398             : #define UNPREMULTIPLY_ALPHA(x, y) ((((x) << 16) - ((x) << 9) + (x)) / ((((x) + (y)) << 8) - ((x) + (y)) - (y) * (x)))
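
A minimal standalone sketch (an editorial illustration, not part of the filter) that checks the two macros above against the reference arithmetic; the loop bound and the sample alpha values are assumptions chosen for the demo:

    #include <stdio.h>

    /* copied from vf_overlay.c above */
    #define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)
    #define UNPREMULTIPLY_ALPHA(x, y) ((((x) << 16) - ((x) << 9) + (x)) / ((((x) + (y)) << 8) - ((x) + (y)) - (y) * (x)))

    int main(void)
    {
        /* FAST_DIV255 matches round-to-nearest division by 255 for every
           product of two 8-bit values, i.e. for x in 0..255*255 */
        for (int x = 0; x <= 255 * 255; x++)
            if (FAST_DIV255(x) != (2 * x + 255) / 510)
                printf("FAST_DIV255 mismatch at %d\n", x);

        /* straight alpha for a ~50% overlay alpha over a ~50% main alpha:
           255*255*128 / (255*(128+128) - 128*128) = 8323200 / 48896 = 170 */
        printf("%d\n", UNPREMULTIPLY_ALPHA(128, 128));
        return 0;
    }
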
     399             : 
     400             : /**
      401             :  * Blend the image in src into the destination buffer dst at position (x, y).
     402             :  */
     403             : 
     404          54 : static void blend_image_packed_rgb(AVFilterContext *ctx,
     405             :                                    AVFrame *dst, const AVFrame *src,
     406             :                                    int main_has_alpha, int x, int y)
     407             : {
     408          54 :     OverlayContext *s = ctx->priv;
     409             :     int i, imax, j, jmax;
     410          54 :     const int src_w = src->width;
     411          54 :     const int src_h = src->height;
     412          54 :     const int dst_w = dst->width;
     413          54 :     const int dst_h = dst->height;
      414             :     uint8_t alpha;          ///< the amount of overlay to blend onto the main image
     415          54 :     const int dr = s->main_rgba_map[R];
     416          54 :     const int dg = s->main_rgba_map[G];
     417          54 :     const int db = s->main_rgba_map[B];
     418          54 :     const int da = s->main_rgba_map[A];
     419          54 :     const int dstep = s->main_pix_step[0];
     420          54 :     const int sr = s->overlay_rgba_map[R];
     421          54 :     const int sg = s->overlay_rgba_map[G];
     422          54 :     const int sb = s->overlay_rgba_map[B];
     423          54 :     const int sa = s->overlay_rgba_map[A];
     424          54 :     const int sstep = s->overlay_pix_step[0];
     425             :     uint8_t *S, *sp, *d, *dp;
     426             : 
     427          54 :     i = FFMAX(-y, 0);
     428          54 :     sp = src->data[0] + i     * src->linesize[0];
     429          54 :     dp = dst->data[0] + (y+i) * dst->linesize[0];
     430             : 
     431        4566 :     for (imax = FFMIN(-y + dst_h, src_h); i < imax; i++) {
     432        4512 :         j = FFMAX(-x, 0);
     433        4512 :         S = sp + j     * sstep;
     434        4512 :         d = dp + (x+j) * dstep;
     435             : 
     436      454048 :         for (jmax = FFMIN(-x + dst_w, src_w); j < jmax; j++) {
     437      449536 :             alpha = S[sa];
     438             : 
     439             :             // if the main channel has an alpha channel, alpha has to be calculated
     440             :             // to create an un-premultiplied (straight) alpha value
     441      449536 :             if (main_has_alpha && alpha != 0 && alpha != 255) {
     442       31658 :                 uint8_t alpha_d = d[da];
     443       31658 :                 alpha = UNPREMULTIPLY_ALPHA(alpha, alpha_d);
     444             :             }
     445             : 
     446      449536 :             switch (alpha) {
     447         384 :             case 0:
     448         384 :                 break;
     449      385836 :             case 255:
     450      385836 :                 d[dr] = S[sr];
     451      385836 :                 d[dg] = S[sg];
     452      385836 :                 d[db] = S[sb];
     453      385836 :                 break;
     454       63316 :             default:
     455             :                 // main_value = main_value * (1 - alpha) + overlay_value * alpha
      456             :                 // since alpha is in the range 0-255, the result must be divided by 255
     457       63316 :                 d[dr] = FAST_DIV255(d[dr] * (255 - alpha) + S[sr] * alpha);
     458       63316 :                 d[dg] = FAST_DIV255(d[dg] * (255 - alpha) + S[sg] * alpha);
     459       63316 :                 d[db] = FAST_DIV255(d[db] * (255 - alpha) + S[sb] * alpha);
     460             :             }
     461      449536 :             if (main_has_alpha) {
     462       32768 :                 switch (alpha) {
     463         192 :                 case 0:
     464         192 :                     break;
     465         918 :                 case 255:
     466         918 :                     d[da] = S[sa];
     467         918 :                     break;
     468       31658 :                 default:
     469             :                     // apply alpha compositing: main_alpha += (1-main_alpha) * overlay_alpha
     470       31658 :                     d[da] += FAST_DIV255((255 - d[da]) * S[sa]);
     471             :                 }
     472      416768 :             }
     473      449536 :             d += dstep;
     474      449536 :             S += sstep;
     475             :         }
     476        4512 :         dp += dst->linesize[0];
     477        4512 :         sp += src->linesize[0];
     478             :     }
     479          54 : }
     480             : 
     481        2007 : static av_always_inline void blend_plane(AVFilterContext *ctx,
     482             :                                          AVFrame *dst, const AVFrame *src,
     483             :                                          int src_w, int src_h,
     484             :                                          int dst_w, int dst_h,
     485             :                                          int i, int hsub, int vsub,
     486             :                                          int x, int y,
     487             :                                          int main_has_alpha,
     488             :                                          int dst_plane,
     489             :                                          int dst_offset,
     490             :                                          int dst_step)
     491             : {
     492        2007 :     int src_wp = AV_CEIL_RSHIFT(src_w, hsub);
     493        2007 :     int src_hp = AV_CEIL_RSHIFT(src_h, vsub);
     494        2007 :     int dst_wp = AV_CEIL_RSHIFT(dst_w, hsub);
     495        2007 :     int dst_hp = AV_CEIL_RSHIFT(dst_h, vsub);
     496        2007 :     int yp = y>>vsub;
     497        2007 :     int xp = x>>hsub;
     498             :     uint8_t *s, *sp, *d, *dp, *dap, *a, *da, *ap;
     499             :     int jmax, j, k, kmax;
     500             : 
     501        2007 :     j = FFMAX(-yp, 0);
     502        2007 :     sp = src->data[i] + j         * src->linesize[i];
     503        4014 :     dp = dst->data[dst_plane]
     504        2007 :                       + (yp+j)    * dst->linesize[dst_plane]
     505        2007 :                       + dst_offset;
     506        2007 :     ap = src->data[3] + (j<<vsub) * src->linesize[3];
     507        2007 :     dap = dst->data[3] + ((yp+j) << vsub) * dst->linesize[3];
     508             : 
     509      394591 :     for (jmax = FFMIN(-yp + dst_hp, src_hp); j < jmax; j++) {
     510      392584 :         k = FFMAX(-xp, 0);
     511      392584 :         d = dp + (xp+k) * dst_step;
     512      392584 :         s = sp + k;
     513      392584 :         a = ap + (k<<hsub);
     514      392584 :         da = dap + ((xp+k) << hsub);
     515             : 
     516   183224820 :         for (kmax = FFMIN(-xp + dst_wp, src_wp); k < kmax; k++) {
     517             :             int alpha_v, alpha_h, alpha;
     518             : 
      519             :             // average the overlay alpha over the chroma region to improve quality
     520   182832236 :             if (hsub && vsub && j+1 < src_hp && k+1 < src_wp) {
     521   179045922 :                 alpha = (a[0] + a[src->linesize[3]] +
     522   119363948 :                          a[1] + a[src->linesize[3]+1]) >> 2;
     523   123150262 :             } else if (hsub || vsub) {
     524     1793756 :                 alpha_h = hsub && k+1 < src_wp ?
     525     1602744 :                     (a[0] + a[1]) >> 1 : a[0];
     526     1344220 :                 alpha_v = vsub && j+1 < src_hp ?
     527     1077752 :                     (a[0] + a[src->linesize[3]]) >> 1 : a[0];
     528      896878 :                 alpha = (alpha_v + alpha_h) >> 1;
     529             :             } else
     530   122253384 :                 alpha = a[0];
     531             :             // if the main channel has an alpha channel, alpha has to be calculated
     532             :             // to create an un-premultiplied (straight) alpha value
     533   182832236 :             if (main_has_alpha && alpha != 0 && alpha != 255) {
      534             :                 // average the main alpha over the chroma region to improve quality
     535             :                 uint8_t alpha_d;
     536      300772 :                 if (hsub && vsub && j+1 < src_hp && k+1 < src_wp) {
     537       47064 :                     alpha_d = (da[0] + da[dst->linesize[3]] +
     538       31376 :                                da[1] + da[dst->linesize[3]+1]) >> 2;
     539      285084 :                 } else if (hsub || vsub) {
     540       63640 :                     alpha_h = hsub && k+1 < src_wp ?
     541       63640 :                         (da[0] + da[1]) >> 1 : da[0];
     542       32068 :                     alpha_v = vsub && j+1 < src_hp ?
     543       31820 :                         (da[0] + da[dst->linesize[3]]) >> 1 : da[0];
     544       31820 :                     alpha_d = (alpha_v + alpha_h) >> 1;
     545             :                 } else
     546      253264 :                     alpha_d = da[0];
     547      300772 :                 alpha = UNPREMULTIPLY_ALPHA(alpha, alpha_d);
     548             :             }
     549   182832236 :             *d = FAST_DIV255(*d * (255 - alpha) + *s * alpha);
     550   182832236 :             s++;
     551   182832236 :             d += dst_step;
     552   182832236 :             da += 1 << hsub;
     553   182832236 :             a += 1 << hsub;
     554             :         }
     555      392584 :         dp += dst->linesize[dst_plane];
     556      392584 :         sp += src->linesize[i];
     557      392584 :         ap += (1 << vsub) * src->linesize[3];
     558      392584 :         dap += (1 << vsub) * dst->linesize[3];
     559             :     }
     560        2007 : }
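
Per sample, the inner loop above reduces to the classic alpha blend once the effective alpha has been picked (or averaged for subsampled chroma). A hypothetical standalone helper, not present in the filter, showing just that step:

    #include <stdint.h>
    #include <stdio.h>

    #define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)   /* copied from above */

    /* hypothetical helper: dst = dst * (1 - alpha) + src * alpha, with alpha in 0..255 */
    static uint8_t blend_sample(uint8_t dst, uint8_t src, uint8_t alpha)
    {
        return FAST_DIV255(dst * (255 - alpha) + src * alpha);
    }

    int main(void)
    {
        printf("%d\n", blend_sample(0, 255, 128)); /* prints 128: half-opaque white over black */
        return 0;
    }
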
     561             : 
     562         108 : static inline void alpha_composite(const AVFrame *src, const AVFrame *dst,
     563             :                                    int src_w, int src_h,
     564             :                                    int dst_w, int dst_h,
     565             :                                    int x, int y)
     566             : {
      567             :     uint8_t alpha;          ///< the amount of overlay to blend onto the main image
     568             :     uint8_t *s, *sa, *d, *da;
     569             :     int i, imax, j, jmax;
     570             : 
     571         108 :     i = FFMAX(-y, 0);
     572         108 :     sa = src->data[3] + i     * src->linesize[3];
     573         108 :     da = dst->data[3] + (y+i) * dst->linesize[3];
     574             : 
     575        9132 :     for (imax = FFMIN(-y + dst_h, src_h); i < imax; i++) {
     576        9024 :         j = FFMAX(-x, 0);
     577        9024 :         s = sa + j;
     578        9024 :         d = da + x+j;
     579             : 
     580      908096 :         for (jmax = FFMIN(-x + dst_w, src_w); j < jmax; j++) {
     581      899072 :             alpha = *s;
     582      899072 :             if (alpha != 0 && alpha != 255) {
     583      126632 :                 uint8_t alpha_d = *d;
     584      126632 :                 alpha = UNPREMULTIPLY_ALPHA(alpha, alpha_d);
     585             :             }
     586      899072 :             switch (alpha) {
     587         768 :             case 0:
     588         768 :                 break;
     589      771672 :             case 255:
     590      771672 :                 *d = *s;
     591      771672 :                 break;
     592      126632 :             default:
     593             :                 // apply alpha compositing: main_alpha += (1-main_alpha) * overlay_alpha
     594      126632 :                 *d += FAST_DIV255((255 - *d) * *s);
     595             :             }
     596      899072 :             d += 1;
     597      899072 :             s += 1;
     598             :         }
     599        9024 :         da += dst->linesize[3];
     600        9024 :         sa += src->linesize[3];
     601             :     }
     602         108 : }
     603             : 
     604         665 : static av_always_inline void blend_image_yuv(AVFilterContext *ctx,
     605             :                                              AVFrame *dst, const AVFrame *src,
     606             :                                              int hsub, int vsub,
     607             :                                              int main_has_alpha,
     608             :                                              int x, int y)
     609             : {
     610         665 :     OverlayContext *s = ctx->priv;
     611         665 :     const int src_w = src->width;
     612         665 :     const int src_h = src->height;
     613         665 :     const int dst_w = dst->width;
     614         665 :     const int dst_h = dst->height;
     615             : 
     616        1995 :     blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 0, 0,       0, x, y, main_has_alpha,
     617        1995 :                 s->main_desc->comp[0].plane, s->main_desc->comp[0].offset, s->main_desc->comp[0].step);
     618        1995 :     blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 1, hsub, vsub, x, y, main_has_alpha,
     619        1995 :                 s->main_desc->comp[1].plane, s->main_desc->comp[1].offset, s->main_desc->comp[1].step);
     620        1995 :     blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 2, hsub, vsub, x, y, main_has_alpha,
     621        1995 :                 s->main_desc->comp[2].plane, s->main_desc->comp[2].offset, s->main_desc->comp[2].step);
     622             : 
     623         665 :     if (main_has_alpha)
     624         106 :         alpha_composite(src, dst, src_w, src_h, dst_w, dst_h, x, y);
     625         665 : }
     626             : 
     627           4 : static av_always_inline void blend_image_planar_rgb(AVFilterContext *ctx,
     628             :                                                     AVFrame *dst, const AVFrame *src,
     629             :                                                     int hsub, int vsub,
     630             :                                                     int main_has_alpha,
     631             :                                                     int x, int y)
     632             : {
     633           4 :     OverlayContext *s = ctx->priv;
     634           4 :     const int src_w = src->width;
     635           4 :     const int src_h = src->height;
     636           4 :     const int dst_w = dst->width;
     637           4 :     const int dst_h = dst->height;
     638             : 
     639          12 :     blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 0, 0,       0, x, y, main_has_alpha,
     640          12 :                 s->main_desc->comp[1].plane, s->main_desc->comp[1].offset, s->main_desc->comp[1].step);
     641          12 :     blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 1, hsub, vsub, x, y, main_has_alpha,
     642          12 :                 s->main_desc->comp[2].plane, s->main_desc->comp[2].offset, s->main_desc->comp[2].step);
     643          12 :     blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 2, hsub, vsub, x, y, main_has_alpha,
     644          12 :                 s->main_desc->comp[0].plane, s->main_desc->comp[0].offset, s->main_desc->comp[0].step);
     645             : 
     646           4 :     if (main_has_alpha)
     647           2 :         alpha_composite(src, dst, src_w, src_h, dst_w, dst_h, x, y);
     648           4 : }
     649             : 
     650         555 : static void blend_image_yuv420(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
     651             : {
     652         555 :     blend_image_yuv(ctx, dst, src, 1, 1, 0, x, y);
     653         555 : }
     654             : 
     655           2 : static void blend_image_yuva420(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
     656             : {
     657           2 :     blend_image_yuv(ctx, dst, src, 1, 1, 1, x, y);
     658           2 : }
     659             : 
     660           2 : static void blend_image_yuv422(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
     661             : {
     662           2 :     blend_image_yuv(ctx, dst, src, 1, 0, 0, x, y);
     663           2 : }
     664             : 
     665          52 : static void blend_image_yuva422(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
     666             : {
     667          52 :     blend_image_yuv(ctx, dst, src, 1, 0, 1, x, y);
     668          52 : }
     669             : 
     670           2 : static void blend_image_yuv444(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
     671             : {
     672           2 :     blend_image_yuv(ctx, dst, src, 0, 0, 0, x, y);
     673           2 : }
     674             : 
     675          52 : static void blend_image_yuva444(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
     676             : {
     677          52 :     blend_image_yuv(ctx, dst, src, 0, 0, 1, x, y);
     678          52 : }
     679             : 
     680           2 : static void blend_image_gbrp(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
     681             : {
     682           2 :     blend_image_planar_rgb(ctx, dst, src, 0, 0, 0, x, y);
     683           2 : }
     684             : 
     685           2 : static void blend_image_gbrap(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
     686             : {
     687           2 :     blend_image_planar_rgb(ctx, dst, src, 0, 0, 1, x, y);
     688           2 : }
     689             : 
     690          52 : static void blend_image_rgb(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
     691             : {
     692          52 :     blend_image_packed_rgb(ctx, dst, src, 0, x, y);
     693          52 : }
     694             : 
     695           2 : static void blend_image_rgba(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
     696             : {
     697           2 :     blend_image_packed_rgb(ctx, dst, src, 1, x, y);
     698           2 : }
     699             : 
     700          20 : static int config_input_main(AVFilterLink *inlink)
     701             : {
     702          20 :     OverlayContext *s = inlink->dst->priv;
     703          20 :     const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(inlink->format);
     704             : 
     705          20 :     av_image_fill_max_pixsteps(s->main_pix_step,    NULL, pix_desc);
     706             : 
     707          20 :     s->hsub = pix_desc->log2_chroma_w;
     708          20 :     s->vsub = pix_desc->log2_chroma_h;
     709             : 
     710          20 :     s->main_desc = pix_desc;
     711             : 
     712          20 :     s->main_is_packed_rgb =
     713          20 :         ff_fill_rgba_map(s->main_rgba_map, inlink->format) >= 0;
     714          20 :     s->main_has_alpha = ff_fmt_is_in(inlink->format, alpha_pix_fmts);
     715          20 :     switch (s->format) {
     716           9 :     case OVERLAY_FORMAT_YUV420:
     717           9 :         s->blend_image = s->main_has_alpha ? blend_image_yuva420 : blend_image_yuv420;
     718           9 :         break;
     719           3 :     case OVERLAY_FORMAT_YUV422:
     720           3 :         s->blend_image = s->main_has_alpha ? blend_image_yuva422 : blend_image_yuv422;
     721           3 :         break;
     722           3 :     case OVERLAY_FORMAT_YUV444:
     723           3 :         s->blend_image = s->main_has_alpha ? blend_image_yuva444 : blend_image_yuv444;
     724           3 :         break;
     725           3 :     case OVERLAY_FORMAT_RGB:
     726           3 :         s->blend_image = s->main_has_alpha ? blend_image_rgba : blend_image_rgb;
     727           3 :         break;
     728           2 :     case OVERLAY_FORMAT_GBRP:
     729           2 :         s->blend_image = s->main_has_alpha ? blend_image_gbrap : blend_image_gbrp;
     730           2 :         break;
     731           0 :     case OVERLAY_FORMAT_AUTO:
     732           0 :         switch (inlink->format) {
     733           0 :         case AV_PIX_FMT_YUVA420P:
     734           0 :             s->blend_image = blend_image_yuva420;
     735           0 :             break;
     736           0 :         case AV_PIX_FMT_YUVA422P:
     737           0 :             s->blend_image = blend_image_yuva422;
     738           0 :             break;
     739           0 :         case AV_PIX_FMT_YUVA444P:
     740           0 :             s->blend_image = blend_image_yuva444;
     741           0 :             break;
     742           0 :         case AV_PIX_FMT_ARGB:
     743             :         case AV_PIX_FMT_RGBA:
     744             :         case AV_PIX_FMT_BGRA:
     745             :         case AV_PIX_FMT_ABGR:
     746           0 :             s->blend_image = blend_image_rgba;
     747           0 :             break;
     748           0 :         case AV_PIX_FMT_GBRAP:
     749           0 :             s->blend_image = blend_image_gbrap;
     750           0 :             break;
     751           0 :         default:
     752           0 :             av_assert0(0);
     753             :             break;
     754             :         }
     755           0 :         break;
     756             :     }
     757          20 :     return 0;
     758             : }
     759             : 
     760         723 : static int do_blend(FFFrameSync *fs)
     761             : {
     762         723 :     AVFilterContext *ctx = fs->parent;
     763             :     AVFrame *mainpic, *second;
     764         723 :     OverlayContext *s = ctx->priv;
     765         723 :     AVFilterLink *inlink = ctx->inputs[0];
     766             :     int ret;
     767             : 
     768         723 :     ret = ff_framesync_dualinput_get_writable(fs, &mainpic, &second);
     769         723 :     if (ret < 0)
     770           0 :         return ret;
     771         723 :     if (!second)
     772           0 :         return ff_filter_frame(ctx->outputs[0], mainpic);
     773             : 
     774         723 :     if (s->eval_mode == EVAL_MODE_FRAME) {
     775         723 :         int64_t pos = mainpic->pkt_pos;
     776             : 
     777         723 :         s->var_values[VAR_N] = inlink->frame_count_out;
     778        1446 :         s->var_values[VAR_T] = mainpic->pts == AV_NOPTS_VALUE ?
     779         723 :             NAN : mainpic->pts * av_q2d(inlink->time_base);
     780         723 :         s->var_values[VAR_POS] = pos == -1 ? NAN : pos;
     781             : 
     782         723 :         s->var_values[VAR_OVERLAY_W] = s->var_values[VAR_OW] = second->width;
     783         723 :         s->var_values[VAR_OVERLAY_H] = s->var_values[VAR_OH] = second->height;
     784         723 :         s->var_values[VAR_MAIN_W   ] = s->var_values[VAR_MW] = mainpic->width;
     785         723 :         s->var_values[VAR_MAIN_H   ] = s->var_values[VAR_MH] = mainpic->height;
     786             : 
     787         723 :         eval_expr(ctx);
     788         723 :         av_log(ctx, AV_LOG_DEBUG, "n:%f t:%f pos:%f x:%f xi:%d y:%f yi:%d\n",
     789             :                s->var_values[VAR_N], s->var_values[VAR_T], s->var_values[VAR_POS],
     790             :                s->var_values[VAR_X], s->x,
     791             :                s->var_values[VAR_Y], s->y);
     792             :     }
     793             : 
     794         723 :     if (s->x < mainpic->width  && s->x + second->width  >= 0 ||
     795           0 :         s->y < mainpic->height && s->y + second->height >= 0)
     796         723 :         s->blend_image(ctx, mainpic, second, s->x, s->y);
     797         723 :     return ff_filter_frame(ctx->outputs[0], mainpic);
     798             : }
     799             : 
     800          30 : static av_cold int init(AVFilterContext *ctx)
     801             : {
     802          30 :     OverlayContext *s = ctx->priv;
     803             : 
     804          30 :     s->fs.on_event = do_blend;
     805          30 :     return 0;
     806             : }
     807             : 
     808        1937 : static int activate(AVFilterContext *ctx)
     809             : {
     810        1937 :     OverlayContext *s = ctx->priv;
     811        1937 :     return ff_framesync_activate(&s->fs);
     812             : }
     813             : 
     814             : #define OFFSET(x) offsetof(OverlayContext, x)
     815             : #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
     816             : 
     817             : static const AVOption overlay_options[] = {
     818             :     { "x", "set the x expression", OFFSET(x_expr), AV_OPT_TYPE_STRING, {.str = "0"}, CHAR_MIN, CHAR_MAX, FLAGS },
     819             :     { "y", "set the y expression", OFFSET(y_expr), AV_OPT_TYPE_STRING, {.str = "0"}, CHAR_MIN, CHAR_MAX, FLAGS },
      820             :     { "eof_action", "Action to take when encountering EOF from secondary input",
     821             :         OFFSET(fs.opt_eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
     822             :         EOF_ACTION_REPEAT, EOF_ACTION_PASS, .flags = FLAGS, "eof_action" },
     823             :         { "repeat", "Repeat the previous frame.",   0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, "eof_action" },
     824             :         { "endall", "End both streams.",            0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, "eof_action" },
     825             :         { "pass",   "Pass through the main input.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS },   .flags = FLAGS, "eof_action" },
     826             :     { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, {.i64 = EVAL_MODE_FRAME}, 0, EVAL_MODE_NB-1, FLAGS, "eval" },
     827             :          { "init",  "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_INIT},  .flags = FLAGS, .unit = "eval" },
     828             :          { "frame", "eval expressions per-frame",                  0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_FRAME}, .flags = FLAGS, .unit = "eval" },
     829             :     { "shortest", "force termination when the shortest input terminates", OFFSET(fs.opt_shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
     830             :     { "format", "set output format", OFFSET(format), AV_OPT_TYPE_INT, {.i64=OVERLAY_FORMAT_YUV420}, 0, OVERLAY_FORMAT_NB-1, FLAGS, "format" },
     831             :         { "yuv420", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_YUV420}, .flags = FLAGS, .unit = "format" },
     832             :         { "yuv422", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_YUV422}, .flags = FLAGS, .unit = "format" },
     833             :         { "yuv444", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_YUV444}, .flags = FLAGS, .unit = "format" },
     834             :         { "rgb",    "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_RGB},    .flags = FLAGS, .unit = "format" },
     835             :         { "gbrp",   "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_GBRP},   .flags = FLAGS, .unit = "format" },
     836             :         { "auto",   "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_AUTO},   .flags = FLAGS, .unit = "format" },
     837             :     { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(fs.opt_repeatlast), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGS },
     838             :     { NULL }
     839             : };
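
As a usage illustration of these options (an assumed, typical filtergraph, not taken from this report):

    overlay=x=main_w-overlay_w-10:y=main_h-overlay_h-10:eval=frame:format=yuv420

pins the overlay 10 pixels from the bottom-right corner of the main video and re-evaluates the position for every frame, using the variables listed in var_names[] above.
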
     840             : 
     841         246 : FRAMESYNC_DEFINE_CLASS(overlay, OverlayContext, fs);
     842             : 
     843             : static const AVFilterPad avfilter_vf_overlay_inputs[] = {
     844             :     {
     845             :         .name         = "main",
     846             :         .type         = AVMEDIA_TYPE_VIDEO,
     847             :         .config_props = config_input_main,
     848             :     },
     849             :     {
     850             :         .name         = "overlay",
     851             :         .type         = AVMEDIA_TYPE_VIDEO,
     852             :         .config_props = config_input_overlay,
     853             :     },
     854             :     { NULL }
     855             : };
     856             : 
     857             : static const AVFilterPad avfilter_vf_overlay_outputs[] = {
     858             :     {
     859             :         .name          = "default",
     860             :         .type          = AVMEDIA_TYPE_VIDEO,
     861             :         .config_props  = config_output,
     862             :     },
     863             :     { NULL }
     864             : };
     865             : 
     866             : AVFilter ff_vf_overlay = {
     867             :     .name          = "overlay",
     868             :     .description   = NULL_IF_CONFIG_SMALL("Overlay a video source on top of the input."),
     869             :     .preinit       = overlay_framesync_preinit,
     870             :     .init          = init,
     871             :     .uninit        = uninit,
     872             :     .priv_size     = sizeof(OverlayContext),
     873             :     .priv_class    = &overlay_class,
     874             :     .query_formats = query_formats,
     875             :     .activate      = activate,
     876             :     .process_command = process_command,
     877             :     .inputs        = avfilter_vf_overlay_inputs,
     878             :     .outputs       = avfilter_vf_overlay_outputs,
     879             :     .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL,
     880             : };

Generated by: LCOV version 1.13