LCOV - code coverage report
Current view: top level - libavfilter - vf_framerate.c (source / functions)
Test: coverage.info          Lines:     250 / 347  (72.0 %)
Date: 2017-12-14 08:27:08    Functions:  13 /  16  (81.2 %)

          Line data    Source code
       1             : /*
       2             :  * Copyright (C) 2012 Mark Himsley
       3             :  *
       4             :  * get_scene_score() Copyright (c) 2011 Stefano Sabatini
       5             :  * taken from libavfilter/vf_select.c
       6             :  *
       7             :  * This file is part of FFmpeg.
       8             :  *
       9             :  * FFmpeg is free software; you can redistribute it and/or
      10             :  * modify it under the terms of the GNU Lesser General Public
      11             :  * License as published by the Free Software Foundation; either
      12             :  * version 2.1 of the License, or (at your option) any later version.
      13             :  *
      14             :  * FFmpeg is distributed in the hope that it will be useful,
      15             :  * but WITHOUT ANY WARRANTY; without even the implied warranty of
      16             :  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
      17             :  * Lesser General Public License for more details.
      18             :  *
      19             :  * You should have received a copy of the GNU Lesser General Public
      20             :  * License along with FFmpeg; if not, write to the Free Software
      21             :  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
      22             :  */
      23             : 
      24             : /**
      25             :  * @file
      26             :  * filter for upsampling or downsampling a progressive source
      27             :  */
      28             : 
      29             : #define DEBUG
      30             : 
      31             : #include "libavutil/avassert.h"
      32             : #include "libavutil/imgutils.h"
      33             : #include "libavutil/internal.h"
      34             : #include "libavutil/opt.h"
      35             : #include "libavutil/pixdesc.h"
      36             : #include "libavutil/pixelutils.h"
      37             : 
      38             : #include "avfilter.h"
      39             : #include "internal.h"
      40             : #include "video.h"
      41             : 
      42             : #define N_SRCE 3
      43             : 
      44             : typedef struct FrameRateContext {
      45             :     const AVClass *class;
      46             :     // parameters
      47             :     AVRational dest_frame_rate;         ///< output frames per second
      48             :     int flags;                          ///< flags affecting frame rate conversion algorithm
      49             :     double scene_score;                 ///< score that denotes a scene change has happened
      50             :     int interp_start;                   ///< start of range to apply linear interpolation
      51             :     int interp_end;                     ///< end of range to apply linear interpolation
      52             : 
      53             :     int line_size[4];                   ///< bytes of pixel data per line for each plane
      54             :     int vsub;
      55             : 
      56             :     int frst, next, prev, crnt, last;
      57             :     int pending_srce_frames;            ///< how many input frames are still waiting to be processed
      58             :     int flush;                          ///< are we flushing final frames
      59             :     int pending_end_frame;              ///< flag indicating we are waiting to call filter_frame()
      60             : 
      61             :     AVRational srce_time_base;          ///< timebase of source
      62             : 
      63             :     AVRational dest_time_base;          ///< timebase of destination
      64             :     int32_t dest_frame_num;
      65             :     int64_t last_dest_frame_pts;        ///< pts of the last frame output
      66             :     int64_t average_srce_pts_dest_delta;///< average input pts delta converted from input rate to output rate
      67             :     int64_t average_dest_pts_delta;     ///< calculated average output pts delta
      68             : 
      69             :     av_pixelutils_sad_fn sad;           ///< Sum of the absolute difference function (scene detect only)
      70             :     double prev_mafd;                   ///< previous MAFD                           (scene detect only)
      71             : 
      72             :     AVFrame *srce[N_SRCE];              ///< buffered source frames
      73             :     int64_t srce_pts_dest[N_SRCE];      ///< pts for source frames scaled to output timebase
      74             :     int64_t pts;                        ///< pts of frame we are working on
      75             : 
      76             :     int (*blend_frames)(AVFilterContext *ctx, float interpolate,
      77             :                         AVFrame *copy_src1, AVFrame *copy_src2);
      78             :     int max;
      79             :     int bitdepth;
      80             :     AVFrame *work;
      81             : } FrameRateContext;
      82             : 
      83             : #define OFFSET(x) offsetof(FrameRateContext, x)
      84             : #define V AV_OPT_FLAG_VIDEO_PARAM
      85             : #define F AV_OPT_FLAG_FILTERING_PARAM
      86             : #define FRAMERATE_FLAG_SCD 01
      87             : 
      88             : static const AVOption framerate_options[] = {
      89             :     {"fps",                 "required output frames per second rate", OFFSET(dest_frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str="50"},             0,       INT_MAX, V|F },
      90             : 
      91             :     {"interp_start",        "point to start linear interpolation",    OFFSET(interp_start),    AV_OPT_TYPE_INT,      {.i64=15},                 0,       255,     V|F },
      92             :     {"interp_end",          "point to end linear interpolation",      OFFSET(interp_end),      AV_OPT_TYPE_INT,      {.i64=240},                0,       255,     V|F },
      93             :     {"scene",               "scene change level",                     OFFSET(scene_score),     AV_OPT_TYPE_DOUBLE,   {.dbl=7.0},                0,       INT_MAX, V|F },
      94             : 
      95             :     {"flags",               "set flags",                              OFFSET(flags),           AV_OPT_TYPE_FLAGS,    {.i64=1},                  0,       INT_MAX, V|F, "flags" },
      96             :     {"scene_change_detect", "enable scene change detection",          0,                       AV_OPT_TYPE_CONST,    {.i64=FRAMERATE_FLAG_SCD}, INT_MIN, INT_MAX, V|F, "flags" },
      97             :     {"scd",                 "enable scene change detection",          0,                       AV_OPT_TYPE_CONST,    {.i64=FRAMERATE_FLAG_SCD}, INT_MIN, INT_MAX, V|F, "flags" },
      98             : 
      99             :     {NULL}
     100             : };
     101             : 
     102             : AVFILTER_DEFINE_CLASS(framerate);
     103             : 
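
Illustrative usage of the options above (file names and values are placeholders; the
defaults shown below are the ones declared in framerate_options). A 25 fps clip could
be converted to 50 fps with scene-change detection enabled roughly as follows:

    ffmpeg -i input.mp4 -vf "framerate=fps=50:interp_start=15:interp_end=240:scene=7.0:flags=scd" output.mp4
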
     104           7 : static void next_source(AVFilterContext *ctx)
     105             : {
     106           7 :     FrameRateContext *s = ctx->priv;
     107             :     int i;
     108             : 
     109           7 :     ff_dlog(ctx,  "next_source()\n");
     110             : 
     111           7 :     if (s->srce[s->last] && s->srce[s->last] != s->srce[s->last-1]) {
     112           3 :         ff_dlog(ctx, "next_source() unlink %d\n", s->last);
     113           3 :         av_frame_free(&s->srce[s->last]);
     114             :     }
     115          21 :     for (i = s->last; i > s->frst; i--) {
     116          14 :         ff_dlog(ctx, "next_source() copy %d to %d\n", i - 1, i);
     117          14 :         s->srce[i] = s->srce[i - 1];
     118             :     }
     119           7 :     ff_dlog(ctx, "next_source() make %d null\n", s->frst);
     120           7 :     s->srce[s->frst] = NULL;
     121           7 : }
     122             : 
     123           0 : static av_always_inline int64_t sad_8x8_16(const uint16_t *src1, ptrdiff_t stride1,
     124             :                                            const uint16_t *src2, ptrdiff_t stride2)
     125             : {
     126           0 :     int sum = 0;
     127             :     int x, y;
     128             : 
     129           0 :     for (y = 0; y < 8; y++) {
     130           0 :         for (x = 0; x < 8; x++)
     131           0 :             sum += FFABS(src1[x] - src2[x]);
     132           0 :         src1 += stride1;
     133           0 :         src2 += stride2;
     134             :     }
     135           0 :     return sum;
     136             : }
     137             : 
     138           0 : static double get_scene_score16(AVFilterContext *ctx, AVFrame *crnt, AVFrame *next)
     139             : {
     140           0 :     FrameRateContext *s = ctx->priv;
     141           0 :     double ret = 0;
     142             : 
     143           0 :     ff_dlog(ctx, "get_scene_score16()\n");
     144             : 
     145           0 :     if (crnt &&
     146           0 :         crnt->height == next->height &&
     147           0 :         crnt->width  == next->width) {
     148             :         int x, y;
     149             :         int64_t sad;
     150             :         double mafd, diff;
     151           0 :         const uint16_t *p1 = (const uint16_t *)crnt->data[0];
     152           0 :         const uint16_t *p2 = (const uint16_t *)next->data[0];
     153           0 :         const int p1_linesize = crnt->linesize[0] / 2;
     154           0 :         const int p2_linesize = next->linesize[0] / 2;
     155             : 
     156           0 :         ff_dlog(ctx, "get_scene_score16() process\n");
     157             : 
     158           0 :         for (sad = y = 0; y < crnt->height; y += 8) {
     159           0 :             for (x = 0; x < p1_linesize; x += 8) {
     160           0 :                 sad += sad_8x8_16(p1 + y * p1_linesize + x,
     161             :                                   p1_linesize,
     162           0 :                                   p2 + y * p2_linesize + x,
     163             :                                   p2_linesize);
     164             :             }
     165             :         }
     166           0 :         mafd = sad / (crnt->height * crnt->width * 3);
     167           0 :         diff = fabs(mafd - s->prev_mafd);
     168           0 :         ret  = av_clipf(FFMIN(mafd, diff), 0, 100.0);
     169           0 :         s->prev_mafd = mafd;
     170             :     }
     171           0 :     ff_dlog(ctx, "get_scene_score16() result is:%f\n", ret);
     172           0 :     return ret;
     173             : }
     174             : 
     175           8 : static double get_scene_score(AVFilterContext *ctx, AVFrame *crnt, AVFrame *next)
     176             : {
     177           8 :     FrameRateContext *s = ctx->priv;
     178           8 :     double ret = 0;
     179             : 
     180           8 :     ff_dlog(ctx, "get_scene_score()\n");
     181             : 
     182          16 :     if (crnt &&
     183          16 :         crnt->height == next->height &&
     184           8 :         crnt->width  == next->width) {
     185             :         int x, y;
     186             :         int64_t sad;
     187             :         double mafd, diff;
     188           8 :         uint8_t *p1 = crnt->data[0];
     189           8 :         uint8_t *p2 = next->data[0];
     190           8 :         const int p1_linesize = crnt->linesize[0];
     191           8 :         const int p2_linesize = next->linesize[0];
     192             : 
     193           8 :         ff_dlog(ctx, "get_scene_score() process\n");
     194             : 
     195         248 :         for (sad = y = 0; y < crnt->height; y += 8) {
     196        9840 :             for (x = 0; x < p1_linesize; x += 8) {
     197       19200 :                 sad += s->sad(p1 + y * p1_linesize + x,
     198             :                               p1_linesize,
     199        9600 :                               p2 + y * p2_linesize + x,
     200             :                               p2_linesize);
     201             :             }
     202             :         }
     203           8 :         emms_c();
     204           8 :         mafd = sad / (crnt->height * crnt->width * 3);
     205           8 :         diff = fabs(mafd - s->prev_mafd);
     206           8 :         ret  = av_clipf(FFMIN(mafd, diff), 0, 100.0);
     207           8 :         s->prev_mafd = mafd;
     208             :     }
      209           8 :     ff_dlog(ctx, "get_scene_score() result is:%f\n", ret);
     210           8 :     return ret;
     211             : }
     212             : 
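
A worked example of the score above, with made-up numbers: for a 352x288 frame whose
8x8 block SADs sum to 3,041,280, mafd = 3041280 / (288 * 352 * 3) = 10.0; with a
previous mafd of 2.0 the frame-to-frame difference is 8.0, so the returned score is
min(10.0, 8.0) = 8.0. That exceeds the default scene threshold of 7.0, so the caller
treats the transition as a shot change and does not blend the two frames.
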
     213           0 : static int blend_frames16(AVFilterContext *ctx, float interpolate,
     214             :                           AVFrame *copy_src1, AVFrame *copy_src2)
     215             : {
     216           0 :     FrameRateContext *s = ctx->priv;
     217           0 :     AVFilterLink *outlink = ctx->outputs[0];
     218           0 :     double interpolate_scene_score = 0;
     219             : 
     220           0 :     if ((s->flags & FRAMERATE_FLAG_SCD) && copy_src2) {
     221           0 :         interpolate_scene_score = get_scene_score16(ctx, copy_src1, copy_src2);
     222           0 :         ff_dlog(ctx, "blend_frames16() interpolate scene score:%f\n", interpolate_scene_score);
     223             :     }
     224             :     // decide if the shot-change detection allows us to blend two frames
     225           0 :     if (interpolate_scene_score < s->scene_score && copy_src2) {
     226           0 :         uint16_t src2_factor = fabsf(interpolate) * (1 << (s->bitdepth - 8));
     227           0 :         uint16_t src1_factor = s->max - src2_factor;
     228           0 :         const int half = s->max / 2;
     229           0 :         const int uv = (s->max + 1) * half;
     230           0 :         const int shift = s->bitdepth;
     231             :         int plane, line, pixel;
     232             : 
     233             :         // get work-space for output frame
     234           0 :         s->work = ff_get_video_buffer(outlink, outlink->w, outlink->h);
     235           0 :         if (!s->work)
     236           0 :             return AVERROR(ENOMEM);
     237             : 
     238           0 :         av_frame_copy_props(s->work, s->srce[s->crnt]);
     239             : 
     240           0 :         ff_dlog(ctx, "blend_frames16() INTERPOLATE to create work frame\n");
     241           0 :         for (plane = 0; plane < 4 && copy_src1->data[plane] && copy_src2->data[plane]; plane++) {
     242           0 :             int cpy_line_width = s->line_size[plane];
     243           0 :             const uint16_t *cpy_src1_data = (const uint16_t *)copy_src1->data[plane];
     244           0 :             int cpy_src1_line_size = copy_src1->linesize[plane] / 2;
     245           0 :             const uint16_t *cpy_src2_data = (const uint16_t *)copy_src2->data[plane];
     246           0 :             int cpy_src2_line_size = copy_src2->linesize[plane] / 2;
     247           0 :             int cpy_src_h = (plane > 0 && plane < 3) ? (copy_src1->height >> s->vsub) : (copy_src1->height);
     248           0 :             uint16_t *cpy_dst_data = (uint16_t *)s->work->data[plane];
     249           0 :             int cpy_dst_line_size = s->work->linesize[plane] / 2;
     250             : 
      251           0 :             if (plane < 1 || plane > 2) {
     252             :                 // luma or alpha
     253           0 :                 for (line = 0; line < cpy_src_h; line++) {
     254           0 :                     for (pixel = 0; pixel < cpy_line_width; pixel++)
     255           0 :                         cpy_dst_data[pixel] = ((cpy_src1_data[pixel] * src1_factor) + (cpy_src2_data[pixel] * src2_factor) + half) >> shift;
     256           0 :                     cpy_src1_data += cpy_src1_line_size;
     257           0 :                     cpy_src2_data += cpy_src2_line_size;
     258           0 :                     cpy_dst_data += cpy_dst_line_size;
     259             :                 }
     260             :             } else {
     261             :                 // chroma
     262           0 :                 for (line = 0; line < cpy_src_h; line++) {
     263           0 :                     for (pixel = 0; pixel < cpy_line_width; pixel++) {
     264           0 :                         cpy_dst_data[pixel] = (((cpy_src1_data[pixel] - half) * src1_factor) + ((cpy_src2_data[pixel] - half) * src2_factor) + uv) >> shift;
     265             :                     }
     266           0 :                     cpy_src1_data += cpy_src1_line_size;
     267           0 :                     cpy_src2_data += cpy_src2_line_size;
     268           0 :                     cpy_dst_data += cpy_dst_line_size;
     269             :                 }
     270             :             }
     271             :         }
     272           0 :         return 1;
     273             :     }
     274           0 :     return 0;
     275             : }
     276             : 
     277           8 : static int blend_frames8(AVFilterContext *ctx, float interpolate,
     278             :                          AVFrame *copy_src1, AVFrame *copy_src2)
     279             : {
     280           8 :     FrameRateContext *s = ctx->priv;
     281           8 :     AVFilterLink *outlink = ctx->outputs[0];
     282           8 :     double interpolate_scene_score = 0;
     283             : 
     284           8 :     if ((s->flags & FRAMERATE_FLAG_SCD) && copy_src2) {
     285           8 :         interpolate_scene_score = get_scene_score(ctx, copy_src1, copy_src2);
     286           8 :         ff_dlog(ctx, "blend_frames8() interpolate scene score:%f\n", interpolate_scene_score);
     287             :     }
     288             :     // decide if the shot-change detection allows us to blend two frames
     289           8 :     if (interpolate_scene_score < s->scene_score && copy_src2) {
     290           8 :         uint16_t src2_factor = fabsf(interpolate);
     291           8 :         uint16_t src1_factor = 256 - src2_factor;
     292             :         int plane, line, pixel;
     293             : 
     294             :         // get work-space for output frame
     295           8 :         s->work = ff_get_video_buffer(outlink, outlink->w, outlink->h);
     296           8 :         if (!s->work)
     297           0 :             return AVERROR(ENOMEM);
     298             : 
     299           8 :         av_frame_copy_props(s->work, s->srce[s->crnt]);
     300             : 
     301           8 :         ff_dlog(ctx, "blend_frames8() INTERPOLATE to create work frame\n");
     302          32 :         for (plane = 0; plane < 4 && copy_src1->data[plane] && copy_src2->data[plane]; plane++) {
     303          24 :             int cpy_line_width = s->line_size[plane];
     304          24 :             uint8_t *cpy_src1_data = copy_src1->data[plane];
     305          24 :             int cpy_src1_line_size = copy_src1->linesize[plane];
     306          24 :             uint8_t *cpy_src2_data = copy_src2->data[plane];
     307          24 :             int cpy_src2_line_size = copy_src2->linesize[plane];
     308          24 :             int cpy_src_h = (plane > 0 && plane < 3) ? (copy_src1->height >> s->vsub) : (copy_src1->height);
     309          24 :             uint8_t *cpy_dst_data = s->work->data[plane];
     310          24 :             int cpy_dst_line_size = s->work->linesize[plane];
      311          32 :             if (plane < 1 || plane > 2) {
     312             :                 // luma or alpha
     313        1928 :                 for (line = 0; line < cpy_src_h; line++) {
     314      616320 :                     for (pixel = 0; pixel < cpy_line_width; pixel++) {
       315             :                         // integer version of (src1 * src1_factor) + (src2 * src2_factor) + 0.5
     316             :                         // 0.5 is for rounding
     317             :                         // 128 is the integer representation of 0.5 << 8
     318      614400 :                         cpy_dst_data[pixel] = ((cpy_src1_data[pixel] * src1_factor) + (cpy_src2_data[pixel] * src2_factor) + 128) >> 8;
     319             :                     }
     320        1920 :                     cpy_src1_data += cpy_src1_line_size;
     321        1920 :                     cpy_src2_data += cpy_src2_line_size;
     322        1920 :                     cpy_dst_data += cpy_dst_line_size;
     323             :                 }
     324             :             } else {
     325             :                 // chroma
     326        1936 :                 for (line = 0; line < cpy_src_h; line++) {
     327      309120 :                     for (pixel = 0; pixel < cpy_line_width; pixel++) {
     328             :                         // as above
     329             :                         // because U and V are based around 128 we have to subtract 128 from the components.
     330             :                         // 32896 is the integer representation of 128.5 << 8
     331      307200 :                         cpy_dst_data[pixel] = (((cpy_src1_data[pixel] - 128) * src1_factor) + ((cpy_src2_data[pixel] - 128) * src2_factor) + 32896) >> 8;
     332             :                     }
     333        1920 :                     cpy_src1_data += cpy_src1_line_size;
     334        1920 :                     cpy_src2_data += cpy_src2_line_size;
     335        1920 :                     cpy_dst_data += cpy_dst_line_size;
     336             :                 }
     337             :             }
     338             :         }
     339           8 :         return 1;
     340             :     }
     341           0 :     return 0;
     342             : }
     343             : 
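
A minimal, self-contained sketch of the 8-bit fixed-point blend arithmetic above,
applied to one luma and one chroma sample pair; the factor (interpolate = 64) and the
sample values are invented for illustration only:

    /* sketch of the blend_frames8() per-pixel math on made-up samples */
    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        uint16_t src2_factor = 64;                 /* fabsf(interpolate)            */
        uint16_t src1_factor = 256 - src2_factor;  /* the two weights sum to 256    */

        uint8_t y1 = 100, y2 = 200;                /* luma samples from src1/src2   */
        uint8_t y  = (y1 * src1_factor + y2 * src2_factor + 128) >> 8;               /* 125 */

        uint8_t u1 = 120, u2 = 136;                /* chroma samples, 128-centred   */
        uint8_t u  = ((u1 - 128) * src1_factor
                    + (u2 - 128) * src2_factor + 32896) >> 8;                        /* 124 */

        printf("blended luma %u, blended chroma %u\n", y, u);
        return 0;
    }
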
     344          16 : static int process_work_frame(AVFilterContext *ctx, int stop)
     345             : {
     346          16 :     FrameRateContext *s = ctx->priv;
     347             :     int64_t work_next_pts;
     348             :     AVFrame *copy_src1;
     349             :     float interpolate;
     350             : 
     351          16 :     ff_dlog(ctx, "process_work_frame()\n");
     352             : 
     353          16 :     ff_dlog(ctx, "process_work_frame() pending_input_frames %d\n", s->pending_srce_frames);
     354             : 
     355          16 :     if (s->srce[s->prev]) ff_dlog(ctx, "process_work_frame() srce prev pts:%"PRId64"\n", s->srce[s->prev]->pts);
     356          16 :     if (s->srce[s->crnt]) ff_dlog(ctx, "process_work_frame() srce crnt pts:%"PRId64"\n", s->srce[s->crnt]->pts);
     357          16 :     if (s->srce[s->next]) ff_dlog(ctx, "process_work_frame() srce next pts:%"PRId64"\n", s->srce[s->next]->pts);
     358             : 
     359          16 :     if (!s->srce[s->crnt]) {
     360             :         // the filter cannot do anything
     361           2 :         ff_dlog(ctx, "process_work_frame() no current frame cached: move on to next frame, do not output a frame\n");
     362           2 :         next_source(ctx);
     363           2 :         return 0;
     364             :     }
     365             : 
     366          14 :     work_next_pts = s->pts + s->average_dest_pts_delta;
     367             : 
     368          14 :     ff_dlog(ctx, "process_work_frame() work crnt pts:%"PRId64"\n", s->pts);
     369          14 :     ff_dlog(ctx, "process_work_frame() work next pts:%"PRId64"\n", work_next_pts);
     370          14 :     if (s->srce[s->prev])
     371           9 :         ff_dlog(ctx, "process_work_frame() srce prev pts:%"PRId64" at dest time base:%u/%u\n",
     372             :             s->srce_pts_dest[s->prev], s->dest_time_base.num, s->dest_time_base.den);
     373          14 :     if (s->srce[s->crnt])
     374          14 :         ff_dlog(ctx, "process_work_frame() srce crnt pts:%"PRId64" at dest time base:%u/%u\n",
     375             :             s->srce_pts_dest[s->crnt], s->dest_time_base.num, s->dest_time_base.den);
     376          14 :     if (s->srce[s->next])
     377          14 :         ff_dlog(ctx, "process_work_frame() srce next pts:%"PRId64" at dest time base:%u/%u\n",
     378             :             s->srce_pts_dest[s->next], s->dest_time_base.num, s->dest_time_base.den);
     379             : 
     380          14 :     av_assert0(s->srce[s->next]);
     381             : 
     382             :     // should filter be skipping input frame (output frame rate is lower than input frame rate)
     383          14 :     if (!s->flush && s->pts >= s->srce_pts_dest[s->next]) {
     384           1 :         ff_dlog(ctx, "process_work_frame() work crnt pts >= srce next pts: SKIP FRAME, move on to next frame, do not output a frame\n");
     385           1 :         next_source(ctx);
     386           1 :         s->pending_srce_frames--;
     387           1 :         return 0;
     388             :     }
     389             : 
     390             :     // calculate interpolation
     391          13 :     interpolate = ((s->pts - s->srce_pts_dest[s->crnt]) * 256.0 / s->average_srce_pts_dest_delta);
     392          13 :     ff_dlog(ctx, "process_work_frame() interpolate:%f/256\n", interpolate);
     393          13 :     copy_src1 = s->srce[s->crnt];
     394          13 :     if (interpolate > s->interp_end) {
     395           0 :         ff_dlog(ctx, "process_work_frame() source is:NEXT\n");
     396           0 :         copy_src1 = s->srce[s->next];
     397             :     }
     398          13 :     if (s->srce[s->prev] && interpolate < -s->interp_end) {
     399           0 :         ff_dlog(ctx, "process_work_frame() source is:PREV\n");
     400           0 :         copy_src1 = s->srce[s->prev];
     401             :     }
     402             : 
     403             :     // decide whether to blend two frames
     404          13 :     if ((interpolate >= s->interp_start && interpolate <= s->interp_end) || (interpolate <= -s->interp_start && interpolate >= -s->interp_end)) {
     405             :         AVFrame *copy_src2;
     406             : 
     407           8 :         if (interpolate > 0) {
     408           6 :             ff_dlog(ctx, "process_work_frame() interpolate source is:NEXT\n");
     409           6 :             copy_src2 = s->srce[s->next];
     410             :         } else {
     411           2 :             ff_dlog(ctx, "process_work_frame() interpolate source is:PREV\n");
     412           2 :             copy_src2 = s->srce[s->prev];
     413             :         }
     414           8 :         if (s->blend_frames(ctx, interpolate, copy_src1, copy_src2))
     415           8 :             goto copy_done;
     416             :         else
     417           0 :             ff_dlog(ctx, "process_work_frame() CUT - DON'T INTERPOLATE\n");
     418             :     }
     419             : 
     420           5 :     ff_dlog(ctx, "process_work_frame() COPY to the work frame\n");
     421             :     // copy the frame we decided is our base source
     422           5 :     s->work = av_frame_clone(copy_src1);
     423           5 :     if (!s->work)
     424           0 :         return AVERROR(ENOMEM);
     425             : 
     426           5 : copy_done:
     427          13 :     s->work->pts = s->pts;
     428             : 
     429             :     // should filter be re-using input frame (output frame rate is higher than input frame rate)
     430          13 :     if (!s->flush && (work_next_pts + s->average_dest_pts_delta) < (s->srce_pts_dest[s->crnt] + s->average_srce_pts_dest_delta)) {
     431           9 :         ff_dlog(ctx, "process_work_frame() REPEAT FRAME\n");
     432             :     } else {
     433           4 :         ff_dlog(ctx, "process_work_frame() CONSUME FRAME, move to next frame\n");
     434           4 :         s->pending_srce_frames--;
     435           4 :         next_source(ctx);
     436             :     }
     437          13 :     ff_dlog(ctx, "process_work_frame() output a frame\n");
     438          13 :     s->dest_frame_num++;
     439          13 :     if (stop)
     440           5 :         s->pending_end_frame = 0;
     441          13 :     s->last_dest_frame_pts = s->work->pts;
     442             : 
     443          13 :     return 1;
     444             : }
     445             : 
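
A worked example of the interpolation factor above (numbers invented): with
srce_pts_dest[crnt] = 100, average_srce_pts_dest_delta = 50 and an output pts of 110,
interpolate = (110 - 100) * 256.0 / 50 = 51.2. That lies inside the default [15, 240]
window, so blend_frames() mixes crnt and next with src2_factor of about 51; a value
below 15 would simply clone the current frame, and a value above 240 would clone the
next one instead.
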
     446          16 : static void set_srce_frame_dest_pts(AVFilterContext *ctx)
     447             : {
     448          16 :     FrameRateContext *s = ctx->priv;
     449             : 
     450          16 :     ff_dlog(ctx, "set_srce_frame_output_pts()\n");
     451             : 
     452             :     // scale the input pts from the timebase difference between input and output
     453          16 :     if (s->srce[s->prev])
     454           9 :         s->srce_pts_dest[s->prev] = av_rescale_q(s->srce[s->prev]->pts, s->srce_time_base, s->dest_time_base);
     455          16 :     if (s->srce[s->crnt])
     456          14 :         s->srce_pts_dest[s->crnt] = av_rescale_q(s->srce[s->crnt]->pts, s->srce_time_base, s->dest_time_base);
     457          16 :     if (s->srce[s->next])
     458          16 :         s->srce_pts_dest[s->next] = av_rescale_q(s->srce[s->next]->pts, s->srce_time_base, s->dest_time_base);
     459          16 : }
     460             : 
     461          13 : static void set_work_frame_pts(AVFilterContext *ctx)
     462             : {
     463          13 :     FrameRateContext *s = ctx->priv;
     464          13 :     int64_t pts, average_srce_pts_delta = 0;
     465             : 
     466          13 :     ff_dlog(ctx, "set_work_frame_pts()\n");
     467             : 
     468          13 :     av_assert0(s->srce[s->next]);
     469          13 :     av_assert0(s->srce[s->crnt]);
     470             : 
     471          13 :     ff_dlog(ctx, "set_work_frame_pts() srce crnt pts:%"PRId64"\n", s->srce[s->crnt]->pts);
     472          13 :     ff_dlog(ctx, "set_work_frame_pts() srce next pts:%"PRId64"\n", s->srce[s->next]->pts);
     473          13 :     if (s->srce[s->prev])
     474           8 :         ff_dlog(ctx, "set_work_frame_pts() srce prev pts:%"PRId64"\n", s->srce[s->prev]->pts);
     475             : 
     476          13 :     average_srce_pts_delta = s->average_srce_pts_dest_delta;
     477          13 :     ff_dlog(ctx, "set_work_frame_pts() initial average srce pts:%"PRId64"\n", average_srce_pts_delta);
     478             : 
     479          13 :     set_srce_frame_dest_pts(ctx);
     480             : 
     481             :     // calculate the PTS delta
     482          13 :     if ((pts = (s->srce_pts_dest[s->next] - s->srce_pts_dest[s->crnt]))) {
     483          13 :         average_srce_pts_delta = average_srce_pts_delta?((average_srce_pts_delta+pts)>>1):pts;
     484           0 :     } else if (s->srce[s->prev] && (pts = (s->srce_pts_dest[s->crnt] - s->srce_pts_dest[s->prev]))) {
     485           0 :         average_srce_pts_delta = average_srce_pts_delta?((average_srce_pts_delta+pts)>>1):pts;
     486             :     }
     487             : 
     488          13 :     s->average_srce_pts_dest_delta = average_srce_pts_delta;
     489          13 :     ff_dlog(ctx, "set_work_frame_pts() average srce pts:%"PRId64"\n", average_srce_pts_delta);
     490          13 :     ff_dlog(ctx, "set_work_frame_pts() average srce pts:%"PRId64" at dest time base:%u/%u\n",
     491             :             s->average_srce_pts_dest_delta, s->dest_time_base.num, s->dest_time_base.den);
     492             : 
     493          13 :     if (ctx->inputs[0] && !s->average_dest_pts_delta) {
     494           2 :         int64_t d = av_q2d(av_inv_q(av_mul_q(s->dest_time_base, s->dest_frame_rate)));
     495           2 :         s->average_dest_pts_delta = d;
     496           2 :         ff_dlog(ctx, "set_work_frame_pts() average dest pts delta:%"PRId64"\n", s->average_dest_pts_delta);
     497             :     }
     498             : 
     499          13 :     if (!s->dest_frame_num) {
     500           2 :         s->pts = s->last_dest_frame_pts = s->srce_pts_dest[s->crnt];
     501             :     } else {
     502          11 :         s->pts = s->last_dest_frame_pts + s->average_dest_pts_delta;
     503             :     }
     504             : 
     505          13 :     ff_dlog(ctx, "set_work_frame_pts() calculated pts:%"PRId64" at dest time base:%u/%u\n",
     506             :             s->pts, s->dest_time_base.num, s->dest_time_base.den);
     507          13 : }
     508             : 
     509           4 : static av_cold int init(AVFilterContext *ctx)
     510             : {
     511           4 :     FrameRateContext *s = ctx->priv;
     512             : 
     513           4 :     s->dest_frame_num = 0;
     514             : 
     515           4 :     s->crnt = (N_SRCE)>>1;
     516           4 :     s->last = N_SRCE - 1;
     517             : 
     518           4 :     s->next = s->crnt - 1;
     519           4 :     s->prev = s->crnt + 1;
     520             : 
     521           4 :     return 0;
     522             : }
     523             : 
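
With N_SRCE = 3 the indices set up above work out to crnt = 1, last = 2, next = 0 and
prev = 2, while frst is left at its zeroed default of 0. In other words the newest
input frame always lands in slot 0 (frst/next), the working frame sits in slot 1
(crnt) and the oldest in slot 2 (prev/last); next_source() ages frames by shifting
them towards the higher indices.
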
     524           4 : static av_cold void uninit(AVFilterContext *ctx)
     525             : {
     526           4 :     FrameRateContext *s = ctx->priv;
     527             :     int i;
     528             : 
     529          12 :     for (i = s->frst; i < s->last; i++) {
     530           8 :         if (s->srce[i] && (s->srce[i] != s->srce[i + 1]))
     531           3 :             av_frame_free(&s->srce[i]);
     532             :     }
     533           4 :     av_frame_free(&s->srce[s->last]);
     534           4 : }
     535             : 
     536           2 : static int query_formats(AVFilterContext *ctx)
     537             : {
     538             :     static const enum AVPixelFormat pix_fmts[] = {
     539             :         AV_PIX_FMT_YUV410P,
     540             :         AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUVJ411P,
     541             :         AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUVJ420P,
     542             :         AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVJ422P,
     543             :         AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUVJ440P,
     544             :         AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUVJ444P,
     545             :         AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV420P12,
     546             :         AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV422P12,
     547             :         AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV444P12,
     548             :         AV_PIX_FMT_NONE
     549             :     };
     550             : 
     551           2 :     AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
     552           2 :     if (!fmts_list)
     553           0 :         return AVERROR(ENOMEM);
     554           2 :     return ff_set_common_formats(ctx, fmts_list);
     555             : }
     556             : 
     557           2 : static int config_input(AVFilterLink *inlink)
     558             : {
     559           2 :     AVFilterContext *ctx = inlink->dst;
     560           2 :     FrameRateContext *s = ctx->priv;
     561           2 :     const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(inlink->format);
     562             :     int plane;
     563             : 
     564          10 :     for (plane = 0; plane < 4; plane++) {
     565           8 :         s->line_size[plane] = av_image_get_linesize(inlink->format, inlink->w,
     566             :                                                     plane);
     567             :     }
     568             : 
     569           2 :     s->bitdepth = pix_desc->comp[0].depth;
     570           2 :     s->vsub = pix_desc->log2_chroma_h;
     571             : 
     572           2 :     s->sad = av_pixelutils_get_sad_fn(3, 3, 2, s); // 8x8 both sources aligned
     573           2 :     if (!s->sad)
     574           0 :         return AVERROR(EINVAL);
     575             : 
     576           2 :     s->srce_time_base = inlink->time_base;
     577             : 
     578           2 :     if (s->bitdepth == 8)
     579           2 :         s->blend_frames = blend_frames8;
     580             :     else
     581           0 :         s->blend_frames = blend_frames16;
     582           2 :     s->max = 1 << (s->bitdepth);
     583             : 
     584           2 :     return 0;
     585             : }
     586             : 
     587           8 : static int filter_frame(AVFilterLink *inlink, AVFrame *inpicref)
     588             : {
     589             :     int ret;
     590           8 :     AVFilterContext *ctx = inlink->dst;
     591           8 :     FrameRateContext *s = ctx->priv;
     592             : 
     593             :     // we have one new frame
     594           8 :     s->pending_srce_frames++;
     595             : 
     596           8 :     if (inpicref->interlaced_frame)
     597           0 :         av_log(ctx, AV_LOG_WARNING, "Interlaced frame found - the output will not be correct.\n");
     598             : 
     599             :     // store the pointer to the new frame
     600           8 :     av_frame_free(&s->srce[s->frst]);
     601           8 :     s->srce[s->frst] = inpicref;
     602             : 
     603           8 :     if (!s->pending_end_frame && s->srce[s->crnt]) {
     604           5 :         set_work_frame_pts(ctx);
     605           5 :         s->pending_end_frame = 1;
     606             :     } else {
     607           3 :         set_srce_frame_dest_pts(ctx);
     608             :     }
     609             : 
     610           8 :     ret = process_work_frame(ctx, 1);
     611           8 :     if (ret < 0)
     612           0 :         return ret;
     613           8 :     return ret ? ff_filter_frame(ctx->outputs[0], s->work) : 0;
     614             : }
     615             : 
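
Taken together with request_frame() below, the data flow is: each arriving frame
increments pending_srce_frames and is parked in slot frst; once a current frame
exists, filter_frame() emits at most one output per input (the stop = 1 call to
process_work_frame()), while any extra outputs needed for upsampling, and the final
flush, are produced from request_frame() with stop = 0.
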
     616           2 : static int config_output(AVFilterLink *outlink)
     617             : {
     618           2 :     AVFilterContext *ctx = outlink->src;
     619           2 :     FrameRateContext *s = ctx->priv;
     620             :     int exact;
     621             : 
     622           2 :     ff_dlog(ctx, "config_output()\n");
     623             : 
     624           2 :     ff_dlog(ctx,
     625             :            "config_output() input time base:%u/%u (%f)\n",
     626             :            ctx->inputs[0]->time_base.num,ctx->inputs[0]->time_base.den,
     627             :            av_q2d(ctx->inputs[0]->time_base));
     628             : 
     629             :     // make sure timebase is small enough to hold the framerate
     630             : 
     631           6 :     exact = av_reduce(&s->dest_time_base.num, &s->dest_time_base.den,
     632           2 :                       av_gcd((int64_t)s->srce_time_base.num * s->dest_frame_rate.num,
     633           2 :                              (int64_t)s->srce_time_base.den * s->dest_frame_rate.den ),
     634           2 :                       (int64_t)s->srce_time_base.den * s->dest_frame_rate.num, INT_MAX);
     635             : 
     636           2 :     av_log(ctx, AV_LOG_INFO,
     637             :            "time base:%u/%u -> %u/%u exact:%d\n",
     638             :            s->srce_time_base.num, s->srce_time_base.den,
     639             :            s->dest_time_base.num, s->dest_time_base.den, exact);
     640           2 :     if (!exact) {
     641           0 :         av_log(ctx, AV_LOG_WARNING, "Timebase conversion is not exact\n");
     642             :     }
     643             : 
     644           2 :     outlink->frame_rate = s->dest_frame_rate;
     645           2 :     outlink->time_base = s->dest_time_base;
     646             : 
     647           2 :     ff_dlog(ctx,
     648             :            "config_output() output time base:%u/%u (%f) w:%d h:%d\n",
     649             :            outlink->time_base.num, outlink->time_base.den,
     650             :            av_q2d(outlink->time_base),
     651             :            outlink->w, outlink->h);
     652             : 
     653             : 
     654           2 :     av_log(ctx, AV_LOG_INFO, "fps -> fps:%u/%u scene score:%f interpolate start:%d end:%d\n",
     655             :             s->dest_frame_rate.num, s->dest_frame_rate.den,
     656             :             s->scene_score, s->interp_start, s->interp_end);
     657             : 
     658           2 :     return 0;
     659             : }
     660             : 
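
A worked example of the time-base reduction above (a common but invented
configuration): with an input time base of 1/25 and fps=50, av_reduce() is given
gcd(1 * 50, 25 * 1) = 25 over 25 * 50 = 1250, which reduces to 1/50, so the output
time base becomes 1/50 with exact = 1. set_work_frame_pts() then derives
average_dest_pts_delta = 1 / ((1/50) * 50) = 1 tick per output frame.
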
     661          16 : static int request_frame(AVFilterLink *outlink)
     662             : {
     663          16 :     AVFilterContext *ctx = outlink->src;
     664          16 :     FrameRateContext *s = ctx->priv;
     665             :     int ret, i;
     666             : 
     667          16 :     ff_dlog(ctx, "request_frame()\n");
     668             : 
     669             :     // if there is no "next" frame AND we are not in flush then get one from our input filter
     670          16 :     if (!s->srce[s->frst] && !s->flush)
     671           8 :         goto request;
     672             : 
     673           8 :     ff_dlog(ctx, "request_frame() REPEAT or FLUSH\n");
     674             : 
     675           8 :     if (s->pending_srce_frames <= 0) {
     676           0 :         ff_dlog(ctx, "request_frame() nothing else to do, return:EOF\n");
     677           0 :         return AVERROR_EOF;
     678             :     }
     679             : 
     680             :     // otherwise, make brand-new frame and pass to our output filter
     681           8 :     ff_dlog(ctx, "request_frame() FLUSH\n");
     682             : 
     683             :     // back fill at end of file when source has no more frames
     684          24 :     for (i = s->last; i > s->frst; i--) {
     685          16 :         if (!s->srce[i - 1] && s->srce[i]) {
     686           0 :             ff_dlog(ctx, "request_frame() copy:%d to:%d\n", i, i - 1);
     687           0 :             s->srce[i - 1] = s->srce[i];
     688             :         }
     689             :     }
     690             : 
     691           8 :     set_work_frame_pts(ctx);
     692           8 :     ret = process_work_frame(ctx, 0);
     693           8 :     if (ret < 0)
     694           0 :         return ret;
     695           8 :     if (ret)
     696           8 :         return ff_filter_frame(ctx->outputs[0], s->work);
     697             : 
     698           0 : request:
     699           8 :     ff_dlog(ctx, "request_frame() call source's request_frame()\n");
     700           8 :     ret = ff_request_frame(ctx->inputs[0]);
     701           8 :     if (ret < 0 && (ret != AVERROR_EOF)) {
     702           0 :         ff_dlog(ctx, "request_frame() source's request_frame() returned error:%d\n", ret);
     703           0 :         return ret;
     704           8 :     } else if (ret == AVERROR_EOF) {
     705           0 :         s->flush = 1;
     706             :     }
     707           8 :     ff_dlog(ctx, "request_frame() source's request_frame() returned:%d\n", ret);
     708           8 :     return 0;
     709             : }
     710             : 
     711             : static const AVFilterPad framerate_inputs[] = {
     712             :     {
     713             :         .name         = "default",
     714             :         .type         = AVMEDIA_TYPE_VIDEO,
     715             :         .config_props = config_input,
     716             :         .filter_frame = filter_frame,
     717             :     },
     718             :     { NULL }
     719             : };
     720             : 
     721             : static const AVFilterPad framerate_outputs[] = {
     722             :     {
     723             :         .name          = "default",
     724             :         .type          = AVMEDIA_TYPE_VIDEO,
     725             :         .request_frame = request_frame,
     726             :         .config_props  = config_output,
     727             :     },
     728             :     { NULL }
     729             : };
     730             : 
     731             : AVFilter ff_vf_framerate = {
     732             :     .name          = "framerate",
     733             :     .description   = NULL_IF_CONFIG_SMALL("Upsamples or downsamples progressive source between specified frame rates."),
     734             :     .priv_size     = sizeof(FrameRateContext),
     735             :     .priv_class    = &framerate_class,
     736             :     .init          = init,
     737             :     .uninit        = uninit,
     738             :     .query_formats = query_formats,
     739             :     .inputs        = framerate_inputs,
     740             :     .outputs       = framerate_outputs,
     741             : };

Generated by: LCOV version 1.13