FFmpeg coverage

Directory: ../../../ffmpeg/
File:      src/libavfilter/vf_lagfun.c
Date:      2022-12-09 07:38:14

             Exec    Total  Coverage
Lines:          0       47      0.0%
Functions:      0        9      0.0%
Branches:       0       70      0.0%
Source (per-line execution and branch counts are all zero, so only the code itself is reproduced below):

/*
 * Copyright (c) 2019 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

typedef struct LagfunContext {
    const AVClass *class;
    float decay;
    int planes;

    int depth;
    int nb_planes;
    int linesize[4];
    int planewidth[4];
    int planeheight[4];

    float *old[4];

    int (*lagfun[2])(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} LagfunContext;

static const enum AVPixelFormat pixel_fmts[] = {
    AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY9,
    AV_PIX_FMT_GRAY10, AV_PIX_FMT_GRAY12, AV_PIX_FMT_GRAY14,
    AV_PIX_FMT_GRAY16,
    AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P,
    AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P,
    AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P,
    AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P,
    AV_PIX_FMT_YUVJ440P, AV_PIX_FMT_YUVJ444P,
    AV_PIX_FMT_YUVJ411P,
    AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV444P9,
    AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10,
    AV_PIX_FMT_YUV440P10,
    AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV420P12,
    AV_PIX_FMT_YUV440P12,
    AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV420P14,
    AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16,
    AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10,
    AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRP14, AV_PIX_FMT_GBRP16,
    AV_PIX_FMT_GRAYF32, AV_PIX_FMT_GBRPF32, AV_PIX_FMT_GBRAPF32,
    AV_PIX_FMT_NONE
};

typedef struct ThreadData {
    AVFrame *in, *out;
} ThreadData;

#define LAGFUN(name, type, round, disabled) \
static int lagfun_frame##name(AVFilterContext *ctx, void *arg, \
                              int jobnr, int nb_jobs) \
{ \
    LagfunContext *s = ctx->priv; \
    const float decay = s->decay; \
    ThreadData *td = arg; \
    AVFrame *in = td->in; \
    AVFrame *out = td->out; \
 \
    for (int p = 0; p < s->nb_planes; p++) { \
        const int slice_start = (s->planeheight[p] * jobnr) / nb_jobs; \
        const int slice_end = (s->planeheight[p] * (jobnr+1)) / nb_jobs; \
        const int width = s->planewidth[p]; \
        const type *src = (const type *)in->data[p] + \
                          slice_start * in->linesize[p] / sizeof(type); \
        float *osrc = s->old[p] + slice_start * s->planewidth[p]; \
        type *dst = (type *)out->data[p] + \
                    slice_start * out->linesize[p] / sizeof(type); \
 \
        if (!((1 << p) & s->planes)) { \
            av_image_copy_plane((uint8_t *)dst, out->linesize[p], \
                                (const uint8_t *)src, in->linesize[p], \
                                s->linesize[p], slice_end - slice_start); \
            continue; \
        } \
 \
        for (int y = slice_start; y < slice_end; y++) { \
            for (int x = 0; x < width; x++) { \
                const float v = fmaxf(src[x], osrc[x] * decay); \
 \
                osrc[x] = v; \
                if (disabled) { \
                    dst[x] = src[x]; \
                } else { \
                    dst[x] = round(v); \
                } \
            } \
 \
            src += in->linesize[p] / sizeof(type); \
            osrc += width; \
            dst += out->linesize[p] / sizeof(type); \
        } \
    } \
 \
    return 0; \
}

LAGFUN(8, uint8_t, lrintf, 0)
LAGFUN(16, uint16_t, lrintf, 0)
LAGFUN(32, float, , 0)

LAGFUN(d8, uint8_t, lrintf, 1)
LAGFUN(d16, uint16_t, lrintf, 1)
LAGFUN(d32, float, , 1)

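A minimal standalone C sketch, separate from the file above, of the per-pixel recurrence the LAGFUN macro expands to: each output value is the maximum of the incoming sample and the previous output scaled by decay, so bright values linger and fade away gradually. The pixel sequence and the 0.95 decay below are made-up illustration data.

#include <math.h>
#include <stdio.h>

int main(void)
{
    /* brightness of one pixel over successive frames (hypothetical input) */
    const float src[8] = { 200.f, 0.f, 0.f, 0.f, 255.f, 0.f, 0.f, 0.f };
    const float decay  = 0.95f;
    float old = 0.f;

    for (int t = 0; t < 8; t++) {
        /* same update as the macro body: keep the brighter of the new
         * sample and the decayed running value stored in the old buffer */
        old = fmaxf(src[t], old * decay);
        printf("frame %d: in = %6.1f  out = %6.1f\n", t, src[t], old);
    }
    return 0;
}
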
static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    LagfunContext *s = ctx->priv;
    AVFilterLink *inlink = ctx->inputs[0];
    const AVPixFmtDescriptor *desc;
    int ret;

    desc = av_pix_fmt_desc_get(outlink->format);
    if (!desc)
        return AVERROR_BUG;
    s->nb_planes = av_pix_fmt_count_planes(outlink->format);
    s->depth = desc->comp[0].depth;
    s->lagfun[0] = s->depth <= 8 ? lagfun_frame8 : s->depth <= 16 ? lagfun_frame16 : lagfun_frame32;
    s->lagfun[1] = s->depth <= 8 ? lagfun_framed8 : s->depth <= 16 ? lagfun_framed16 : lagfun_framed32;

    if ((ret = av_image_fill_linesizes(s->linesize, inlink->format, inlink->w)) < 0)
        return ret;

    s->planewidth[1] = s->planewidth[2] = AV_CEIL_RSHIFT(inlink->w, desc->log2_chroma_w);
    s->planewidth[0] = s->planewidth[3] = inlink->w;
    s->planeheight[1] = s->planeheight[2] = AV_CEIL_RSHIFT(inlink->h, desc->log2_chroma_h);
    s->planeheight[0] = s->planeheight[3] = inlink->h;

    for (int p = 0; p < s->nb_planes; p++) {
        s->old[p] = av_calloc(s->planewidth[p] * s->planeheight[p], sizeof(*s->old[0]));
        if (!s->old[p])
            return AVERROR(ENOMEM);
    }

    return 0;
}

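For context on the plane geometry set up in config_output(): AV_CEIL_RSHIFT(a, b) is a rounding-up right shift, so for 4:2:0 content the chroma planes get ceil(w/2) x ceil(h/2) elements, and that is also the number of floats allocated per plane in s->old. A small sketch with a hypothetical 1919x1079 frame (odd sizes chosen so the rounding is visible); the CEIL_RSHIFT macro here is a local stand-in equivalent to the libavutil one for non-negative sizes.

#include <stdio.h>

/* rounding-up right shift, equivalent to AV_CEIL_RSHIFT for a >= 0 */
#define CEIL_RSHIFT(a, b) (((a) + (1 << (b)) - 1) >> (b))

int main(void)
{
    const int w = 1919, h = 1079;                    /* hypothetical frame size */
    const int log2_chroma_w = 1, log2_chroma_h = 1;  /* 4:2:0 subsampling       */

    const int cw = CEIL_RSHIFT(w, log2_chroma_w);
    const int ch = CEIL_RSHIFT(h, log2_chroma_h);

    printf("luma   plane: %dx%d -> %d floats of history\n", w, h, w * h);
    printf("chroma plane: %dx%d -> %d floats of history\n", cw, ch, cw * ch);
    return 0;
}
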
static int filter_frame(AVFilterLink *inlink, AVFrame *in)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    LagfunContext *s = ctx->priv;
    ThreadData td;
    AVFrame *out;

    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!out) {
        av_frame_free(&in);
        return AVERROR(ENOMEM);
    }
    out->pts = in->pts;

    td.out = out;
    td.in = in;
    ff_filter_execute(ctx, s->lagfun[!!ctx->is_disabled], &td, NULL,
                      FFMIN(s->planeheight[1], ff_filter_get_nb_threads(ctx)));

    av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

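The slice-threaded workers split each plane's rows with the usual (height * jobnr) / nb_jobs arithmetic, and filter_frame caps the job count at the height of the second (sub-sampled) plane so no job is left with zero rows of the smallest plane. A small sketch of that partitioning for a hypothetical 100-row plane and 8 jobs:

#include <stdio.h>

int main(void)
{
    const int height  = 100;  /* hypothetical plane height       */
    const int nb_jobs = 8;    /* e.g. number of worker threads   */

    for (int jobnr = 0; jobnr < nb_jobs; jobnr++) {
        /* same row split as the LAGFUN workers */
        const int slice_start = (height * jobnr)       / nb_jobs;
        const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
        printf("job %d: rows [%d, %d)\n", jobnr, slice_start, slice_end);
    }
    return 0;
}
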
static av_cold void uninit(AVFilterContext *ctx)
{
    LagfunContext *s = ctx->priv;

    for (int p = 0; p < s->nb_planes; p++)
        av_freep(&s->old[p]);
}

#define OFFSET(x) offsetof(LagfunContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_RUNTIME_PARAM

static const AVOption lagfun_options[] = {
    { "decay", "set decay", OFFSET(decay), AV_OPT_TYPE_FLOAT, {.dbl=.95}, 0, 1, FLAGS },
    { "planes", "set what planes to filter", OFFSET(planes), AV_OPT_TYPE_FLAGS, {.i64=15}, 0, 15, FLAGS },
    { NULL },
};

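The planes option is a 4-bit mask tested per plane with (1 << p) & s->planes; planes whose bit is clear are copied through untouched, as the early-out branch in the macro shows. A short sketch of how a few hypothetical mask values map onto a 3-plane YUV layout (the alpha bit is simply unused there):

#include <stdio.h>

int main(void)
{
    const char *names[3] = { "Y", "U", "V" };
    const int   masks[3] = { 15, 1, 6 };   /* hypothetical option values */

    for (int m = 0; m < 3; m++) {
        printf("planes=%2d:", masks[m]);
        for (int p = 0; p < 3; p++)
            printf(" %s=%s", names[p],
                   ((1 << p) & masks[m]) ? "filtered" : "copied");
        printf("\n");
    }
    return 0;
}
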
static const AVFilterPad inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
    },
};

static const AVFilterPad outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
};

AVFILTER_DEFINE_CLASS(lagfun);

const AVFilter ff_vf_lagfun = {
    .name            = "lagfun",
    .description     = NULL_IF_CONFIG_SMALL("Slowly update darker pixels."),
    .priv_size       = sizeof(LagfunContext),
    .priv_class      = &lagfun_class,
    .uninit          = uninit,
    FILTER_OUTPUTS(outputs),
    FILTER_INPUTS(inputs),
    FILTER_PIXFMTS_ARRAY(pixel_fmts),
    .flags           = AVFILTER_FLAG_SLICE_THREADS | AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL,
    .process_command = ff_filter_process_command,
};

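Because the filter is registered under the .name "lagfun", applications reach it through the normal libavfilter lookup. A sketch of that lookup, assuming the libavfilter development headers are installed and the program is linked with -lavfilter; it only looks the filter up by name and prints its description.

#include <stdio.h>
#include <libavfilter/avfilter.h>

int main(void)
{
    /* look the filter up the same way a graph description string would */
    const AVFilter *f = avfilter_get_by_name("lagfun");

    if (!f) {
        printf("lagfun is not available in this libavfilter build\n");
        return 1;
    }
    printf("%s: %s\n", f->name, f->description ? f->description : "(no description)");
    return 0;
}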