FFmpeg coverage

Directory: ../../../ffmpeg/
File:      src/libavfilter/vf_overlay_vaapi.c
Date:      2024-04-23 16:28:37

              Exec   Total   Coverage
Lines:           0     180      0.0%
Functions:       0      12      0.0%
Branches:        0      58      0.0%

Line Branch Exec Source
1 /*
2 * This file is part of FFmpeg.
3 *
4 * FFmpeg is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
8 *
9 * FFmpeg is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
13 *
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with FFmpeg; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 */
18 #include <string.h>
19
20 #include "libavutil/eval.h"
21 #include "libavutil/opt.h"
22 #include "libavutil/pixdesc.h"
23
24 #include "avfilter.h"
25 #include "framesync.h"
26 #include "internal.h"
27 #include "vaapi_vpp.h"
28 #include "video.h"
29
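/*
 * Variables usable in the x/y/w/h option expressions. Each enumerator
 * indexes the matching entry in var_names[] below; several variables have
 * both a long and a short alias (e.g. main_w / W).
 */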
30 enum var_name {
31 VAR_MAIN_IW, VAR_MW,
32 VAR_MAIN_IH, VAR_MH,
33 VAR_OVERLAY_IW,
34 VAR_OVERLAY_IH,
35 VAR_OVERLAY_X, VAR_OX,
36 VAR_OVERLAY_Y, VAR_OY,
37 VAR_OVERLAY_W, VAR_OW,
38 VAR_OVERLAY_H, VAR_OH,
39 VAR_VARS_NB
40 };
41
42 typedef struct OverlayVAAPIContext {
43 VAAPIVPPContext vpp_ctx; /**< must be the first field */
44 FFFrameSync fs;
45
46 double var_values[VAR_VARS_NB];
47 char *overlay_ox;
48 char *overlay_oy;
49 char *overlay_ow;
50 char *overlay_oh;
51 int ox;
52 int oy;
53 int ow;
54 int oh;
55 float alpha;
56 unsigned int blend_flags;
57 float blend_alpha;
58 } OverlayVAAPIContext;
59
60 static const char *const var_names[] = {
61 "main_w", "W", /* input width of the main layer */
62 "main_h", "H", /* input height of the main layer */
63 "overlay_iw", /* input width of the overlay layer */
64 "overlay_ih", /* input height of the overlay layer */
65 "overlay_x", "x", /* x position of the overlay layer inside of main */
66 "overlay_y", "y", /* y position of the overlay layer inside of main */
67 "overlay_w", "w", /* output width of overlay layer */
68 "overlay_h", "h", /* output height of overlay layer */
69 NULL
70 };
71
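/*
 * Parse and evaluate the x/y/w/h expressions. Width and height are evaluated
 * first (and once more so that each may reference the other), then x and y
 * (x re-evaluated so it may reference y), and finally w/h again in case they
 * depend on the computed x/y.
 */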
72 static int eval_expr(AVFilterContext *avctx)
73 {
74 OverlayVAAPIContext *ctx = avctx->priv;
75 double *var_values = ctx->var_values;
76 int ret = 0;
77 AVExpr *ox_expr = NULL, *oy_expr = NULL;
78 AVExpr *ow_expr = NULL, *oh_expr = NULL;
79
80 #define PARSE_EXPR(e, s) {\
81 ret = av_expr_parse(&(e), s, var_names, NULL, NULL, NULL, NULL, 0, ctx); \
82 if (ret < 0) {\
83 av_log(ctx, AV_LOG_ERROR, "Error when parsing '%s'.\n", s);\
84 goto release;\
85 }\
86 }
87 PARSE_EXPR(ox_expr, ctx->overlay_ox)
88 PARSE_EXPR(oy_expr, ctx->overlay_oy)
89 PARSE_EXPR(ow_expr, ctx->overlay_ow)
90 PARSE_EXPR(oh_expr, ctx->overlay_oh)
91 #undef PARSE_EXPR
92
93 var_values[VAR_OVERLAY_W] =
94 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
95 var_values[VAR_OVERLAY_H] =
96 var_values[VAR_OH] = av_expr_eval(oh_expr, var_values, NULL);
97
98 /* calc again in case ow is relative to oh */
99 var_values[VAR_OVERLAY_W] =
100 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
101
102 var_values[VAR_OVERLAY_X] =
103 var_values[VAR_OX] = av_expr_eval(ox_expr, var_values, NULL);
104 var_values[VAR_OVERLAY_Y] =
105 var_values[VAR_OY] = av_expr_eval(oy_expr, var_values, NULL);
106
107 /* calc again in case ox is relative to oy */
108 var_values[VAR_OVERLAY_X] =
109 var_values[VAR_OX] = av_expr_eval(ox_expr, var_values, NULL);
110
111 /* calc overlay_w and overlay_h again in case they are relative to ox/oy */
112 var_values[VAR_OVERLAY_W] =
113 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
114 var_values[VAR_OVERLAY_H] =
115 var_values[VAR_OH] = av_expr_eval(oh_expr, var_values, NULL);
116 var_values[VAR_OVERLAY_W] =
117 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
118
119 release:
120 av_expr_free(ox_expr);
121 av_expr_free(oy_expr);
122 av_expr_free(ow_expr);
123 av_expr_free(oh_expr);
124
125 return ret;
126 }
127
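/*
 * Query the driver's video processing pipeline capabilities and make sure
 * surface blending with a global alpha value is supported; the overlay
 * cannot be rendered without it.
 */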
128 static int overlay_vaapi_build_filter_params(AVFilterContext *avctx)
129 {
130 VAAPIVPPContext *vpp_ctx = avctx->priv;
131 VAStatus vas;
132 int support_flag;
133 VAProcPipelineCaps pipeline_caps;
134
135 memset(&pipeline_caps, 0, sizeof(pipeline_caps));
136 vas = vaQueryVideoProcPipelineCaps(vpp_ctx->hwctx->display,
137 vpp_ctx->va_context,
138 NULL, 0,
139 &pipeline_caps);
140 if (vas != VA_STATUS_SUCCESS) {
141 av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
142 "caps: %d (%s).\n", vas, vaErrorStr(vas));
143 return AVERROR(EIO);
144 }
145
146 if (!pipeline_caps.blend_flags) {
147 av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support overlay\n");
148 return AVERROR(EINVAL);
149 }
150
151 support_flag = pipeline_caps.blend_flags & VA_BLEND_GLOBAL_ALPHA;
152 if (!support_flag) {
153 av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support global alpha blending\n");
154 return AVERROR(EINVAL);
155 }
156
157 return 0;
158 }
159
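/*
 * Framesync callback: render the main frame and, if present, the overlay
 * frame in a single VPP call. params[0] maps the main input onto the whole
 * output surface; params[1] places the overlay surface inside overlay_region
 * using the blend state derived from the filter options.
 */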
160 static int overlay_vaapi_blend(FFFrameSync *fs)
161 {
162 AVFilterContext *avctx = fs->parent;
163 AVFilterLink *outlink = avctx->outputs[0];
164 OverlayVAAPIContext *ctx = avctx->priv;
165 VAAPIVPPContext *vpp_ctx = avctx->priv;
166 AVFrame *input_main, *input_overlay;
167 AVFrame *output;
168 VAProcPipelineParameterBuffer params[2];
169 VABlendState blend_state = { 0 }; /**< Blend State */
170 VARectangle overlay_region, output_region;
171 int err;
172
173 err = ff_framesync_get_frame(fs, 0, &input_main, 0);
174 if (err < 0)
175 return err;
176 err = ff_framesync_get_frame(fs, 1, &input_overlay, 0);
177 if (err < 0)
178 return err;
179
180 av_log(avctx, AV_LOG_DEBUG, "Filter main: %s, %ux%u (%"PRId64").\n",
181 av_get_pix_fmt_name(input_main->format),
182 input_main->width, input_main->height, input_main->pts);
183
184 if (vpp_ctx->va_context == VA_INVALID_ID)
185 return AVERROR(EINVAL);
186
187 output = ff_get_video_buffer(outlink, outlink->w, outlink->h);
188 if (!output) {
189 err = AVERROR(ENOMEM);
190 goto fail;
191 }
192
193 err = av_frame_copy_props(output, input_main);
194 if (err < 0)
195 goto fail;
196
197 err = ff_vaapi_vpp_init_params(avctx, &params[0],
198 input_main, output);
199 if (err < 0)
200 goto fail;
201
202 output_region = (VARectangle) {
203 .x = 0,
204 .y = 0,
205 .width = output->width,
206 .height = output->height,
207 };
208
209 params[0].output_region = &output_region;
210 params[0].output_background_color = VAAPI_VPP_BACKGROUND_BLACK;
211
212 if (input_overlay) {
213 av_log(avctx, AV_LOG_DEBUG, "Filter overlay: %s, %ux%u (%"PRId64").\n",
214 av_get_pix_fmt_name(input_overlay->format),
215 input_overlay->width, input_overlay->height, input_overlay->pts);
216
217 overlay_region = (VARectangle) {
218 .x = ctx->ox,
219 .y = ctx->oy,
220 .width = ctx->ow ? ctx->ow : input_overlay->width,
221 .height = ctx->oh ? ctx->oh : input_overlay->height,
222 };
223
224 if (overlay_region.x + overlay_region.width > input_main->width ||
225 overlay_region.y + overlay_region.height > input_main->height) {
226 av_log(ctx, AV_LOG_WARNING,
227 "The overlay image exceeds the bounds of the main image; "
228 "it will be cropped to fit within the main image.\n");
229 }
230
231 memcpy(&params[1], &params[0], sizeof(params[0]));
232
233 blend_state.flags = ctx->blend_flags;
234 blend_state.global_alpha = ctx->blend_alpha;
235 params[1].blend_state = &blend_state;
236
237 params[1].surface = (VASurfaceID)(uintptr_t)input_overlay->data[3];
238 params[1].surface_region = NULL;
239 params[1].output_region = &overlay_region;
240 }
241
242 err = ff_vaapi_vpp_render_pictures(avctx, params, input_overlay ? 2 : 1, output);
243 if (err < 0)
244 goto fail;
245
246 av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
247 av_get_pix_fmt_name(output->format),
248 output->width, output->height, output->pts);
249
250 return ff_filter_frame(outlink, output);
251
252 fail:
253 av_frame_free(&output);
254 return err;
255 }
256
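/*
 * Return 1 if the link's pixel format carries an alpha component, 0
 * otherwise. For VAAPI links the underlying software format of the hardware
 * frames context is checked.
 */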
257 static int have_alpha_planar(AVFilterLink *link)
258 {
259 enum AVPixelFormat pix_fmt = link->format;
260 const AVPixFmtDescriptor *desc;
261 AVHWFramesContext *fctx;
262
263 if (link->format == AV_PIX_FMT_VAAPI) {
264 fctx = (AVHWFramesContext *)link->hw_frames_ctx->data;
265 pix_fmt = fctx->sw_format;
266 }
267
268 desc = av_pix_fmt_desc_get(pix_fmt);
269 if (!desc)
270 return 0;
271
272 return !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
273 }
274
275 static int overlay_vaapi_config_input_main(AVFilterLink *inlink)
276 {
277 AVFilterContext *avctx = inlink->dst;
278 OverlayVAAPIContext *ctx = avctx->priv;
279
280 ctx->var_values[VAR_MAIN_IW] =
281 ctx->var_values[VAR_MW] = inlink->w;
282 ctx->var_values[VAR_MAIN_IH] =
283 ctx->var_values[VAR_MH] = inlink->h;
284
285 return ff_vaapi_vpp_config_input(inlink);
286 }
287
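/*
 * Evaluate the overlay placement expressions once both input sizes are known
 * and derive the blend flags: global alpha when alpha < 1.0, premultiplied
 * alpha when the overlay format has an alpha channel.
 */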
288 static int overlay_vaapi_config_input_overlay(AVFilterLink *inlink)
289 {
290 AVFilterContext *avctx = inlink->dst;
291 OverlayVAAPIContext *ctx = avctx->priv;
292 int ret;
293
294 ctx->var_values[VAR_OVERLAY_IW] = inlink->w;
295 ctx->var_values[VAR_OVERLAY_IH] = inlink->h;
296
297 ret = eval_expr(avctx);
298 if (ret < 0)
299 return ret;
300
301 ctx->ox = (int)ctx->var_values[VAR_OX];
302 ctx->oy = (int)ctx->var_values[VAR_OY];
303 ctx->ow = (int)ctx->var_values[VAR_OW];
304 ctx->oh = (int)ctx->var_values[VAR_OH];
305
306 ctx->blend_flags = 0;
307 ctx->blend_alpha = 1.0f;
308
309 if (ctx->alpha < 1.0f) {
310 ctx->blend_flags |= VA_BLEND_GLOBAL_ALPHA;
311 ctx->blend_alpha = ctx->alpha;
312 }
313
314 if (have_alpha_planar(inlink))
315 ctx->blend_flags |= VA_BLEND_PREMULTIPLIED_ALPHA;
316
317 return 0;
318 }
319
320 static int overlay_vaapi_config_output(AVFilterLink *outlink)
321 {
322 AVFilterContext *avctx = outlink->src;
323 OverlayVAAPIContext *ctx = avctx->priv;
324 VAAPIVPPContext *vpp_ctx = avctx->priv;
325 int err;
326
327 outlink->time_base = avctx->inputs[0]->time_base;
328 vpp_ctx->output_width = avctx->inputs[0]->w;
329 vpp_ctx->output_height = avctx->inputs[0]->h;
330
331 err = ff_vaapi_vpp_config_output(outlink);
332 if (err < 0)
333 return err;
334
335 err = overlay_vaapi_build_filter_params(avctx);
336 if (err < 0)
337 return err;
338
339 err = ff_framesync_init_dualinput(&ctx->fs, avctx);
340 if (err < 0)
341 return err;
342
343 ctx->fs.on_event = overlay_vaapi_blend;
344 ctx->fs.time_base = outlink->time_base;
345
346 return ff_framesync_configure(&ctx->fs);
347 }
348
349 static av_cold int overlay_vaapi_init(AVFilterContext *avctx)
350 {
351 VAAPIVPPContext *vpp_ctx = avctx->priv;
352
353 ff_vaapi_vpp_ctx_init(avctx);
354 vpp_ctx->output_format = AV_PIX_FMT_NONE;
355
356 return 0;
357 }
358
359 static int overlay_vaapi_activate(AVFilterContext *avctx)
360 {
361 OverlayVAAPIContext *ctx = avctx->priv;
362
363 return ff_framesync_activate(&ctx->fs);
364 }
365
366 static av_cold void overlay_vaapi_uninit(AVFilterContext *avctx)
367 {
368 OverlayVAAPIContext *ctx = avctx->priv;
369
370 ff_framesync_uninit(&ctx->fs);
371 ff_vaapi_vpp_ctx_uninit(avctx);
372 }
373
374 #define OFFSET(x) offsetof(OverlayVAAPIContext, x)
375 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
376 static const AVOption overlay_vaapi_options[] = {
377 { "x", "Overlay x position", OFFSET(overlay_ox), AV_OPT_TYPE_STRING, { .str="0"}, 0, 255, .flags = FLAGS},
378 { "y", "Overlay y position", OFFSET(overlay_oy), AV_OPT_TYPE_STRING, { .str="0"}, 0, 255, .flags = FLAGS},
379 { "w", "Overlay width", OFFSET(overlay_ow), AV_OPT_TYPE_STRING, { .str="overlay_iw"}, 0, 255, .flags = FLAGS},
380 { "h", "Overlay height", OFFSET(overlay_oh), AV_OPT_TYPE_STRING, { .str="overlay_ih*w/overlay_iw"}, 0, 255, .flags = FLAGS},
381 { "alpha", "Overlay global alpha", OFFSET(alpha), AV_OPT_TYPE_FLOAT, { .dbl = 1.0 }, 0.0, 1.0, .flags = FLAGS },
382 { "eof_action", "Action to take when encountering EOF from secondary input ",
383 OFFSET(fs.opt_eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
384 EOF_ACTION_REPEAT, EOF_ACTION_PASS, .flags = FLAGS, .unit = "eof_action" },
385 { "repeat", "Repeat the previous frame.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, .unit = "eof_action" },
386 { "endall", "End both streams.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, .unit = "eof_action" },
387 { "pass", "Pass through the main input.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS }, .flags = FLAGS, .unit = "eof_action" },
388 { "shortest", "force termination when the shortest input terminates", OFFSET(fs.opt_shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
389 { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(fs.opt_repeatlast), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS },
390 { NULL },
391 };
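/*
 * Illustrative example (parameter values are hypothetical, not taken from
 * the source):
 *   overlay_vaapi=x=main_w-overlay_w-16:y=16:alpha=0.7
 * places the overlay 16 pixels from the top-right corner of the main input
 * and blends it with a global alpha of 0.7.
 */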
392
393 FRAMESYNC_DEFINE_CLASS(overlay_vaapi, OverlayVAAPIContext, fs);
394
395 static const AVFilterPad overlay_vaapi_inputs[] = {
396 {
397 .name = "main",
398 .type = AVMEDIA_TYPE_VIDEO,
399 .config_props = overlay_vaapi_config_input_main,
400 },
401 {
402 .name = "overlay",
403 .type = AVMEDIA_TYPE_VIDEO,
404 .config_props = overlay_vaapi_config_input_overlay,
405 },
406 };
407
408 static const AVFilterPad overlay_vaapi_outputs[] = {
409 {
410 .name = "default",
411 .type = AVMEDIA_TYPE_VIDEO,
412 .config_props = overlay_vaapi_config_output,
413 },
414 };
415
416 const AVFilter ff_vf_overlay_vaapi = {
417 .name = "overlay_vaapi",
418 .description = NULL_IF_CONFIG_SMALL("Overlay one video on top of another"),
419 .priv_size = sizeof(OverlayVAAPIContext),
420 .priv_class = &overlay_vaapi_class,
421 .init = &overlay_vaapi_init,
422 .uninit = &overlay_vaapi_uninit,
423 .activate = &overlay_vaapi_activate,
424 .preinit = overlay_vaapi_framesync_preinit,
425 FILTER_INPUTS(overlay_vaapi_inputs),
426 FILTER_OUTPUTS(overlay_vaapi_outputs),
427 FILTER_SINGLE_PIXFMT(AV_PIX_FMT_VAAPI),
428 .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
429 };
430