FFmpeg coverage

Directory: ../../../ffmpeg/
File:      src/libavfilter/vf_overlay_vaapi.c
Date:      2024-11-20 23:03:26

              Exec   Total   Coverage
Lines:           0     181       0.0%
Functions:       0      12       0.0%
Branches:        0      58       0.0%

Line  Branch  Exec  Source
1 /*
2 * This file is part of FFmpeg.
3 *
4 * FFmpeg is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
8 *
9 * FFmpeg is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
13 *
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with FFmpeg; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 */
18 #include <string.h>
19
20 #include "libavutil/opt.h"
21 #include "libavutil/pixdesc.h"
22
23 #include "avfilter.h"
24 #include "filters.h"
25 #include "framesync.h"
26 #include "vaapi_vpp.h"
27 #include "video.h"
28 #include "libavutil/eval.h"
29
30 enum var_name {
31 VAR_MAIN_IW, VAR_MW,
32 VAR_MAIN_IH, VAR_MH,
33 VAR_OVERLAY_IW,
34 VAR_OVERLAY_IH,
35 VAR_OVERLAY_X, VAR_OX,
36 VAR_OVERLAY_Y, VAR_OY,
37 VAR_OVERLAY_W, VAR_OW,
38 VAR_OVERLAY_H, VAR_OH,
39 VAR_VARS_NB
40 };
41
42 typedef struct OverlayVAAPIContext {
43 VAAPIVPPContext vpp_ctx; /**< must be the first field */
44 FFFrameSync fs;
45
46 double var_values[VAR_VARS_NB];
47 char *overlay_ox;
48 char *overlay_oy;
49 char *overlay_ow;
50 char *overlay_oh;
51 int ox;
52 int oy;
53 int ow;
54 int oh;
55 float alpha;
56 unsigned int blend_flags;
57 float blend_alpha;
58 } OverlayVAAPIContext;
59
60 static const char *const var_names[] = {
61 "main_w", "W", /* input width of the main layer */
62 "main_h", "H", /* input height of the main layer */
63 "overlay_iw", /* input width of the overlay layer */
64 "overlay_ih", /* input height of the overlay layer */
65 "overlay_x", "x", /* x position of the overlay layer inside of main */
66 "overlay_y", "y", /* y position of the overlay layer inside of main */
67 "overlay_w", "w", /* output width of overlay layer */
68 "overlay_h", "h", /* output height of overlay layer */
69 NULL
70 };
71
72 static int eval_expr(AVFilterContext *avctx)
73 {
74 OverlayVAAPIContext *ctx = avctx->priv;
75 double *var_values = ctx->var_values;
76 int ret = 0;
77 AVExpr *ox_expr = NULL, *oy_expr = NULL;
78 AVExpr *ow_expr = NULL, *oh_expr = NULL;
79
80 #define PARSE_EXPR(e, s) {\
81 ret = av_expr_parse(&(e), s, var_names, NULL, NULL, NULL, NULL, 0, ctx); \
82 if (ret < 0) {\
83 av_log(ctx, AV_LOG_ERROR, "Error when parsing '%s'.\n", s);\
84 goto release;\
85 }\
86 }
87 PARSE_EXPR(ox_expr, ctx->overlay_ox)
88 PARSE_EXPR(oy_expr, ctx->overlay_oy)
89 PARSE_EXPR(ow_expr, ctx->overlay_ow)
90 PARSE_EXPR(oh_expr, ctx->overlay_oh)
91 #undef PARSE_EXPR
92
93 var_values[VAR_OVERLAY_W] =
94 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
95 var_values[VAR_OVERLAY_H] =
96 var_values[VAR_OH] = av_expr_eval(oh_expr, var_values, NULL);
97
98 /* calc again in case ow is relative to oh */
99 var_values[VAR_OVERLAY_W] =
100 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
101
102 var_values[VAR_OVERLAY_X] =
103 var_values[VAR_OX] = av_expr_eval(ox_expr, var_values, NULL);
104 var_values[VAR_OVERLAY_Y] =
105 var_values[VAR_OY] = av_expr_eval(oy_expr, var_values, NULL);
106
107 /* calc again in case ox is relative to oy */
108 var_values[VAR_OVERLAY_X] =
109 var_values[VAR_OX] = av_expr_eval(ox_expr, var_values, NULL);
110
111 /* calc overlay_w and overlay_h again in case they are relative to ox,oy */
112 var_values[VAR_OVERLAY_W] =
113 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
114 var_values[VAR_OVERLAY_H] =
115 var_values[VAR_OH] = av_expr_eval(oh_expr, var_values, NULL);
116 var_values[VAR_OVERLAY_W] =
117 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
118
119 release:
120 av_expr_free(ox_expr);
121 av_expr_free(oy_expr);
122 av_expr_free(ow_expr);
123 av_expr_free(oh_expr);
124
125 return ret;
126 }
127
128 static int overlay_vaapi_build_filter_params(AVFilterContext *avctx)
129 {
130 VAAPIVPPContext *vpp_ctx = avctx->priv;
131 VAStatus vas;
132 int support_flag;
133 VAProcPipelineCaps pipeline_caps;
134
135 memset(&pipeline_caps, 0, sizeof(pipeline_caps));
136 vas = vaQueryVideoProcPipelineCaps(vpp_ctx->hwctx->display,
137 vpp_ctx->va_context,
138 NULL, 0,
139 &pipeline_caps);
140 if (vas != VA_STATUS_SUCCESS) {
141 av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
142 "caps: %d (%s).\n", vas, vaErrorStr(vas));
143 return AVERROR(EIO);
144 }
145
146 if (!pipeline_caps.blend_flags) {
147 av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support overlay\n");
148 return AVERROR(EINVAL);
149 }
150
151 support_flag = pipeline_caps.blend_flags & VA_BLEND_GLOBAL_ALPHA;
152 if (!support_flag) {
153 av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support global alpha blending\n");
154 return AVERROR(EINVAL);
155 }
156
157 return 0;
158 }
159
160 static int overlay_vaapi_blend(FFFrameSync *fs)
161 {
162 AVFilterContext *avctx = fs->parent;
163 AVFilterLink *outlink = avctx->outputs[0];
164 OverlayVAAPIContext *ctx = avctx->priv;
165 VAAPIVPPContext *vpp_ctx = avctx->priv;
166 AVFrame *input_main, *input_overlay;
167 AVFrame *output;
168 VAProcPipelineParameterBuffer params[2];
169 VABlendState blend_state = { 0 }; /**< Blend State */
170 VARectangle overlay_region, output_region;
171 int err;
172
173 err = ff_framesync_get_frame(fs, 0, &input_main, 0);
174 if (err < 0)
175 return err;
176 err = ff_framesync_get_frame(fs, 1, &input_overlay, 0);
177 if (err < 0)
178 return err;
179
180 av_log(avctx, AV_LOG_DEBUG, "Filter main: %s, %ux%u (%"PRId64").\n",
181 av_get_pix_fmt_name(input_main->format),
182 input_main->width, input_main->height, input_main->pts);
183
184 if (vpp_ctx->va_context == VA_INVALID_ID)
185 return AVERROR(EINVAL);
186
187 output = ff_get_video_buffer(outlink, outlink->w, outlink->h);
188 if (!output) {
189 err = AVERROR(ENOMEM);
190 goto fail;
191 }
192
193 err = av_frame_copy_props(output, input_main);
194 if (err < 0)
195 goto fail;
196
197 err = ff_vaapi_vpp_init_params(avctx, &params[0],
198 input_main, output);
199 if (err < 0)
200 goto fail;
201
202 output_region = (VARectangle) {
203 .x = 0,
204 .y = 0,
205 .width = output->width,
206 .height = output->height,
207 };
208
209 params[0].output_region = &output_region;
210 params[0].output_background_color = VAAPI_VPP_BACKGROUND_BLACK;
211
212 if (input_overlay) {
213 av_log(avctx, AV_LOG_DEBUG, "Filter overlay: %s, %ux%u (%"PRId64").\n",
214 av_get_pix_fmt_name(input_overlay->format),
215 input_overlay->width, input_overlay->height, input_overlay->pts);
216
217 overlay_region = (VARectangle) {
218 .x = ctx->ox,
219 .y = ctx->oy,
220 .width = ctx->ow ? ctx->ow : input_overlay->width,
221 .height = ctx->oh ? ctx->oh : input_overlay->height,
222 };
223
224 if (overlay_region.x + overlay_region.width > input_main->width ||
225 overlay_region.y + overlay_region.height > input_main->height) {
226 av_log(ctx, AV_LOG_WARNING,
227 "The overlay image exceeds the scope of the main image, "
228 "will crop the overlay image according based on the main image.\n");
229 }
230
231 memcpy(&params[1], &params[0], sizeof(params[0]));
232
233 blend_state.flags = ctx->blend_flags;
234 blend_state.global_alpha = ctx->blend_alpha;
235 params[1].blend_state = &blend_state;
236
237 params[1].surface = (VASurfaceID)(uintptr_t)input_overlay->data[3];
238 params[1].surface_region = NULL;
239 params[1].output_region = &overlay_region;
240 }
241
242 err = ff_vaapi_vpp_render_pictures(avctx, params, input_overlay ? 2 : 1, output);
243 if (err < 0)
244 goto fail;
245
246 av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
247 av_get_pix_fmt_name(output->format),
248 output->width, output->height, output->pts);
249
250 return ff_filter_frame(outlink, output);
251
252 fail:
253 av_frame_free(&output);
254 return err;
255 }
256
257 static int have_alpha_planar(AVFilterLink *link)
258 {
259 FilterLink *l = ff_filter_link(link);
260 enum AVPixelFormat pix_fmt = link->format;
261 const AVPixFmtDescriptor *desc;
262 AVHWFramesContext *fctx;
263
264 if (link->format == AV_PIX_FMT_VAAPI) {
265 fctx = (AVHWFramesContext *)l->hw_frames_ctx->data;
266 pix_fmt = fctx->sw_format;
267 }
268
269 desc = av_pix_fmt_desc_get(pix_fmt);
270 if (!desc)
271 return 0;
272
273 return !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
274 }
275
276 static int overlay_vaapi_config_input_main(AVFilterLink *inlink)
277 {
278 AVFilterContext *avctx = inlink->dst;
279 OverlayVAAPIContext *ctx = avctx->priv;
280
281 ctx->var_values[VAR_MAIN_IW] =
282 ctx->var_values[VAR_MW] = inlink->w;
283 ctx->var_values[VAR_MAIN_IH] =
284 ctx->var_values[VAR_MH] = inlink->h;
285
286 return ff_vaapi_vpp_config_input(inlink);
287 }
288
289 static int overlay_vaapi_config_input_overlay(AVFilterLink *inlink)
290 {
291 AVFilterContext *avctx = inlink->dst;
292 OverlayVAAPIContext *ctx = avctx->priv;
293 int ret;
294
295 ctx->var_values[VAR_OVERLAY_IW] = inlink->w;
296 ctx->var_values[VAR_OVERLAY_IH] = inlink->h;
297
298 ret = eval_expr(avctx);
299 if (ret < 0)
300 return ret;
301
302 ctx->ox = (int)ctx->var_values[VAR_OX];
303 ctx->oy = (int)ctx->var_values[VAR_OY];
304 ctx->ow = (int)ctx->var_values[VAR_OW];
305 ctx->oh = (int)ctx->var_values[VAR_OH];
306
307 ctx->blend_flags = 0;
308 ctx->blend_alpha = 1.0f;
309
310 if (ctx->alpha < 1.0f) {
311 ctx->blend_flags |= VA_BLEND_GLOBAL_ALPHA;
312 ctx->blend_alpha = ctx->alpha;
313 }
314
315 if (have_alpha_planar(inlink))
316 ctx->blend_flags |= VA_BLEND_PREMULTIPLIED_ALPHA;
317
318 return 0;
319 }
320
321 static int overlay_vaapi_config_output(AVFilterLink *outlink)
322 {
323 AVFilterContext *avctx = outlink->src;
324 OverlayVAAPIContext *ctx = avctx->priv;
325 VAAPIVPPContext *vpp_ctx = avctx->priv;
326 int err;
327
328 outlink->time_base = avctx->inputs[0]->time_base;
329 vpp_ctx->output_width = avctx->inputs[0]->w;
330 vpp_ctx->output_height = avctx->inputs[0]->h;
331
332 err = ff_vaapi_vpp_config_output(outlink);
333 if (err < 0)
334 return err;
335
336 err = overlay_vaapi_build_filter_params(avctx);
337 if (err < 0)
338 return err;
339
340 err = ff_framesync_init_dualinput(&ctx->fs, avctx);
341 if (err < 0)
342 return err;
343
344 ctx->fs.on_event = overlay_vaapi_blend;
345 ctx->fs.time_base = outlink->time_base;
346
347 return ff_framesync_configure(&ctx->fs);
348 }
349
350 static av_cold int overlay_vaapi_init(AVFilterContext *avctx)
351 {
352 VAAPIVPPContext *vpp_ctx = avctx->priv;
353
354 ff_vaapi_vpp_ctx_init(avctx);
355 vpp_ctx->output_format = AV_PIX_FMT_NONE;
356
357 return 0;
358 }
359
360 static int overlay_vaapi_activate(AVFilterContext *avctx)
361 {
362 OverlayVAAPIContext *ctx = avctx->priv;
363
364 return ff_framesync_activate(&ctx->fs);
365 }
366
367 static av_cold void overlay_vaapi_uninit(AVFilterContext *avctx)
368 {
369 OverlayVAAPIContext *ctx = avctx->priv;
370
371 ff_framesync_uninit(&ctx->fs);
372 ff_vaapi_vpp_ctx_uninit(avctx);
373 }
374
375 #define OFFSET(x) offsetof(OverlayVAAPIContext, x)
376 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
377 static const AVOption overlay_vaapi_options[] = {
378 { "x", "Overlay x position", OFFSET(overlay_ox), AV_OPT_TYPE_STRING, { .str="0"}, 0, 255, .flags = FLAGS},
379 { "y", "Overlay y position", OFFSET(overlay_oy), AV_OPT_TYPE_STRING, { .str="0"}, 0, 255, .flags = FLAGS},
380 { "w", "Overlay width", OFFSET(overlay_ow), AV_OPT_TYPE_STRING, { .str="overlay_iw"}, 0, 255, .flags = FLAGS},
381 { "h", "Overlay height", OFFSET(overlay_oh), AV_OPT_TYPE_STRING, { .str="overlay_ih*w/overlay_iw"}, 0, 255, .flags = FLAGS},
382 { "alpha", "Overlay global alpha", OFFSET(alpha), AV_OPT_TYPE_FLOAT, { .dbl = 1.0 }, 0.0, 1.0, .flags = FLAGS },
383 { "eof_action", "Action to take when encountering EOF from secondary input ",
384 OFFSET(fs.opt_eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
385 EOF_ACTION_REPEAT, EOF_ACTION_PASS, .flags = FLAGS, .unit = "eof_action" },
386 { "repeat", "Repeat the previous frame.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, .unit = "eof_action" },
387 { "endall", "End both streams.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, .unit = "eof_action" },
388 { "pass", "Pass through the main input.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS }, .flags = FLAGS, .unit = "eof_action" },
389 { "shortest", "force termination when the shortest input terminates", OFFSET(fs.opt_shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
390 { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(fs.opt_repeatlast), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS },
391 { NULL },
392 };
393
394 FRAMESYNC_DEFINE_CLASS(overlay_vaapi, OverlayVAAPIContext, fs);
395
396 static const AVFilterPad overlay_vaapi_inputs[] = {
397 {
398 .name = "main",
399 .type = AVMEDIA_TYPE_VIDEO,
400 .config_props = overlay_vaapi_config_input_main,
401 },
402 {
403 .name = "overlay",
404 .type = AVMEDIA_TYPE_VIDEO,
405 .config_props = overlay_vaapi_config_input_overlay,
406 },
407 };
408
409 static const AVFilterPad overlay_vaapi_outputs[] = {
410 {
411 .name = "default",
412 .type = AVMEDIA_TYPE_VIDEO,
413 .config_props = &overlay_vaapi_config_output,
414 },
415 };
416
417 const AVFilter ff_vf_overlay_vaapi = {
418 .name = "overlay_vaapi",
419 .description = NULL_IF_CONFIG_SMALL("Overlay one video on top of another"),
420 .priv_size = sizeof(OverlayVAAPIContext),
421 .priv_class = &overlay_vaapi_class,
422 .init = &overlay_vaapi_init,
423 .uninit = &overlay_vaapi_uninit,
424 .activate = &overlay_vaapi_activate,
425 .preinit = overlay_vaapi_framesync_preinit,
426 FILTER_INPUTS(overlay_vaapi_inputs),
427 FILTER_OUTPUTS(overlay_vaapi_outputs),
428 FILTER_SINGLE_PIXFMT(AV_PIX_FMT_VAAPI),
429 .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
430 };
431
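
The listing above shows how the filter's options and expression variables fit together. As a hedged usage sketch (not part of this coverage report): both inputs must already be VAAPI hardware frames (FILTER_SINGLE_PIXFMT(AV_PIX_FMT_VAAPI)), so software frames are typically sent through hwupload first; giving the overlay an alpha-capable software format such as yuva420p (where the driver supports it) lets have_alpha_planar() enable VA_BLEND_PREMULTIPLIED_ALPHA, while the "alpha" option drives VA_BLEND_GLOBAL_ALPHA. The file names, render node path, and h264_vaapi encoder below are placeholders.

    ffmpeg -init_hw_device vaapi=va:/dev/dri/renderD128 -filter_hw_device va \
        -i main.mp4 -i logo.png \
        -filter_complex "[0:v]hwupload[base];[1:v]format=yuva420p,hwupload[ovl];[base][ovl]overlay_vaapi=x=main_w-overlay_iw-16:y=main_h-overlay_ih-16:alpha=0.7" \
        -c:v h264_vaapi output.mp4

The x/y/w/h options accept the expression variables declared in var_names[] (main_w/W, main_h/H, overlay_iw, overlay_ih, x, y, w, h). They are evaluated in eval_expr() from overlay_vaapi_config_input_overlay(), so a placement like the bottom-right example above is resolved once at link configuration time, not per frame. The eof_action, shortest and repeatlast options map onto the FFFrameSync fields embedded in the filter context.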