FFmpeg coverage


Directory: ../../../ffmpeg/
File: src/libavfilter/vf_overlay_vaapi.c
Date: 2022-12-05 20:26:17
            Exec   Total   Coverage
Lines:         0     227       0.0%
Functions:     0      13       0.0%
Branches:      0      74       0.0%

Line Branch Exec Source
1 /*
2 * This file is part of FFmpeg.
3 *
4 * FFmpeg is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
8 *
9 * FFmpeg is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
13 *
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with FFmpeg; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 */
18 #include <string.h>
19
20 #include "libavutil/avassert.h"
21 #include "libavutil/mem.h"
22 #include "libavutil/opt.h"
23 #include "libavutil/pixdesc.h"
24
25 #include "avfilter.h"
26 #include "framesync.h"
27 #include "formats.h"
28 #include "internal.h"
29 #include "vaapi_vpp.h"
30 #include "libavutil/eval.h"
31
32 enum var_name {
33 VAR_MAIN_IW, VAR_MW,
34 VAR_MAIN_IH, VAR_MH,
35 VAR_OVERLAY_IW,
36 VAR_OVERLAY_IH,
37 VAR_OVERLAY_X, VAR_OX,
38 VAR_OVERLAY_Y, VAR_OY,
39 VAR_OVERLAY_W, VAR_OW,
40 VAR_OVERLAY_H, VAR_OH,
41 VAR_VARS_NB
42 };
43
44 typedef struct OverlayVAAPIContext {
45 VAAPIVPPContext vpp_ctx; /**< must be the first field */
46 FFFrameSync fs;
47
48 double var_values[VAR_VARS_NB];
49 char *overlay_ox;
50 char *overlay_oy;
51 char *overlay_ow;
52 char *overlay_oh;
53 int ox;
54 int oy;
55 int ow;
56 int oh;
57 float alpha;
58 unsigned int blend_flags;
59 float blend_alpha;
60 } OverlayVAAPIContext;
61
62 static const char *const var_names[] = {
63 "main_w", "W", /* input width of the main layer */
64 "main_h", "H", /* input height of the main layer */
65 "overlay_iw", /* input width of the overlay layer */
66 "overlay_ih", /* input height of the overlay layer */
67 "overlay_x", "x", /* x position of the overlay layer inside of main */
68 "overlay_y", "y", /* y position of the overlay layer inside of main */
69 "overlay_w", "w", /* output width of overlay layer */
70 "overlay_h", "h", /* output height of overlay layer */
71 NULL
72 };
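/*
 * These variables are available in the x/y/w/h option expressions below. As
 * a purely illustrative example (numbers not taken from this file): with a
 * 1920x1080 main input and a 640x360 overlay, x=(W-w)/2 and y=(H-h)/2
 * evaluate to 640 and 360, centering the overlay; the default h expression,
 * overlay_ih*w/overlay_iw, keeps the overlay aspect ratio when only w is set.
 */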
73
74 static int eval_expr(AVFilterContext *avctx)
75 {
76 OverlayVAAPIContext *ctx = avctx->priv;
77 double *var_values = ctx->var_values;
78 int ret = 0;
79 AVExpr *ox_expr = NULL, *oy_expr = NULL;
80 AVExpr *ow_expr = NULL, *oh_expr = NULL;
81
82 #define PARSE_EXPR(e, s) {\
83 ret = av_expr_parse(&(e), s, var_names, NULL, NULL, NULL, NULL, 0, ctx); \
84 if (ret < 0) {\
85 av_log(ctx, AV_LOG_ERROR, "Error when parsing '%s'.\n", s);\
86 goto release;\
87 }\
88 }
89 PARSE_EXPR(ox_expr, ctx->overlay_ox)
90 PARSE_EXPR(oy_expr, ctx->overlay_oy)
91 PARSE_EXPR(ow_expr, ctx->overlay_ow)
92 PARSE_EXPR(oh_expr, ctx->overlay_oh)
93 #undef PARSE_EXPR
94
95 var_values[VAR_OVERLAY_W] =
96 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
97 var_values[VAR_OVERLAY_H] =
98 var_values[VAR_OH] = av_expr_eval(oh_expr, var_values, NULL);
99
100 /* calc again in case ow is relative to oh */
101 var_values[VAR_OVERLAY_W] =
102 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
103
104 var_values[VAR_OVERLAY_X] =
105 var_values[VAR_OX] = av_expr_eval(ox_expr, var_values, NULL);
106 var_values[VAR_OVERLAY_Y] =
107 var_values[VAR_OY] = av_expr_eval(oy_expr, var_values, NULL);
108
109 /* calc again in case ox is relative to oy */
110 var_values[VAR_OVERLAY_X] =
111 var_values[VAR_OX] = av_expr_eval(ox_expr, var_values, NULL);
112
113 /* calc overlay_w and overlay_h again in case they are relative to ox/oy */
114 var_values[VAR_OVERLAY_W] =
115 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
116 var_values[VAR_OVERLAY_H] =
117 var_values[VAR_OH] = av_expr_eval(oh_expr, var_values, NULL);
118 var_values[VAR_OVERLAY_W] =
119 var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
120
121 release:
122 av_expr_free(ox_expr);
123 av_expr_free(oy_expr);
124 av_expr_free(ow_expr);
125 av_expr_free(oh_expr);
126
127 return ret;
128 }
129
130 static int overlay_vaapi_build_filter_params(AVFilterContext *avctx)
131 {
132 VAAPIVPPContext *vpp_ctx = avctx->priv;
133 VAStatus vas;
134 int support_flag;
135 VAProcPipelineCaps pipeline_caps;
136
137 memset(&pipeline_caps, 0, sizeof(pipeline_caps));
138 vas = vaQueryVideoProcPipelineCaps(vpp_ctx->hwctx->display,
139 vpp_ctx->va_context,
140 NULL, 0,
141 &pipeline_caps);
142 if (vas != VA_STATUS_SUCCESS) {
143 av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
144 "caps: %d (%s).\n", vas, vaErrorStr(vas));
145 return AVERROR(EIO);
146 }
147
148 if (!pipeline_caps.blend_flags) {
149 av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support overlay\n");
150 return AVERROR(EINVAL);
151 }
152
153 support_flag = pipeline_caps.blend_flags & VA_BLEND_GLOBAL_ALPHA;
154 if (!support_flag) {
155 av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support global alpha blending\n");
156 return AVERROR(EINVAL);
157 }
158
159 return 0;
160 }
161
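/*
 * Submit one processing job: vaBeginPicture on the output surface, one
 * pipeline parameter buffer for the main input (plus an optional second
 * buffer for the overlay), then vaEndPicture to kick off execution.
 */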
162 static int overlay_vaapi_render_picture(AVFilterContext *avctx,
163 VAProcPipelineParameterBuffer *params,
164 VAProcPipelineParameterBuffer *subpic_params,
165 AVFrame *output_frame)
166 {
167 VAAPIVPPContext *ctx = avctx->priv;
168 VASurfaceID output_surface;
169 VABufferID params_id;
170 VABufferID subpic_params_id;
171 VAStatus vas;
172 int err = 0;
173
174 output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
175
176 vas = vaBeginPicture(ctx->hwctx->display,
177 ctx->va_context, output_surface);
178 if (vas != VA_STATUS_SUCCESS) {
179 av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
180 "%d (%s).\n", vas, vaErrorStr(vas));
181 err = AVERROR(EIO);
182 goto fail;
183 }
184
185 vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
186 VAProcPipelineParameterBufferType,
187 sizeof(*params), 1, params, &params_id);
188 if (vas != VA_STATUS_SUCCESS) {
189 av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
190 "%d (%s).\n", vas, vaErrorStr(vas));
191 err = AVERROR(EIO);
192 goto fail_after_begin;
193 }
194 av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
195 params_id);
196
197
198 vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
199 &params_id, 1);
200 if (vas != VA_STATUS_SUCCESS) {
201 av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
202 "%d (%s).\n", vas, vaErrorStr(vas));
203 err = AVERROR(EIO);
204 goto fail_after_begin;
205 }
206
207 if (subpic_params) {
208 vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
209 VAProcPipelineParameterBufferType,
210 sizeof(*subpic_params), 1, subpic_params, &subpic_params_id);
211 if (vas != VA_STATUS_SUCCESS) {
212 av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
213 "%d (%s).\n", vas, vaErrorStr(vas));
214 err = AVERROR(EIO);
215 goto fail_after_begin;
216 }
217 av_log(avctx, AV_LOG_DEBUG, "Pipeline subpic parameter buffer is %#x.\n",
218 subpic_params_id);
219
220 vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
221 &subpic_params_id, 1);
222 if (vas != VA_STATUS_SUCCESS) {
223 av_log(avctx, AV_LOG_ERROR, "Failed to render subpic parameter buffer: "
224 "%d (%s).\n", vas, vaErrorStr(vas));
225 err = AVERROR(EIO);
226 goto fail_after_begin;
227 }
228 }
229
230 vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
231 if (vas != VA_STATUS_SUCCESS) {
232 av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
233 "%d (%s).\n", vas, vaErrorStr(vas));
234 err = AVERROR(EIO);
235 goto fail_after_render;
236 }
237
238 if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
239 AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
240 vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
241 if (vas != VA_STATUS_SUCCESS) {
242 av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
243 "%d (%s).\n", vas, vaErrorStr(vas));
244 // And ignore.
245 }
246 }
247
248 return 0;
249
250 // We want to make sure that if vaBeginPicture has been called, we also
251 // call vaRenderPicture and vaEndPicture. These calls may well fail or
252 // do something else nasty, but once we're in this failure case there
253 // isn't much else we can do.
254 fail_after_begin:
255 vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
256 fail_after_render:
257 vaEndPicture(ctx->hwctx->display, ctx->va_context);
258 fail:
259 return err;
260 }
261
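/*
 * Framesync callback: fetch the current main and overlay frames, set up the
 * main pipeline parameters and, if an overlay frame is present, the blend
 * state and destination rectangle for the overlay, then render into a new
 * output frame and pass it downstream.
 */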
262 static int overlay_vaapi_blend(FFFrameSync *fs)
263 {
264 AVFilterContext *avctx = fs->parent;
265 AVFilterLink *outlink = avctx->outputs[0];
266 OverlayVAAPIContext *ctx = avctx->priv;
267 VAAPIVPPContext *vpp_ctx = avctx->priv;
268 AVFrame *input_main, *input_overlay;
269 AVFrame *output;
270 VAProcPipelineParameterBuffer params, subpic_params;
271 VABlendState blend_state = { 0 }; /**< Blend State */
272 VARectangle overlay_region, output_region;
273 int err;
274
275 err = ff_framesync_get_frame(fs, 0, &input_main, 0);
276 if (err < 0)
277 return err;
278 err = ff_framesync_get_frame(fs, 1, &input_overlay, 0);
279 if (err < 0)
280 return err;
281
282 av_log(avctx, AV_LOG_DEBUG, "Filter main: %s, %ux%u (%"PRId64").\n",
283 av_get_pix_fmt_name(input_main->format),
284 input_main->width, input_main->height, input_main->pts);
285
286 if (vpp_ctx->va_context == VA_INVALID_ID)
287 return AVERROR(EINVAL);
288
289 output = ff_get_video_buffer(outlink, outlink->w, outlink->h);
290 if (!output) {
291 err = AVERROR(ENOMEM);
292 goto fail;
293 }
294
295 err = av_frame_copy_props(output, input_main);
296 if (err < 0)
297 goto fail;
298
299 err = ff_vaapi_vpp_init_params(avctx, &params,
300 input_main, output);
301 if (err < 0)
302 goto fail;
303
304 output_region = (VARectangle) {
305 .x = 0,
306 .y = 0,
307 .width = output->width,
308 .height = output->height,
309 };
310
311 params.filters = &vpp_ctx->filter_buffers[0];
312 params.num_filters = vpp_ctx->nb_filter_buffers;
313
314 params.output_region = &output_region;
315 params.output_background_color = VAAPI_VPP_BACKGROUND_BLACK;
316
317 if (input_overlay) {
318 av_log(avctx, AV_LOG_DEBUG, "Filter overlay: %s, %ux%u (%"PRId64").\n",
319 av_get_pix_fmt_name(input_overlay->format),
320 input_overlay->width, input_overlay->height, input_overlay->pts);
321
322 overlay_region = (VARectangle) {
323 .x = ctx->ox,
324 .y = ctx->oy,
325 .width = ctx->ow ? ctx->ow : input_overlay->width,
326 .height = ctx->oh ? ctx->oh : input_overlay->height,
327 };
328
329 if (overlay_region.x + overlay_region.width > input_main->width ||
330 overlay_region.y + overlay_region.height > input_main->height) {
331 av_log(avctx, AV_LOG_WARNING,
332 "The overlay image exceeds the bounds of the main image; "
333 "the overlay will be cropped to fit the main image.\n");
334 }
335
336 memcpy(&subpic_params, &params, sizeof(subpic_params));
337
338 blend_state.flags = ctx->blend_flags;
339 blend_state.global_alpha = ctx->blend_alpha;
340 subpic_params.blend_state = &blend_state;
341
342 subpic_params.surface = (VASurfaceID)(uintptr_t)input_overlay->data[3];
343 subpic_params.output_region = &overlay_region;
344 }
345
346 err = overlay_vaapi_render_picture(avctx, &params, input_overlay ? &subpic_params : NULL, output);
347 if (err < 0)
348 goto fail;
349
350 av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
351 av_get_pix_fmt_name(output->format),
352 output->width, output->height, output->pts);
353
354 return ff_filter_frame(outlink, output);
355
356 fail:
357 av_frame_free(&output);
358 return err;
359 }
360
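/*
 * Whether the overlay input's pixel format carries an alpha component. For
 * hardware frames the decision is made on the underlying software format
 * (e.g. BGRA has alpha, NV12 does not); this determines below whether
 * premultiplied-alpha blending is requested from the driver.
 */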
361 static int have_alpha_planar(AVFilterLink *link)
362 {
363 enum AVPixelFormat pix_fmt = link->format;
364 const AVPixFmtDescriptor *desc;
365 AVHWFramesContext *fctx;
366
367 if (link->format == AV_PIX_FMT_VAAPI) {
368 fctx = (AVHWFramesContext *)link->hw_frames_ctx->data;
369 pix_fmt = fctx->sw_format;
370 }
371
372 desc = av_pix_fmt_desc_get(pix_fmt);
373 if (!desc)
374 return 0;
375
376 return !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
377 }
378
379 static int overlay_vaapi_config_input_main(AVFilterLink *inlink)
380 {
381 AVFilterContext *avctx = inlink->dst;
382 OverlayVAAPIContext *ctx = avctx->priv;
383
384 ctx->var_values[VAR_MAIN_IW] =
385 ctx->var_values[VAR_MW] = inlink->w;
386 ctx->var_values[VAR_MAIN_IH] =
387 ctx->var_values[VAR_MH] = inlink->h;
388
389 return ff_vaapi_vpp_config_input(inlink);
390 }
391
392 static int overlay_vaapi_config_input_overlay(AVFilterLink *inlink)
393 {
394 AVFilterContext *avctx = inlink->dst;
395 OverlayVAAPIContext *ctx = avctx->priv;
396 int ret;
397
398 ctx->var_values[VAR_OVERLAY_IW] = inlink->w;
399 ctx->var_values[VAR_OVERLAY_IH] = inlink->h;
400
401 ret = eval_expr(avctx);
402 if (ret < 0)
403 return ret;
404
405 ctx->ox = (int)ctx->var_values[VAR_OX];
406 ctx->oy = (int)ctx->var_values[VAR_OY];
407 ctx->ow = (int)ctx->var_values[VAR_OW];
408 ctx->oh = (int)ctx->var_values[VAR_OH];
409
410 ctx->blend_flags = 0;
411 ctx->blend_alpha = 1.0f;
412
413 if (ctx->alpha < 1.0f) {
414 ctx->blend_flags |= VA_BLEND_GLOBAL_ALPHA;
415 ctx->blend_alpha = ctx->alpha;
416 }
417
418 if (have_alpha_planar(inlink))
419 ctx->blend_flags |= VA_BLEND_PREMULTIPLIED_ALPHA;
420
421 return 0;
422 }
423
424 static int overlay_vaapi_config_output(AVFilterLink *outlink)
425 {
426 AVFilterContext *avctx = outlink->src;
427 OverlayVAAPIContext *ctx = avctx->priv;
428 VAAPIVPPContext *vpp_ctx = avctx->priv;
429 int err;
430
431 outlink->time_base = avctx->inputs[0]->time_base;
432 vpp_ctx->output_width = avctx->inputs[0]->w;
433 vpp_ctx->output_height = avctx->inputs[0]->h;
434
435 err = ff_vaapi_vpp_config_output(outlink);
436 if (err < 0)
437 return err;
438
439 err = overlay_vaapi_build_filter_params(avctx);
440 if (err < 0)
441 return err;
442
443 err = ff_framesync_init_dualinput(&ctx->fs, avctx);
444 if (err < 0)
445 return err;
446
447 ctx->fs.on_event = overlay_vaapi_blend;
448 ctx->fs.time_base = outlink->time_base;
449
450 return ff_framesync_configure(&ctx->fs);
451 }
452
453 static av_cold int overlay_vaapi_init(AVFilterContext *avctx)
454 {
455 VAAPIVPPContext *vpp_ctx = avctx->priv;
456
457 ff_vaapi_vpp_ctx_init(avctx);
458 vpp_ctx->output_format = AV_PIX_FMT_NONE;
459
460 return 0;
461 }
462
463 static int overlay_vaapi_activate(AVFilterContext *avctx)
464 {
465 OverlayVAAPIContext *ctx = avctx->priv;
466
467 return ff_framesync_activate(&ctx->fs);
468 }
469
470 static av_cold void overlay_vaapi_uninit(AVFilterContext *avctx)
471 {
472 OverlayVAAPIContext *ctx = avctx->priv;
473
474 ff_framesync_uninit(&ctx->fs);
475 ff_vaapi_vpp_ctx_uninit(avctx);
476 }
477
478 #define OFFSET(x) offsetof(OverlayVAAPIContext, x)
479 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
480 static const AVOption overlay_vaapi_options[] = {
481 { "x", "Overlay x position", OFFSET(overlay_ox), AV_OPT_TYPE_STRING, { .str="0"}, 0, 255, .flags = FLAGS},
482 { "y", "Overlay y position", OFFSET(overlay_oy), AV_OPT_TYPE_STRING, { .str="0"}, 0, 255, .flags = FLAGS},
483 { "w", "Overlay width", OFFSET(overlay_ow), AV_OPT_TYPE_STRING, { .str="overlay_iw"}, 0, 255, .flags = FLAGS},
484 { "h", "Overlay height", OFFSET(overlay_oh), AV_OPT_TYPE_STRING, { .str="overlay_ih*w/overlay_iw"}, 0, 255, .flags = FLAGS},
485 { "alpha", "Overlay global alpha", OFFSET(alpha), AV_OPT_TYPE_FLOAT, { .dbl = 1.0 }, 0.0, 1.0, .flags = FLAGS },
486 { "eof_action", "Action to take when encountering EOF from secondary input ",
487 OFFSET(fs.opt_eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
488 EOF_ACTION_REPEAT, EOF_ACTION_PASS, .flags = FLAGS, "eof_action" },
489 { "repeat", "Repeat the previous frame.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, "eof_action" },
490 { "endall", "End both streams.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, "eof_action" },
491 { "pass", "Pass through the main input.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS }, .flags = FLAGS, "eof_action" },
492 { "shortest", "force termination when the shortest input terminates", OFFSET(fs.opt_shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
493 { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(fs.opt_repeatlast), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS },
494 { NULL },
495 };
496
497 FRAMESYNC_DEFINE_CLASS(overlay_vaapi, OverlayVAAPIContext, fs);
498
499 static const AVFilterPad overlay_vaapi_inputs[] = {
500 {
501 .name = "main",
502 .type = AVMEDIA_TYPE_VIDEO,
503 .config_props = overlay_vaapi_config_input_main,
504 },
505 {
506 .name = "overlay",
507 .type = AVMEDIA_TYPE_VIDEO,
508 .config_props = overlay_vaapi_config_input_overlay,
509 },
510 };
511
512 static const AVFilterPad overlay_vaapi_outputs[] = {
513 {
514 .name = "default",
515 .type = AVMEDIA_TYPE_VIDEO,
516 .config_props = &overlay_vaapi_config_output,
517 },
518 };
519
520 const AVFilter ff_vf_overlay_vaapi = {
521 .name = "overlay_vaapi",
522 .description = NULL_IF_CONFIG_SMALL("Overlay one video on top of another"),
523 .priv_size = sizeof(OverlayVAAPIContext),
524 .priv_class = &overlay_vaapi_class,
525 .init = &overlay_vaapi_init,
526 .uninit = &overlay_vaapi_uninit,
527 .activate = &overlay_vaapi_activate,
528 .preinit = overlay_vaapi_framesync_preinit,
529 FILTER_INPUTS(overlay_vaapi_inputs),
530 FILTER_OUTPUTS(overlay_vaapi_outputs),
531 FILTER_SINGLE_PIXFMT(AV_PIX_FMT_VAAPI),
532 .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
533 };
534