FFmpeg coverage


Directory: ../../../ffmpeg/
File: src/fftools/ffplay_renderer.c
Date: 2026-01-22 21:25:49
Exec Total Coverage
Lines: 0 13 0.0%
Functions: 0 6 0.0%
Branches: 0 0 -%

Line Branch Exec Source
1 /*
2 * This file is part of FFmpeg.
3 *
4 * FFmpeg is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
8 *
9 * FFmpeg is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
13 *
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with FFmpeg; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 */
18
19 #define VK_NO_PROTOTYPES
20 #define VK_ENABLE_BETA_EXTENSIONS
21
22 #include "config.h"
23 #include "ffplay_renderer.h"
24
25 #if (SDL_VERSION_ATLEAST(2, 0, 6) && CONFIG_LIBPLACEBO)
26 /* Get PL_API_VER */
27 #include <libplacebo/config.h>
28 #define HAVE_VULKAN_RENDERER (PL_API_VER >= 278)
29 #else
30 #define HAVE_VULKAN_RENDERER 0
31 #endif
32
33 #if HAVE_VULKAN_RENDERER
34
35 #if defined(_WIN32) && !defined(VK_USE_PLATFORM_WIN32_KHR)
36 #define VK_USE_PLATFORM_WIN32_KHR
37 #endif
38
39 #include <libplacebo/vulkan.h>
40 #include <libplacebo/utils/frame_queue.h>
41 #include <libplacebo/utils/libav.h>
42 #include <SDL_vulkan.h>
43
44 #include "libavutil/bprint.h"
45 #include "libavutil/mem.h"
46
47 #endif
48
/*
 * Abstract renderer interface. A concrete backend fills in the function
 * pointers; callers use the vk_renderer_* wrappers at the end of this file.
 */
struct VkRenderer {
    const AVClass *class;

    /* Initialize the renderer for the given SDL window; dict carries options. */
    int (*create)(VkRenderer *renderer, SDL_Window *window, AVDictionary *dict);

    /* Store the renderer's hardware device reference in *dev (not a new ref). */
    int (*get_hw_dev)(VkRenderer *renderer, AVBufferRef **dev);

    /* Render and present one frame using the given parameters. */
    int (*display)(VkRenderer *renderer, AVFrame *frame, RenderParams *params);

    /* Notify the renderer that the drawable size changed. */
    int (*resize)(VkRenderer *renderer, int width, int height);

    /* Release every resource owned by the renderer. */
    void (*destroy)(VkRenderer *renderer);
};
62
63 #if HAVE_VULKAN_RENDERER
64
typedef struct RendererContext {
    VkRenderer api;            // must stay first: code casts VkRenderer* <-> RendererContext*

    // Can be NULL when vulkan instance is created by avutil
    pl_vk_inst placebo_instance;
    pl_vulkan placebo_vulkan;  // libplacebo Vulkan device wrapper
    pl_swapchain swapchain;    // presentation swapchain for the window surface
    VkSurfaceKHR vk_surface;   // created via SDL_Vulkan_CreateSurface()
    pl_renderer renderer;      // libplacebo renderer
    pl_tex tex[4];             // per-plane textures reused by pl_map_avframe_ex()

    pl_log vk_log;             // libplacebo log, forwarded to av_log via vk_log_cb()

    AVBufferRef *hw_device_ref;           // AV_HWDEVICE_TYPE_VULKAN device context
    AVBufferRef *hw_frame_ref;            // Vulkan hwframes ctx used by map/transfer paths
    enum AVPixelFormat *transfer_formats; // formats accepted by av_hwframe_transfer_data()
    AVHWFramesConstraints *constraints;   // cached device hwframe constraints

    PFN_vkGetInstanceProcAddr get_proc_addr;
    // This field is a copy from pl_vk_inst->instance or hw_device_ref instance.
    VkInstance inst;

    AVFrame *vk_frame;         // scratch frame holding the mapped/uploaded Vulkan image
} RendererContext;
89
90 static void vk_log_cb(void *log_priv, enum pl_log_level level,
91 const char *msg)
92 {
93 static const int level_map[] = {
94 AV_LOG_QUIET,
95 AV_LOG_FATAL,
96 AV_LOG_ERROR,
97 AV_LOG_WARNING,
98 AV_LOG_INFO,
99 AV_LOG_DEBUG,
100 AV_LOG_TRACE,
101 };
102
103 if (level > 0 && level < FF_ARRAY_ELEMS(level_map))
104 av_log(log_priv, level_map[level], "%s\n", msg);
105 }
106
107 static inline int enable_debug(const AVDictionary *opt)
108 {
109 AVDictionaryEntry *entry = av_dict_get(opt, "debug", NULL, 0);
110 int debug = entry && strtol(entry->value, NULL, 10);
111 return debug;
112 }
113
114 static void hwctx_lock_queue(void *priv, uint32_t qf, uint32_t qidx)
115 {
116 AVHWDeviceContext *avhwctx = priv;
117 const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
118 hwctx->lock_queue(avhwctx, qf, qidx);
119 }
120
121 static void hwctx_unlock_queue(void *priv, uint32_t qf, uint32_t qidx)
122 {
123 AVHWDeviceContext *avhwctx = priv;
124 const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
125 hwctx->unlock_queue(avhwctx, qf, qidx);
126 }
127
128 static int add_instance_extension(const char **ext, unsigned num_ext,
129 const AVDictionary *opt,
130 AVDictionary **dict)
131 {
132 const char *inst_ext_key = "instance_extensions";
133 AVDictionaryEntry *entry;
134 AVBPrint buf;
135 char *ext_list = NULL;
136 int ret;
137
138 av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
139 for (int i = 0; i < num_ext; i++) {
140 if (i)
141 av_bprintf(&buf, "+%s", ext[i]);
142 else
143 av_bprintf(&buf, "%s", ext[i]);
144 }
145
146 entry = av_dict_get(opt, inst_ext_key, NULL, 0);
147 if (entry && entry->value && entry->value[0]) {
148 if (num_ext)
149 av_bprintf(&buf, "+");
150 av_bprintf(&buf, "%s", entry->value);
151 }
152
153 ret = av_bprint_finalize(&buf, &ext_list);
154 if (ret < 0)
155 return ret;
156 return av_dict_set(dict, inst_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
157 }
158
159 static int add_device_extension(const AVDictionary *opt,
160 AVDictionary **dict)
161 {
162 const char *dev_ext_key = "device_extensions";
163 AVDictionaryEntry *entry;
164 AVBPrint buf;
165 char *ext_list = NULL;
166 int ret;
167
168 av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
169 av_bprintf(&buf, "%s", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
170 for (int i = 0; i < pl_vulkan_num_recommended_extensions; i++)
171 av_bprintf(&buf, "+%s", pl_vulkan_recommended_extensions[i]);
172
173 entry = av_dict_get(opt, dev_ext_key, NULL, 0);
174 if (entry && entry->value && entry->value[0])
175 av_bprintf(&buf, "+%s", entry->value);
176
177 ret = av_bprint_finalize(&buf, &ext_list);
178 if (ret < 0)
179 return ret;
180 return av_dict_set(dict, dev_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
181 }
182
183 static const char *select_device(const AVDictionary *opt)
184 {
185 const AVDictionaryEntry *entry;
186
187 entry = av_dict_get(opt, "device", NULL, 0);
188 if (entry)
189 return entry->value;
190 return NULL;
191 }
192
/*
 * Create the Vulkan device through av_hwdevice_ctx_create() and import the
 * resulting instance/device into libplacebo with pl_vulkan_import().
 *
 * ext/num_ext are the instance extensions SDL requires for the window
 * surface; user-supplied instance/device extensions from opt are merged in.
 * Fails with AVERROR_PATCHWELCOME if the hwdevice's loader entry point does
 * not match SDL's, since both must resolve the same Vulkan functions.
 * Resources stored in ctx are released by destroy(), including on error.
 */
static int create_vk_by_hwcontext(VkRenderer *renderer,
                                  const char **ext, unsigned num_ext,
                                  const AVDictionary *opt)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWDeviceContext *dev;
    AVVulkanDeviceContext *hwctx;
    AVDictionary *dict = NULL;
    int ret;

    ret = add_instance_extension(ext, num_ext, opt, &dict);
    if (ret < 0)
        return ret;
    ret = add_device_extension(opt, &dict);
    if (ret) {
        av_dict_free(&dict);
        return ret;
    }

    ret = av_hwdevice_ctx_create(&ctx->hw_device_ref, AV_HWDEVICE_TYPE_VULKAN,
                                 select_device(opt), dict, 0);
    av_dict_free(&dict);
    if (ret < 0)
        return ret;

    dev = (AVHWDeviceContext *) ctx->hw_device_ref->data;
    hwctx = dev->hwctx;

    // There is no way to pass SDL GetInstanceProcAddr to hwdevice.
    // Check the result and return error if they don't match.
    if (hwctx->get_proc_addr != SDL_Vulkan_GetVkGetInstanceProcAddr()) {
        av_log(renderer, AV_LOG_ERROR,
               "hwdevice and SDL use different get_proc_addr. "
               "Try -vulkan_params create_by_placebo=1\n");
        return AVERROR_PATCHWELCOME;
    }

    ctx->get_proc_addr = hwctx->get_proc_addr;
    ctx->inst = hwctx->inst;

    // All queues start unset; they are filled from the hwdevice's queue
    // families in the loop below.
    struct pl_vulkan_import_params import_params = {
        .instance = hwctx->inst,
        .get_proc_addr = hwctx->get_proc_addr,
        .phys_device = hwctx->phys_dev,
        .device = hwctx->act_dev,
        .extensions = hwctx->enabled_dev_extensions,
        .num_extensions = hwctx->nb_enabled_dev_extensions,
        .features = &hwctx->device_features,
        // Queue locking is delegated back to the hwdevice context.
        .lock_queue = hwctx_lock_queue,
        .unlock_queue = hwctx_unlock_queue,
        .queue_ctx = dev,
        .queue_graphics = {
            .index = VK_QUEUE_FAMILY_IGNORED,
            .count = 0,
        },
        .queue_compute = {
            .index = VK_QUEUE_FAMILY_IGNORED,
            .count = 0,
        },
        .queue_transfer = {
            .index = VK_QUEUE_FAMILY_IGNORED,
            .count = 0,
        },
    };
    // A family may carry several capabilities; the last matching family
    // wins for each of graphics/compute/transfer.
    for (int i = 0; i < hwctx->nb_qf; i++) {
        const AVVulkanDeviceQueueFamily *qf = &hwctx->qf[i];

        if (qf->flags & VK_QUEUE_GRAPHICS_BIT) {
            import_params.queue_graphics.index = qf->idx;
            import_params.queue_graphics.count = qf->num;
        }
        if (qf->flags & VK_QUEUE_COMPUTE_BIT) {
            import_params.queue_compute.index = qf->idx;
            import_params.queue_compute.count = qf->num;
        }
        if (qf->flags & VK_QUEUE_TRANSFER_BIT) {
            import_params.queue_transfer.index = qf->idx;
            import_params.queue_transfer.count = qf->num;
        }
    }

    ctx->placebo_vulkan = pl_vulkan_import(ctx->vk_log, &import_params);
    if (!ctx->placebo_vulkan)
        return AVERROR_EXTERNAL;

    return 0;
}
280
281 static void placebo_lock_queue(struct AVHWDeviceContext *dev_ctx,
282 uint32_t queue_family, uint32_t index)
283 {
284 RendererContext *ctx = dev_ctx->user_opaque;
285 pl_vulkan vk = ctx->placebo_vulkan;
286 vk->lock_queue(vk, queue_family, index);
287 }
288
289 static void placebo_unlock_queue(struct AVHWDeviceContext *dev_ctx,
290 uint32_t queue_family,
291 uint32_t index)
292 {
293 RendererContext *ctx = dev_ctx->user_opaque;
294 pl_vulkan vk = ctx->placebo_vulkan;
295 vk->unlock_queue(vk, queue_family, index);
296 }
297
298 static int get_decode_queue(VkRenderer *renderer, int *index, int *count)
299 {
300 RendererContext *ctx = (RendererContext *) renderer;
301 VkQueueFamilyProperties *queue_family_prop = NULL;
302 uint32_t num_queue_family_prop = 0;
303 PFN_vkGetPhysicalDeviceQueueFamilyProperties get_queue_family_prop;
304 PFN_vkGetInstanceProcAddr get_proc_addr = ctx->get_proc_addr;
305
306 *index = -1;
307 *count = 0;
308 get_queue_family_prop = (PFN_vkGetPhysicalDeviceQueueFamilyProperties)
309 get_proc_addr(ctx->placebo_instance->instance,
310 "vkGetPhysicalDeviceQueueFamilyProperties");
311 get_queue_family_prop(ctx->placebo_vulkan->phys_device,
312 &num_queue_family_prop, NULL);
313 if (!num_queue_family_prop)
314 return AVERROR_EXTERNAL;
315
316 queue_family_prop = av_calloc(num_queue_family_prop,
317 sizeof(*queue_family_prop));
318 if (!queue_family_prop)
319 return AVERROR(ENOMEM);
320
321 get_queue_family_prop(ctx->placebo_vulkan->phys_device,
322 &num_queue_family_prop,
323 queue_family_prop);
324
325 for (int i = 0; i < num_queue_family_prop; i++) {
326 if (queue_family_prop[i].queueFlags & VK_QUEUE_VIDEO_DECODE_BIT_KHR) {
327 *index = i;
328 *count = queue_family_prop[i].queueCount;
329 break;
330 }
331 }
332 av_free(queue_family_prop);
333
334 return 0;
335 }
336
/*
 * Create the Vulkan instance and device through libplacebo, then wrap them
 * into an AVHWDeviceContext so FFmpeg decoders can share the same device.
 * An extra video-decode queue is requested beyond libplacebo's defaults.
 * Resources stored in ctx are released by destroy(), including on error.
 */
static int create_vk_by_placebo(VkRenderer *renderer,
                                const char **ext, unsigned num_ext,
                                const AVDictionary *opt)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWDeviceContext *device_ctx;
    AVVulkanDeviceContext *vk_dev_ctx;
    int decode_index;
    int decode_count;
    int ret;
    const char **dev_exts;
    int num_dev_exts;

    ctx->get_proc_addr = SDL_Vulkan_GetVkGetInstanceProcAddr();

    ctx->placebo_instance = pl_vk_inst_create(ctx->vk_log, pl_vk_inst_params(
            .get_proc_addr = ctx->get_proc_addr,
            .debug = enable_debug(opt),     // "debug=1" option enables it
            .extensions = ext,              // SDL's required surface extensions
            .num_extensions = num_ext
    ));
    if (!ctx->placebo_instance) {
        return AVERROR_EXTERNAL;
    }
    ctx->inst = ctx->placebo_instance->instance;

    // Optional device extensions avutil knows how to use; freed below.
    dev_exts = av_vk_get_optional_device_extensions(&num_dev_exts);
    if (!dev_exts)
        return AVERROR(ENOMEM);

    ctx->placebo_vulkan = pl_vulkan_create(ctx->vk_log, pl_vulkan_params(
            .instance = ctx->placebo_instance->instance,
            .get_proc_addr = ctx->placebo_instance->get_proc_addr,
            .surface = ctx->vk_surface,
            .allow_software = false,
            .opt_extensions = dev_exts,
            .num_opt_extensions = num_dev_exts,
            // Ask for a video-decode queue on top of the defaults.
            .extra_queues = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
            .device_name = select_device(opt),
    ));
    av_free(dev_exts);
    if (!ctx->placebo_vulkan)
        return AVERROR_EXTERNAL;
    ctx->hw_device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VULKAN);
    if (!ctx->hw_device_ref) {
        return AVERROR(ENOMEM);
    }

    device_ctx = (AVHWDeviceContext *) ctx->hw_device_ref->data;
    device_ctx->user_opaque = ctx;  // read back by placebo_(un)lock_queue

    // Mirror the libplacebo instance/device into the AVVulkanDeviceContext.
    vk_dev_ctx = device_ctx->hwctx;
    vk_dev_ctx->lock_queue = placebo_lock_queue;
    vk_dev_ctx->unlock_queue = placebo_unlock_queue;

    vk_dev_ctx->get_proc_addr = ctx->placebo_instance->get_proc_addr;

    vk_dev_ctx->inst = ctx->placebo_instance->instance;
    vk_dev_ctx->phys_dev = ctx->placebo_vulkan->phys_device;
    vk_dev_ctx->act_dev = ctx->placebo_vulkan->device;

    vk_dev_ctx->device_features = *ctx->placebo_vulkan->features;

    vk_dev_ctx->enabled_inst_extensions = ctx->placebo_instance->extensions;
    vk_dev_ctx->nb_enabled_inst_extensions = ctx->placebo_instance->num_extensions;

    vk_dev_ctx->enabled_dev_extensions = ctx->placebo_vulkan->extensions;
    vk_dev_ctx->nb_enabled_dev_extensions = ctx->placebo_vulkan->num_extensions;

    // Advertise the queue families libplacebo selected, plus video decode.
    int nb_qf = 0;
    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = ctx->placebo_vulkan->queue_graphics.index,
        .num = ctx->placebo_vulkan->queue_graphics.count,
        .flags = VK_QUEUE_GRAPHICS_BIT,
    };
    nb_qf++;
    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = ctx->placebo_vulkan->queue_transfer.index,
        .num = ctx->placebo_vulkan->queue_transfer.count,
        .flags = VK_QUEUE_TRANSFER_BIT,
    };
    nb_qf++;
    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = ctx->placebo_vulkan->queue_compute.index,
        .num = ctx->placebo_vulkan->queue_compute.count,
        .flags = VK_QUEUE_COMPUTE_BIT,
    };
    nb_qf++;

    ret = get_decode_queue(renderer, &decode_index, &decode_count);
    if (ret < 0)
        return ret;

    // NOTE(review): decode_index is -1 when no decode-capable family exists,
    // yet the entry is still recorded — confirm hwcontext tolerates that.
    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = decode_index,
        .num = decode_count,
        .flags = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
    };
    nb_qf++;

    vk_dev_ctx->nb_qf = nb_qf;

    ret = av_hwdevice_ctx_init(ctx->hw_device_ref);
    if (ret < 0)
        return ret;

    return 0;
}
445
/*
 * VkRenderer.create implementation: set up libplacebo logging, the Vulkan
 * device (via hwcontext by default, or fully via libplacebo when the
 * "create_by_placebo=1" option is given), the window surface, the swapchain
 * and the renderer. Partially-created state is cleaned up by destroy().
 */
static int create(VkRenderer *renderer, SDL_Window *window, AVDictionary *opt)
{
    int ret = 0;
    unsigned num_ext = 0;
    const char **ext = NULL;
    int w, h;
    struct pl_log_params vk_log_params = {
        .log_cb = vk_log_cb,
        .log_level = PL_LOG_DEBUG,
        .log_priv = renderer,
    };
    RendererContext *ctx = (RendererContext *) renderer;
    AVDictionaryEntry *entry;

    ctx->vk_log = pl_log_create(PL_API_VER, &vk_log_params);

    // First call: query how many instance extensions SDL requires...
    if (!SDL_Vulkan_GetInstanceExtensions(window, &num_ext, NULL)) {
        av_log(NULL, AV_LOG_FATAL, "Failed to get vulkan extensions: %s\n",
               SDL_GetError());
        return AVERROR_EXTERNAL;
    }

    ext = av_calloc(num_ext, sizeof(*ext));
    if (!ext) {
        ret = AVERROR(ENOMEM);
        goto out;
    }

    // ...second call fills in the names (return value unchecked here; the
    // identical query just succeeded above).
    SDL_Vulkan_GetInstanceExtensions(window, &num_ext, ext);

    entry = av_dict_get(opt, "create_by_placebo", NULL, 0);
    if (entry && strtol(entry->value, NULL, 10))
        ret = create_vk_by_placebo(renderer, ext, num_ext, opt);
    else
        ret = create_vk_by_hwcontext(renderer, ext, num_ext, opt);
    if (ret < 0)
        goto out;

    if (!SDL_Vulkan_CreateSurface(window, ctx->inst, &ctx->vk_surface)) {
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    // FIFO present mode is always available and gives vsync-like pacing.
    ctx->swapchain = pl_vulkan_create_swapchain(
            ctx->placebo_vulkan,
            pl_vulkan_swapchain_params(
                    .surface = ctx->vk_surface,
                    .present_mode = VK_PRESENT_MODE_FIFO_KHR));
    if (!ctx->swapchain) {
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    SDL_Vulkan_GetDrawableSize(window, &w, &h);
    pl_swapchain_resize(ctx->swapchain, &w, &h);

    ctx->renderer = pl_renderer_create(ctx->vk_log, ctx->placebo_vulkan->gpu);
    if (!ctx->renderer) {
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    // Scratch frame reused by the map/transfer paths in display().
    ctx->vk_frame = av_frame_alloc();
    if (!ctx->vk_frame) {
        ret = AVERROR(ENOMEM);
        goto out;
    }

    ret = 0;

out:
    av_free(ext);
    return ret;
}
520
521 static int get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
522 {
523 RendererContext *ctx = (RendererContext *) renderer;
524
525 *dev = ctx->hw_device_ref;
526 return 0;
527 }
528
/*
 * (Re)create ctx->hw_frame_ref, a Vulkan hwframes context matching frame's
 * size and software format, so that non-Vulkan hardware frames can later be
 * mapped or transferred into Vulkan memory.
 *
 * Returns 0 even when the device constraints rule the size/format out — in
 * that case ctx->hw_frame_ref is left unset and callers fall back to the
 * GPU-to-CPU copy path.
 */
static int create_hw_frame(VkRenderer *renderer, AVFrame *frame)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWFramesContext *src_hw_frame = (AVHWFramesContext *)
            frame->hw_frames_ctx->data;
    AVHWFramesContext *hw_frame;
    AVVulkanFramesContext *vk_frame_ctx;
    int ret;

    // Reuse the existing hwframes context if it still matches the input.
    if (ctx->hw_frame_ref) {
        hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;

        if (hw_frame->width == frame->width &&
            hw_frame->height == frame->height &&
            hw_frame->sw_format == src_hw_frame->sw_format)
            return 0;

        av_buffer_unref(&ctx->hw_frame_ref);
    }

    // Constraints are per-device; query once and cache in ctx.
    if (!ctx->constraints) {
        ctx->constraints = av_hwdevice_get_hwframe_constraints(
                ctx->hw_device_ref, NULL);
        if (!ctx->constraints)
            return AVERROR(ENOMEM);
    }

    // Check constraints and skip create hwframe. Don't take it as error since
    // we can fallback to memory copy from GPU to CPU.
    if ((ctx->constraints->max_width &&
         ctx->constraints->max_width < frame->width) ||
        (ctx->constraints->max_height &&
         ctx->constraints->max_height < frame->height) ||
        (ctx->constraints->min_width &&
         ctx->constraints->min_width > frame->width) ||
        (ctx->constraints->min_height &&
         ctx->constraints->min_height > frame->height))
        return 0;

    // Likewise skip (not fail) when the source sw_format is unsupported.
    if (ctx->constraints->valid_sw_formats) {
        enum AVPixelFormat *sw_formats = ctx->constraints->valid_sw_formats;
        while (*sw_formats != AV_PIX_FMT_NONE) {
            if (*sw_formats == src_hw_frame->sw_format)
                break;
            sw_formats++;
        }
        if (*sw_formats == AV_PIX_FMT_NONE)
            return 0;
    }

    ctx->hw_frame_ref = av_hwframe_ctx_alloc(ctx->hw_device_ref);
    if (!ctx->hw_frame_ref)
        return AVERROR(ENOMEM);

    hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;
    hw_frame->format = AV_PIX_FMT_VULKAN;
    hw_frame->sw_format = src_hw_frame->sw_format;
    hw_frame->width = frame->width;
    hw_frame->height = frame->height;

    // NOTE(review): presumably CUDA interop cannot handle multiplane Vulkan
    // images, hence the flag for CUDA sources — confirm against hwcontext docs.
    if (frame->format == AV_PIX_FMT_CUDA) {
        vk_frame_ctx = hw_frame->hwctx;
        vk_frame_ctx->flags = AV_VK_FRAME_FLAG_DISABLE_MULTIPLANE;
    }

    ret = av_hwframe_ctx_init(ctx->hw_frame_ref);
    if (ret < 0) {
        av_log(renderer, AV_LOG_ERROR, "Create hwframe context failed, %s\n",
               av_err2str(ret));
        return ret;
    }

    // Best effort: on failure transfer_formats stays NULL and
    // check_hw_transfer() simply reports no hardware-transfer support.
    av_hwframe_transfer_get_formats(ctx->hw_frame_ref,
                                    AV_HWFRAME_TRANSFER_DIRECTION_TO,
                                    &ctx->transfer_formats, 0);

    return 0;
}
607
608 static inline int check_hw_transfer(RendererContext *ctx, AVFrame *frame)
609 {
610 if (!ctx->hw_frame_ref || !ctx->transfer_formats)
611 return 0;
612
613 for (int i = 0; ctx->transfer_formats[i] != AV_PIX_FMT_NONE; i++)
614 if (ctx->transfer_formats[i] == frame->format)
615 return 1;
616
617 return 0;
618 }
619
620 static inline int move_to_output_frame(RendererContext *ctx, AVFrame *frame)
621 {
622 int ret = av_frame_copy_props(ctx->vk_frame, frame);
623 if (ret < 0)
624 return ret;
625 av_frame_unref(frame);
626 av_frame_move_ref(frame, ctx->vk_frame);
627 return 0;
628 }
629
630 static int map_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
631 {
632 RendererContext *ctx = (RendererContext *) renderer;
633 int ret;
634
635 if (use_hw_frame && !ctx->hw_frame_ref)
636 return AVERROR(ENOSYS);
637
638 // Try map data first
639 av_frame_unref(ctx->vk_frame);
640 if (use_hw_frame) {
641 ctx->vk_frame->hw_frames_ctx = av_buffer_ref(ctx->hw_frame_ref);
642 ctx->vk_frame->format = AV_PIX_FMT_VULKAN;
643 }
644 ret = av_hwframe_map(ctx->vk_frame, frame, 0);
645 if (!ret)
646 return move_to_output_frame(ctx, frame);
647
648 if (ret != AVERROR(ENOSYS))
649 av_log(NULL, AV_LOG_FATAL, "Map frame failed: %s\n", av_err2str(ret));
650 return ret;
651 }
652
653 static int transfer_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
654 {
655 RendererContext *ctx = (RendererContext *) renderer;
656 int ret;
657
658 if (use_hw_frame && !check_hw_transfer(ctx, frame))
659 return AVERROR(ENOSYS);
660
661 av_frame_unref(ctx->vk_frame);
662 if (use_hw_frame)
663 av_hwframe_get_buffer(ctx->hw_frame_ref, ctx->vk_frame, 0);
664 ret = av_hwframe_transfer_data(ctx->vk_frame, frame, 1);
665 if (!ret)
666 return move_to_output_frame(ctx, frame);
667
668 if (ret != AVERROR(ENOSYS))
669 av_log(NULL, AV_LOG_FATAL, "Transfer frame failed: %s\n",
670 av_err2str(ret));
671 return ret;
672 }
673
674 static int convert_frame(VkRenderer *renderer, AVFrame *frame)
675 {
676 int ret;
677
678 if (!frame->hw_frames_ctx)
679 return 0;
680
681 if (frame->format == AV_PIX_FMT_VULKAN)
682 return 0;
683
684 ret = create_hw_frame(renderer, frame);
685 if (ret < 0)
686 return ret;
687
688 for (int use_hw = 1; use_hw >=0; use_hw--) {
689 ret = map_frame(renderer, frame, use_hw);
690 if (!ret)
691 return 0;
692 if (ret != AVERROR(ENOSYS))
693 return ret;
694
695 ret = transfer_frame(renderer, frame, use_hw);
696 if (!ret)
697 return 0;
698 if (ret != AVERROR(ENOSYS))
699 return ret;
700 }
701
702 return ret;
703 }
704
/*
 * VkRenderer.display implementation: bring the frame into Vulkan memory,
 * map it to a pl_frame, render it into the next swapchain image with the
 * requested background and target rectangle, then present.
 * Returns 0 on success or AVERROR_EXTERNAL on a libplacebo failure.
 */
static int display(VkRenderer *renderer, AVFrame *frame, RenderParams *params)
{
    SDL_Rect *rect = &params->target_rect;
    struct pl_swapchain_frame swap_frame = {0};
    struct pl_frame pl_frame = {0};
    struct pl_frame target = {0};
    struct pl_render_params pl_params = pl_render_default_params;
    RendererContext *ctx = (RendererContext *) renderer;
    int ret = 0;
    struct pl_color_space hint = {0};

    // No-op for software frames; converts other hw formats to Vulkan.
    ret = convert_frame(renderer, frame);
    if (ret < 0)
        return ret;

    if (!pl_map_avframe_ex(ctx->placebo_vulkan->gpu, &pl_frame, pl_avframe_params(
            .frame = frame,
            .tex = ctx->tex))) {
        av_log(NULL, AV_LOG_ERROR, "pl_map_avframe_ex failed\n");
        return AVERROR_EXTERNAL;
    }

    // Give the swapchain the frame's color space before starting the frame.
    pl_color_space_from_avframe(&hint, frame);
    pl_swapchain_colorspace_hint(ctx->swapchain, &hint);
    if (!pl_swapchain_start_frame(ctx->swapchain, &swap_frame)) {
        av_log(NULL, AV_LOG_ERROR, "start frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    pl_frame_from_swapchain(&target, &swap_frame);

    // Place the video inside the caller-requested target rectangle.
    target.crop = (pl_rect2df){.x0 = rect->x, .x1 = rect->x + rect->w,
                               .y0 = rect->y, .y1 = rect->y + rect->h};
    switch (params->video_background_type) {
    case VIDEO_BACKGROUND_TILES:
        pl_params.background = PL_CLEAR_TILES;
        pl_params.tile_size = VIDEO_BACKGROUND_TILE_SIZE * 2;
        break;
    case VIDEO_BACKGROUND_COLOR:
        pl_params.background = PL_CLEAR_COLOR;
        // video_background_color is 0-255 per channel; libplacebo wants
        // normalized floats, with transparency inverted from alpha.
        for (int i = 0; i < 3; i++)
            pl_params.background_color[i] = params->video_background_color[i] / 255.0;
        pl_params.background_transparency = (255 - params->video_background_color[3]) / 255.0;
        break;
    case VIDEO_BACKGROUND_NONE:
        pl_frame.repr.alpha = PL_ALPHA_NONE;
        break;
    }

    if (!pl_render_image(ctx->renderer, &pl_frame, &target, &pl_params)) {
        av_log(NULL, AV_LOG_ERROR, "pl_render_image failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    if (!pl_swapchain_submit_frame(ctx->swapchain)) {
        av_log(NULL, AV_LOG_ERROR, "pl_swapchain_submit_frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }
    pl_swapchain_swap_buffers(ctx->swapchain);

out:
    // Unmap on every path, including after a failed start/render/submit.
    pl_unmap_avframe(ctx->placebo_vulkan->gpu, &pl_frame);
    return ret;
}
772
773 static int resize(VkRenderer *renderer, int width, int height)
774 {
775 RendererContext *ctx = (RendererContext *) renderer;
776
777 if (!pl_swapchain_resize(ctx->swapchain, &width, &height))
778 return AVERROR_EXTERNAL;
779 return 0;
780 }
781
/*
 * VkRenderer.destroy implementation. Safe on a partially-initialized
 * context: each step checks or tolerates its own unset resource. Note the
 * ordering: the surface is destroyed after the swapchain teardown and
 * before the instance goes away.
 */
static void destroy(VkRenderer *renderer)
{
    RendererContext *ctx = (RendererContext *) renderer;
    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;

    av_frame_free(&ctx->vk_frame);
    av_freep(&ctx->transfer_formats);
    av_hwframe_constraints_free(&ctx->constraints);
    av_buffer_unref(&ctx->hw_frame_ref);

    if (ctx->placebo_vulkan) {
        // Release the plane textures cached for pl_map_avframe_ex().
        for (int i = 0; i < FF_ARRAY_ELEMS(ctx->tex); i++)
            pl_tex_destroy(ctx->placebo_vulkan->gpu, &ctx->tex[i]);
        pl_renderer_destroy(&ctx->renderer);
        pl_swapchain_destroy(&ctx->swapchain);
        pl_vulkan_destroy(&ctx->placebo_vulkan);
    }

    if (ctx->vk_surface) {
        // The surface was created through SDL but must be destroyed through
        // Vulkan directly; resolve the function via the saved loader.
        vkDestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)
                ctx->get_proc_addr(ctx->inst, "vkDestroySurfaceKHR");
        vkDestroySurfaceKHR(ctx->inst, ctx->vk_surface, NULL);
        ctx->vk_surface = VK_NULL_HANDLE;
    }

    av_buffer_unref(&ctx->hw_device_ref);
    pl_vk_inst_destroy(&ctx->placebo_instance);

    pl_log_destroy(&ctx->vk_log);
}
812
/* AVClass so av_log() can identify the renderer as a logging context. */
static const AVClass vulkan_renderer_class = {
    .class_name = "Vulkan Renderer",
    .item_name = av_default_item_name,
    .version = LIBAVUTIL_VERSION_INT,
};
818
819 VkRenderer *vk_get_renderer(void)
820 {
821 RendererContext *ctx = av_mallocz(sizeof(*ctx));
822 VkRenderer *renderer;
823
824 if (!ctx)
825 return NULL;
826
827 renderer = &ctx->api;
828 renderer->class = &vulkan_renderer_class;
829 renderer->get_hw_dev = get_hw_dev;
830 renderer->create = create;
831 renderer->display = display;
832 renderer->resize = resize;
833 renderer->destroy = destroy;
834
835 return renderer;
836 }
837
838 #else
839
VkRenderer *vk_get_renderer(void)
{
    /* Vulkan renderer support was disabled at build time. */
    return NULL;
}
844
845 #endif
846
/* Public wrapper: initialize the renderer for window with options in opt. */
int vk_renderer_create(VkRenderer *renderer, SDL_Window *window,
                       AVDictionary *opt)
{
    return renderer->create(renderer, window, opt);
}
852
/* Public wrapper: fetch the renderer's hardware device reference. */
int vk_renderer_get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
{
    return renderer->get_hw_dev(renderer, dev);
}
857
/* Public wrapper: render and present one frame. */
int vk_renderer_display(VkRenderer *renderer, AVFrame *frame, RenderParams *render_params)
{
    return renderer->display(renderer, frame, render_params);
}
862
/* Public wrapper: propagate a drawable-size change to the renderer. */
int vk_renderer_resize(VkRenderer *renderer, int width, int height)
{
    return renderer->resize(renderer, width, height);
}
867
/* Public wrapper: release all renderer-owned resources. */
void vk_renderer_destroy(VkRenderer *renderer)
{
    renderer->destroy(renderer);
}
872