FFmpeg coverage


Directory: ../../../ffmpeg/
File: src/fftools/ffplay_renderer.c
Date: 2026-04-23 02:20:26
Exec Total Coverage
Lines: 0 13 0.0%
Functions: 0 6 0.0%
Branches: 0 0 -%

Line Branch Exec Source
1 /*
2 * This file is part of FFmpeg.
3 *
4 * FFmpeg is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
8 *
9 * FFmpeg is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
13 *
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with FFmpeg; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 */
18
19 #define VK_NO_PROTOTYPES
20 #define VK_ENABLE_BETA_EXTENSIONS
21
22 #include "config.h"
23 #include "ffplay_renderer.h"
24
25 #if (SDL_VERSION_ATLEAST(2, 0, 6) && CONFIG_LIBPLACEBO)
26 /* Get PL_API_VER */
27 #include <libplacebo/config.h>
28 #define HAVE_VULKAN_RENDERER (PL_API_VER >= 278)
29 #else
30 #define HAVE_VULKAN_RENDERER 0
31 #endif
32
33 #if HAVE_VULKAN_RENDERER
34
35 #if defined(_WIN32) && !defined(VK_USE_PLATFORM_WIN32_KHR)
36 #define VK_USE_PLATFORM_WIN32_KHR
37 #endif
38
39 #include <libplacebo/vulkan.h>
40 #include <libplacebo/utils/frame_queue.h>
41 #include <libplacebo/utils/libav.h>
42 #include <SDL_vulkan.h>
43
44 #include "libavutil/bprint.h"
45 #include "libavutil/mem.h"
46 #include "libavutil/internal.h"
47
48 #endif
49
/**
 * Abstract renderer interface: a vtable filled in by the concrete backend
 * (only the Vulkan/libplacebo backend exists here).  The public
 * vk_renderer_*() functions below simply dispatch through these pointers.
 */
struct VkRenderer {
    const AVClass *class;

    // Initialize against an SDL window; backend options come from `dict`.
    int (*create)(VkRenderer *renderer, SDL_Window *window, AVDictionary *dict);

    // Return a borrowed reference to the backend's AVHWDeviceContext.
    int (*get_hw_dev)(VkRenderer *renderer, AVBufferRef **dev);

    // Render one frame to the window.
    int (*display)(VkRenderer *renderer, AVFrame *frame, RenderParams *params);

    // React to a window/drawable size change.
    int (*resize)(VkRenderer *renderer, int width, int height);

    // Release all backend resources (does not free the renderer itself).
    void (*destroy)(VkRenderer *renderer);
};
63
64 #if HAVE_VULKAN_RENDERER
65
/**
 * Concrete state for the Vulkan/libplacebo renderer.  The embedded
 * VkRenderer must be the first member so the context can be cast to/from
 * the abstract interface.
 */
typedef struct RendererContext {
    VkRenderer api;

    // Can be NULL when vulkan instance is created by avutil
    pl_vk_inst placebo_instance;
    pl_vulkan placebo_vulkan;
    pl_swapchain swapchain;
    VkSurfaceKHR vk_surface;
    pl_renderer renderer;
    // Texture cache handed to pl_map_avframe_ex() in display()
    pl_tex tex[4];

    pl_log vk_log;

    AVBufferRef *hw_device_ref;
    // Vulkan hwframes pool used for upload; lazily (re)created per frame size
    AVBufferRef *hw_frame_ref;
    // AV_PIX_FMT_NONE-terminated list of formats transferable into hw_frame_ref
    enum AVPixelFormat *transfer_formats;
    AVHWFramesConstraints *constraints;

    PFN_vkGetInstanceProcAddr get_proc_addr;
    // This field is a copy from pl_vk_inst->instance or hw_device_ref instance.
    VkInstance inst;

    // Scratch frame holding the mapped/uploaded Vulkan image
    AVFrame *vk_frame;
} RendererContext;
90
91 static void vk_log_cb(void *log_priv, enum pl_log_level level,
92 const char *msg)
93 {
94 static const int level_map[] = {
95 AV_LOG_QUIET,
96 AV_LOG_FATAL,
97 AV_LOG_ERROR,
98 AV_LOG_WARNING,
99 AV_LOG_INFO,
100 AV_LOG_DEBUG,
101 AV_LOG_TRACE,
102 };
103
104 if (level > 0 && level < FF_ARRAY_ELEMS(level_map))
105 av_log(log_priv, level_map[level], "%s\n", msg);
106 }
107
108 static inline int enable_debug(const AVDictionary *opt)
109 {
110 AVDictionaryEntry *entry = av_dict_get(opt, "debug", NULL, 0);
111 int debug = entry && strtol(entry->value, NULL, 10);
112 return debug;
113 }
114
/**
 * libplacebo queue-lock hook: delegate to the avutil Vulkan hwdevice's
 * own lock_queue callback so both libraries serialize queue submission
 * through the same lock.
 */
static void hwctx_lock_queue(void *priv, uint32_t qf, uint32_t qidx)
{
    AVHWDeviceContext *avhwctx = priv;
    const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
    // lock_queue is deprecated but still the interop mechanism while
    // FF_API_VULKAN_SYNC_QUEUES is in effect; silence the warning locally.
#if FF_API_VULKAN_SYNC_QUEUES
FF_DISABLE_DEPRECATION_WARNINGS
    hwctx->lock_queue(avhwctx, qf, qidx);
FF_ENABLE_DEPRECATION_WARNINGS
#endif
}
125
/**
 * libplacebo queue-unlock hook: counterpart of hwctx_lock_queue(),
 * releasing the avutil hwdevice's queue lock.
 */
static void hwctx_unlock_queue(void *priv, uint32_t qf, uint32_t qidx)
{
    AVHWDeviceContext *avhwctx = priv;
    const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
    // See hwctx_lock_queue: deprecated API, still required for interop.
#if FF_API_VULKAN_SYNC_QUEUES
FF_DISABLE_DEPRECATION_WARNINGS
    hwctx->unlock_queue(avhwctx, qf, qidx);
FF_ENABLE_DEPRECATION_WARNINGS
#endif
}
136
137 static int add_instance_extension(const char **ext, unsigned num_ext,
138 const AVDictionary *opt,
139 AVDictionary **dict)
140 {
141 const char *inst_ext_key = "instance_extensions";
142 AVDictionaryEntry *entry;
143 AVBPrint buf;
144 char *ext_list = NULL;
145 int ret;
146
147 av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
148 for (int i = 0; i < num_ext; i++) {
149 if (i)
150 av_bprintf(&buf, "+%s", ext[i]);
151 else
152 av_bprintf(&buf, "%s", ext[i]);
153 }
154
155 entry = av_dict_get(opt, inst_ext_key, NULL, 0);
156 if (entry && entry->value && entry->value[0]) {
157 if (num_ext)
158 av_bprintf(&buf, "+");
159 av_bprintf(&buf, "%s", entry->value);
160 }
161
162 ret = av_bprint_finalize(&buf, &ext_list);
163 if (ret < 0)
164 return ret;
165 return av_dict_set(dict, inst_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
166 }
167
168 static int add_device_extension(const AVDictionary *opt,
169 AVDictionary **dict)
170 {
171 const char *dev_ext_key = "device_extensions";
172 AVDictionaryEntry *entry;
173 AVBPrint buf;
174 char *ext_list = NULL;
175 int ret;
176
177 av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
178 av_bprintf(&buf, "%s", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
179 for (int i = 0; i < pl_vulkan_num_recommended_extensions; i++)
180 av_bprintf(&buf, "+%s", pl_vulkan_recommended_extensions[i]);
181
182 entry = av_dict_get(opt, dev_ext_key, NULL, 0);
183 if (entry && entry->value && entry->value[0])
184 av_bprintf(&buf, "+%s", entry->value);
185
186 ret = av_bprint_finalize(&buf, &ext_list);
187 if (ret < 0)
188 return ret;
189 return av_dict_set(dict, dev_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
190 }
191
192 static const char *select_device(const AVDictionary *opt)
193 {
194 const AVDictionaryEntry *entry;
195
196 entry = av_dict_get(opt, "device", NULL, 0);
197 if (entry)
198 return entry->value;
199 return NULL;
200 }
201
/**
 * Create the Vulkan device through avutil's hwcontext machinery and import
 * it into libplacebo, so hardware decoding (hwcontext) and display
 * (libplacebo) share a single VkDevice.
 *
 * @param ext     instance extensions required by SDL for surface creation
 * @param num_ext number of entries in ext
 * @param opt     user options (device selection, extra extensions, ...)
 * @return 0 on success, a negative AVERROR code on failure.  Partially
 *         created state (hw_device_ref) is released later by destroy().
 */
static int create_vk_by_hwcontext(VkRenderer *renderer,
                                  const char **ext, unsigned num_ext,
                                  const AVDictionary *opt)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWDeviceContext *dev;
    AVVulkanDeviceContext *hwctx;
    AVDictionary *dict = NULL;
    int ret;

    ret = add_instance_extension(ext, num_ext, opt, &dict);
    if (ret < 0)
        return ret;
    ret = add_device_extension(opt, &dict);
    if (ret) {
        av_dict_free(&dict);
        return ret;
    }

    ret = av_hwdevice_ctx_create(&ctx->hw_device_ref, AV_HWDEVICE_TYPE_VULKAN,
                                 select_device(opt), dict, 0);
    av_dict_free(&dict);
    if (ret < 0)
        return ret;

    dev = (AVHWDeviceContext *) ctx->hw_device_ref->data;
    hwctx = dev->hwctx;

    // There is no way to pass SDL GetInstanceProcAddr to hwdevice.
    // Check the result and return error if they don't match.
    if (hwctx->get_proc_addr != SDL_Vulkan_GetVkGetInstanceProcAddr()) {
        av_log(renderer, AV_LOG_ERROR,
               "hwdevice and SDL use different get_proc_addr. "
               "Try -vulkan_params create_by_placebo=1\n");
        return AVERROR_PATCHWELCOME;
    }

    ctx->get_proc_addr = hwctx->get_proc_addr;
    ctx->inst = hwctx->inst;

    // Import the hwcontext-owned device into libplacebo.  Queue indices
    // default to VK_QUEUE_FAMILY_IGNORED and are filled in below from the
    // hwdevice's queue-family list.
    struct pl_vulkan_import_params import_params = {
        .instance = hwctx->inst,
        .get_proc_addr = hwctx->get_proc_addr,
        .phys_device = hwctx->phys_dev,
        .device = hwctx->act_dev,
        .extensions = hwctx->enabled_dev_extensions,
        .num_extensions = hwctx->nb_enabled_dev_extensions,
        .features = &hwctx->device_features,
        .lock_queue = hwctx_lock_queue,
        .unlock_queue = hwctx_unlock_queue,
        .queue_ctx = dev,
        .queue_graphics = {
            .index = VK_QUEUE_FAMILY_IGNORED,
            .count = 0,
        },
        .queue_compute = {
            .index = VK_QUEUE_FAMILY_IGNORED,
            .count = 0,
        },
        .queue_transfer = {
            .index = VK_QUEUE_FAMILY_IGNORED,
            .count = 0,
        },
    };
    // A queue family may carry several capability bits; the last matching
    // family wins for each capability.
    for (int i = 0; i < hwctx->nb_qf; i++) {
        const AVVulkanDeviceQueueFamily *qf = &hwctx->qf[i];

        if (qf->flags & VK_QUEUE_GRAPHICS_BIT) {
            import_params.queue_graphics.index = qf->idx;
            import_params.queue_graphics.count = qf->num;
        }
        if (qf->flags & VK_QUEUE_COMPUTE_BIT) {
            import_params.queue_compute.index = qf->idx;
            import_params.queue_compute.count = qf->num;
        }
        if (qf->flags & VK_QUEUE_TRANSFER_BIT) {
            import_params.queue_transfer.index = qf->idx;
            import_params.queue_transfer.count = qf->num;
        }
    }

    ctx->placebo_vulkan = pl_vulkan_import(ctx->vk_log, &import_params);
    if (!ctx->placebo_vulkan)
        return AVERROR_EXTERNAL;

    return 0;
}
289
/**
 * hwdevice queue-lock hook for the placebo-created device: delegate to
 * libplacebo's queue lock so avutil and libplacebo serialize submissions
 * through the same lock (mirror image of hwctx_lock_queue).
 */
static void placebo_lock_queue(struct AVHWDeviceContext *dev_ctx,
                               uint32_t queue_family, uint32_t index)
{
    RendererContext *ctx = dev_ctx->user_opaque;
    pl_vulkan vk = ctx->placebo_vulkan;
    // Deprecated interop API, still required while FF_API_VULKAN_SYNC_QUEUES
    // is in effect.
#if FF_API_VULKAN_SYNC_QUEUES
FF_DISABLE_DEPRECATION_WARNINGS
    vk->lock_queue(vk, queue_family, index);
FF_ENABLE_DEPRECATION_WARNINGS
#endif
}
301
/**
 * hwdevice queue-unlock hook for the placebo-created device: counterpart
 * of placebo_lock_queue().
 */
static void placebo_unlock_queue(struct AVHWDeviceContext *dev_ctx,
                                 uint32_t queue_family,
                                 uint32_t index)
{
    RendererContext *ctx = dev_ctx->user_opaque;
    pl_vulkan vk = ctx->placebo_vulkan;
    // See placebo_lock_queue: deprecated API, still required for interop.
#if FF_API_VULKAN_SYNC_QUEUES
FF_DISABLE_DEPRECATION_WARNINGS
    vk->unlock_queue(vk, queue_family, index);
FF_ENABLE_DEPRECATION_WARNINGS
#endif
}
314
315 static int get_decode_queue(VkRenderer *renderer, int *index, int *count)
316 {
317 RendererContext *ctx = (RendererContext *) renderer;
318 VkQueueFamilyProperties *queue_family_prop = NULL;
319 uint32_t num_queue_family_prop = 0;
320 PFN_vkGetPhysicalDeviceQueueFamilyProperties get_queue_family_prop;
321 PFN_vkGetInstanceProcAddr get_proc_addr = ctx->get_proc_addr;
322
323 *index = -1;
324 *count = 0;
325 get_queue_family_prop = (PFN_vkGetPhysicalDeviceQueueFamilyProperties)
326 get_proc_addr(ctx->placebo_instance->instance,
327 "vkGetPhysicalDeviceQueueFamilyProperties");
328 get_queue_family_prop(ctx->placebo_vulkan->phys_device,
329 &num_queue_family_prop, NULL);
330 if (!num_queue_family_prop)
331 return AVERROR_EXTERNAL;
332
333 queue_family_prop = av_calloc(num_queue_family_prop,
334 sizeof(*queue_family_prop));
335 if (!queue_family_prop)
336 return AVERROR(ENOMEM);
337
338 get_queue_family_prop(ctx->placebo_vulkan->phys_device,
339 &num_queue_family_prop,
340 queue_family_prop);
341
342 for (int i = 0; i < num_queue_family_prop; i++) {
343 if (queue_family_prop[i].queueFlags & VK_QUEUE_VIDEO_DECODE_BIT_KHR) {
344 *index = i;
345 *count = queue_family_prop[i].queueCount;
346 break;
347 }
348 }
349 av_free(queue_family_prop);
350
351 return 0;
352 }
353
/**
 * Create the Vulkan instance and device via libplacebo, then wrap them in
 * an avutil AVHWDeviceContext so decoders can use the same device.  This is
 * the inverse direction of create_vk_by_hwcontext().
 *
 * @param ext     instance extensions required by SDL for surface creation
 * @param num_ext number of entries in ext
 * @param opt     user options ("debug", "device", ...)
 * @return 0 on success, a negative AVERROR code on failure.  Partially
 *         created state is released later by destroy().
 */
static int create_vk_by_placebo(VkRenderer *renderer,
                                const char **ext, unsigned num_ext,
                                const AVDictionary *opt)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWDeviceContext *device_ctx;
    AVVulkanDeviceContext *vk_dev_ctx;
    int decode_index;
    int decode_count;
    int ret;
    const char **dev_exts;
    int num_dev_exts;

    ctx->get_proc_addr = SDL_Vulkan_GetVkGetInstanceProcAddr();

    ctx->placebo_instance = pl_vk_inst_create(ctx->vk_log, pl_vk_inst_params(
            .get_proc_addr = ctx->get_proc_addr,
            .debug = enable_debug(opt),
            .extensions = ext,
            .num_extensions = num_ext
    ));
    if (!ctx->placebo_instance) {
        return AVERROR_EXTERNAL;
    }
    ctx->inst = ctx->placebo_instance->instance;

    // Optional device extensions avutil's Vulkan hwcontext can make use of
    // (e.g. for video decode); caller owns and frees the returned array.
    dev_exts = av_vk_get_optional_device_extensions(&num_dev_exts);
    if (!dev_exts)
        return AVERROR(ENOMEM);

    ctx->placebo_vulkan = pl_vulkan_create(ctx->vk_log, pl_vulkan_params(
            .instance = ctx->placebo_instance->instance,
            .get_proc_addr = ctx->placebo_instance->get_proc_addr,
            .surface = ctx->vk_surface,
            .allow_software = false,
            .opt_extensions = dev_exts,
            .num_opt_extensions = num_dev_exts,
            // Ask for extra decode queues beyond placebo's own needs.
            .extra_queues = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
            .device_name = select_device(opt),
    ));
    av_free(dev_exts);
    if (!ctx->placebo_vulkan)
        return AVERROR_EXTERNAL;
    ctx->hw_device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VULKAN);
    if (!ctx->hw_device_ref) {
        return AVERROR(ENOMEM);
    }

    device_ctx = (AVHWDeviceContext *) ctx->hw_device_ref->data;
    // user_opaque lets the queue-lock hooks find this context.
    device_ctx->user_opaque = ctx;

    vk_dev_ctx = device_ctx->hwctx;
#if FF_API_VULKAN_SYNC_QUEUES
FF_DISABLE_DEPRECATION_WARNINGS
    vk_dev_ctx->lock_queue = placebo_lock_queue;
    vk_dev_ctx->unlock_queue = placebo_unlock_queue;
FF_ENABLE_DEPRECATION_WARNINGS
#endif

    vk_dev_ctx->get_proc_addr = ctx->placebo_instance->get_proc_addr;

    // Hand the placebo-owned handles to the hwdevice (it does not own them).
    vk_dev_ctx->inst = ctx->placebo_instance->instance;
    vk_dev_ctx->phys_dev = ctx->placebo_vulkan->phys_device;
    vk_dev_ctx->act_dev = ctx->placebo_vulkan->device;

    vk_dev_ctx->device_features = *ctx->placebo_vulkan->features;

    vk_dev_ctx->enabled_inst_extensions = ctx->placebo_instance->extensions;
    vk_dev_ctx->nb_enabled_inst_extensions = ctx->placebo_instance->num_extensions;

    vk_dev_ctx->enabled_dev_extensions = ctx->placebo_vulkan->extensions;
    vk_dev_ctx->nb_enabled_dev_extensions = ctx->placebo_vulkan->num_extensions;

    // Describe the queue families libplacebo selected, plus the decode
    // family discovered below.
    int nb_qf = 0;
    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = ctx->placebo_vulkan->queue_graphics.index,
        .num = ctx->placebo_vulkan->queue_graphics.count,
        .flags = VK_QUEUE_GRAPHICS_BIT,
    };
    nb_qf++;
    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = ctx->placebo_vulkan->queue_transfer.index,
        .num = ctx->placebo_vulkan->queue_transfer.count,
        .flags = VK_QUEUE_TRANSFER_BIT,
    };
    nb_qf++;
    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = ctx->placebo_vulkan->queue_compute.index,
        .num = ctx->placebo_vulkan->queue_compute.count,
        .flags = VK_QUEUE_COMPUTE_BIT,
    };
    nb_qf++;

    ret = get_decode_queue(renderer, &decode_index, &decode_count);
    if (ret < 0)
        return ret;

    // NOTE(review): decode_index may be -1 when no decode queue exists;
    // presumably the hwcontext tolerates that — confirm against
    // hwcontext_vulkan behavior.
    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = decode_index,
        .num = decode_count,
        .flags = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
    };
    nb_qf++;

    vk_dev_ctx->nb_qf = nb_qf;

    ret = av_hwdevice_ctx_init(ctx->hw_device_ref);
    if (ret < 0)
        return ret;

    return 0;
}
466
/**
 * VkRenderer.create implementation: set up logging, the Vulkan
 * instance/device (via hwcontext or libplacebo depending on the
 * "create_by_placebo" option), the window surface, the swapchain, and the
 * libplacebo renderer.
 *
 * On failure, resources created so far are left in ctx and released by
 * destroy(); only the local extension array is freed here.
 *
 * @return 0 on success, a negative AVERROR code otherwise.
 */
static int create(VkRenderer *renderer, SDL_Window *window, AVDictionary *opt)
{
    int ret = 0;
    unsigned num_ext = 0;
    const char **ext = NULL;
    int w, h;
    struct pl_log_params vk_log_params = {
        .log_cb = vk_log_cb,
        .log_level = PL_LOG_DEBUG,
        .log_priv = renderer,
    };
    RendererContext *ctx = (RendererContext *) renderer;
    AVDictionaryEntry *entry;

    ctx->vk_log = pl_log_create(PL_API_VER, &vk_log_params);

    // First call gets the number of extensions SDL needs for this window.
    if (!SDL_Vulkan_GetInstanceExtensions(window, &num_ext, NULL)) {
        av_log(NULL, AV_LOG_FATAL, "Failed to get vulkan extensions: %s\n",
               SDL_GetError());
        return AVERROR_EXTERNAL;
    }

    ext = av_calloc(num_ext, sizeof(*ext));
    if (!ext) {
        ret = AVERROR(ENOMEM);
        goto out;
    }

    // Second call fills the array; assumed to succeed since the first did.
    SDL_Vulkan_GetInstanceExtensions(window, &num_ext, ext);

    entry = av_dict_get(opt, "create_by_placebo", NULL, 0);
    if (entry && strtol(entry->value, NULL, 10))
        ret = create_vk_by_placebo(renderer, ext, num_ext, opt);
    else
        ret = create_vk_by_hwcontext(renderer, ext, num_ext, opt);
    if (ret < 0)
        goto out;

    if (!SDL_Vulkan_CreateSurface(window, ctx->inst, &ctx->vk_surface)) {
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    ctx->swapchain = pl_vulkan_create_swapchain(
            ctx->placebo_vulkan,
            pl_vulkan_swapchain_params(
                    .surface = ctx->vk_surface,
                    // FIFO is universally supported and vsync-locked.
                    .present_mode = VK_PRESENT_MODE_FIFO_KHR));
    if (!ctx->swapchain) {
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    // Size the swapchain to the drawable (may differ from window size on
    // high-DPI displays).
    SDL_Vulkan_GetDrawableSize(window, &w, &h);
    pl_swapchain_resize(ctx->swapchain, &w, &h);

    ctx->renderer = pl_renderer_create(ctx->vk_log, ctx->placebo_vulkan->gpu);
    if (!ctx->renderer) {
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    ctx->vk_frame = av_frame_alloc();
    if (!ctx->vk_frame) {
        ret = AVERROR(ENOMEM);
        goto out;
    }

    ret = 0;

out:
    av_free(ext);
    return ret;
}
541
542 static int get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
543 {
544 RendererContext *ctx = (RendererContext *) renderer;
545
546 *dev = ctx->hw_device_ref;
547 return 0;
548 }
549
/**
 * Lazily (re)create the Vulkan hwframes pool matching `frame`'s size and
 * software format.  Several "failure" cases deliberately return 0 without
 * a pool: when the device's constraints rule out the size or format, the
 * caller falls back to a GPU->CPU->GPU copy path instead.
 *
 * @return 0 on success or benign skip, a negative AVERROR code on real
 *         errors (allocation, hwframe init).
 */
static int create_hw_frame(VkRenderer *renderer, AVFrame *frame)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWFramesContext *src_hw_frame = (AVHWFramesContext *)
            frame->hw_frames_ctx->data;
    AVHWFramesContext *hw_frame;
    AVVulkanFramesContext *vk_frame_ctx;
    int ret;

    // Reuse the existing pool if geometry and format still match.
    if (ctx->hw_frame_ref) {
        hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;

        if (hw_frame->width == frame->width &&
            hw_frame->height == frame->height &&
            hw_frame->sw_format == src_hw_frame->sw_format)
            return 0;

        av_buffer_unref(&ctx->hw_frame_ref);
    }

    // Constraints are queried once and cached for the renderer's lifetime.
    if (!ctx->constraints) {
        ctx->constraints = av_hwdevice_get_hwframe_constraints(
                ctx->hw_device_ref, NULL);
        if (!ctx->constraints)
            return AVERROR(ENOMEM);
    }

    // Check constraints and skip create hwframe. Don't take it as error since
    // we can fallback to memory copy from GPU to CPU.
    if ((ctx->constraints->max_width &&
         ctx->constraints->max_width < frame->width) ||
        (ctx->constraints->max_height &&
         ctx->constraints->max_height < frame->height) ||
        (ctx->constraints->min_width &&
         ctx->constraints->min_width > frame->width) ||
        (ctx->constraints->min_height &&
         ctx->constraints->min_height > frame->height))
        return 0;

    // Likewise skip (not fail) when the source sw_format is unsupported.
    if (ctx->constraints->valid_sw_formats) {
        enum AVPixelFormat *sw_formats = ctx->constraints->valid_sw_formats;
        while (*sw_formats != AV_PIX_FMT_NONE) {
            if (*sw_formats == src_hw_frame->sw_format)
                break;
            sw_formats++;
        }
        if (*sw_formats == AV_PIX_FMT_NONE)
            return 0;
    }

    ctx->hw_frame_ref = av_hwframe_ctx_alloc(ctx->hw_device_ref);
    if (!ctx->hw_frame_ref)
        return AVERROR(ENOMEM);

    hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;
    hw_frame->format = AV_PIX_FMT_VULKAN;
    hw_frame->sw_format = src_hw_frame->sw_format;
    hw_frame->width = frame->width;
    hw_frame->height = frame->height;

    // CUDA interop needs separate per-plane images rather than multiplane.
    if (frame->format == AV_PIX_FMT_CUDA) {
        vk_frame_ctx = hw_frame->hwctx;
        vk_frame_ctx->flags = AV_VK_FRAME_FLAG_DISABLE_MULTIPLANE;
    }

    ret = av_hwframe_ctx_init(ctx->hw_frame_ref);
    if (ret < 0) {
        av_log(renderer, AV_LOG_ERROR, "Create hwframe context failed, %s\n",
               av_err2str(ret));
        return ret;
    }

    // Best effort: a failure leaves transfer_formats NULL, which simply
    // disables the hw transfer path in check_hw_transfer().
    av_hwframe_transfer_get_formats(ctx->hw_frame_ref,
                                    AV_HWFRAME_TRANSFER_DIRECTION_TO,
                                    &ctx->transfer_formats, 0);

    return 0;
}
628
629 static inline int check_hw_transfer(RendererContext *ctx, AVFrame *frame)
630 {
631 if (!ctx->hw_frame_ref || !ctx->transfer_formats)
632 return 0;
633
634 for (int i = 0; ctx->transfer_formats[i] != AV_PIX_FMT_NONE; i++)
635 if (ctx->transfer_formats[i] == frame->format)
636 return 1;
637
638 return 0;
639 }
640
641 static inline int move_to_output_frame(RendererContext *ctx, AVFrame *frame)
642 {
643 int ret = av_frame_copy_props(ctx->vk_frame, frame);
644 if (ret < 0)
645 return ret;
646 av_frame_unref(frame);
647 av_frame_move_ref(frame, ctx->vk_frame);
648 return 0;
649 }
650
651 static int map_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
652 {
653 RendererContext *ctx = (RendererContext *) renderer;
654 int ret;
655
656 if (use_hw_frame && !ctx->hw_frame_ref)
657 return AVERROR(ENOSYS);
658
659 // Try map data first
660 av_frame_unref(ctx->vk_frame);
661 if (use_hw_frame) {
662 ctx->vk_frame->hw_frames_ctx = av_buffer_ref(ctx->hw_frame_ref);
663 ctx->vk_frame->format = AV_PIX_FMT_VULKAN;
664 }
665 ret = av_hwframe_map(ctx->vk_frame, frame, 0);
666 if (!ret)
667 return move_to_output_frame(ctx, frame);
668
669 if (ret != AVERROR(ENOSYS))
670 av_log(NULL, AV_LOG_FATAL, "Map frame failed: %s\n", av_err2str(ret));
671 return ret;
672 }
673
674 static int transfer_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
675 {
676 RendererContext *ctx = (RendererContext *) renderer;
677 int ret;
678
679 if (use_hw_frame && !check_hw_transfer(ctx, frame))
680 return AVERROR(ENOSYS);
681
682 av_frame_unref(ctx->vk_frame);
683 if (use_hw_frame)
684 av_hwframe_get_buffer(ctx->hw_frame_ref, ctx->vk_frame, 0);
685 ret = av_hwframe_transfer_data(ctx->vk_frame, frame, 1);
686 if (!ret)
687 return move_to_output_frame(ctx, frame);
688
689 if (ret != AVERROR(ENOSYS))
690 av_log(NULL, AV_LOG_FATAL, "Transfer frame failed: %s\n",
691 av_err2str(ret));
692 return ret;
693 }
694
695 static int convert_frame(VkRenderer *renderer, AVFrame *frame)
696 {
697 int ret;
698
699 if (!frame->hw_frames_ctx)
700 return 0;
701
702 if (frame->format == AV_PIX_FMT_VULKAN)
703 return 0;
704
705 ret = create_hw_frame(renderer, frame);
706 if (ret < 0)
707 return ret;
708
709 for (int use_hw = 1; use_hw >=0; use_hw--) {
710 ret = map_frame(renderer, frame, use_hw);
711 if (!ret)
712 return 0;
713 if (ret != AVERROR(ENOSYS))
714 return ret;
715
716 ret = transfer_frame(renderer, frame, use_hw);
717 if (!ret)
718 return 0;
719 if (ret != AVERROR(ENOSYS))
720 return ret;
721 }
722
723 return ret;
724 }
725
/**
 * VkRenderer.display implementation: convert `frame` if needed, map it
 * into libplacebo, acquire a swapchain image, render with the configured
 * background, then submit and present.
 *
 * @param params target rectangle and background settings from ffplay
 * @return 0 on success, a negative AVERROR code otherwise.
 */
static int display(VkRenderer *renderer, AVFrame *frame, RenderParams *params)
{
    SDL_Rect *rect = &params->target_rect;
    struct pl_swapchain_frame swap_frame = {0};
    struct pl_frame pl_frame = {0};
    struct pl_frame target = {0};
    struct pl_render_params pl_params = pl_render_default_params;
    RendererContext *ctx = (RendererContext *) renderer;
    int ret = 0;
    struct pl_color_space hint = {0};

    ret = convert_frame(renderer, frame);
    if (ret < 0)
        return ret;

    // ctx->tex caches the upload textures across calls.
    if (!pl_map_avframe_ex(ctx->placebo_vulkan->gpu, &pl_frame, pl_avframe_params(
            .frame = frame,
            .tex = ctx->tex))) {
        av_log(NULL, AV_LOG_ERROR, "pl_map_avframe_ex failed\n");
        return AVERROR_EXTERNAL;
    }

    // Hint the swapchain toward the frame's color space (e.g. for HDR
    // passthrough) before starting the frame.
    pl_color_space_from_avframe(&hint, frame);
    pl_swapchain_colorspace_hint(ctx->swapchain, &hint);
    if (!pl_swapchain_start_frame(ctx->swapchain, &swap_frame)) {
        av_log(NULL, AV_LOG_ERROR, "start frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    pl_frame_from_swapchain(&target, &swap_frame);

    // Position the video inside the window per ffplay's computed rect.
    target.crop = (pl_rect2df){.x0 = rect->x, .x1 = rect->x + rect->w,
                               .y0 = rect->y, .y1 = rect->y + rect->h};
    switch (params->video_background_type) {
    case VIDEO_BACKGROUND_TILES:
        pl_params.background = PL_CLEAR_TILES;
        pl_params.tile_size = VIDEO_BACKGROUND_TILE_SIZE * 2;
        break;
    case VIDEO_BACKGROUND_COLOR:
        pl_params.background = PL_CLEAR_COLOR;
        // background_color is normalized RGB; the fourth byte is alpha,
        // inverted into a transparency value.
        for (int i = 0; i < 3; i++)
            pl_params.background_color[i] = params->video_background_color[i] / 255.0;
        pl_params.background_transparency = (255 - params->video_background_color[3]) / 255.0;
        break;
    case VIDEO_BACKGROUND_NONE:
        pl_frame.repr.alpha = PL_ALPHA_NONE;
        break;
    }

    if (!pl_render_image(ctx->renderer, &pl_frame, &target, &pl_params)) {
        av_log(NULL, AV_LOG_ERROR, "pl_render_image failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    if (!pl_swapchain_submit_frame(ctx->swapchain)) {
        av_log(NULL, AV_LOG_ERROR, "pl_swapchain_submit_frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }
    pl_swapchain_swap_buffers(ctx->swapchain);

out:
    // Unmap on every path; the cached textures in ctx->tex survive.
    pl_unmap_avframe(ctx->placebo_vulkan->gpu, &pl_frame);
    return ret;
}
793
794 static int resize(VkRenderer *renderer, int width, int height)
795 {
796 RendererContext *ctx = (RendererContext *) renderer;
797
798 if (!pl_swapchain_resize(ctx->swapchain, &width, &height))
799 return AVERROR_EXTERNAL;
800 return 0;
801 }
802
/**
 * VkRenderer.destroy implementation: tear down everything create() built,
 * in reverse dependency order (frames/textures before the libplacebo
 * device, the surface before the instance).  Safe to call on a partially
 * initialized context, since every step checks for NULL.
 */
static void destroy(VkRenderer *renderer)
{
    RendererContext *ctx = (RendererContext *) renderer;
    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;

    av_frame_free(&ctx->vk_frame);
    av_freep(&ctx->transfer_formats);
    av_hwframe_constraints_free(&ctx->constraints);
    av_buffer_unref(&ctx->hw_frame_ref);

    if (ctx->placebo_vulkan) {
        // Cached upload textures must go before the GPU they live on.
        for (int i = 0; i < FF_ARRAY_ELEMS(ctx->tex); i++)
            pl_tex_destroy(ctx->placebo_vulkan->gpu, &ctx->tex[i]);
        pl_renderer_destroy(&ctx->renderer);
        pl_swapchain_destroy(&ctx->swapchain);
        pl_vulkan_destroy(&ctx->placebo_vulkan);
    }

    // The surface is owned by us (created via SDL), not by libplacebo, so
    // it is destroyed through the raw Vulkan entry point.
    if (ctx->vk_surface) {
        vkDestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)
                ctx->get_proc_addr(ctx->inst, "vkDestroySurfaceKHR");
        vkDestroySurfaceKHR(ctx->inst, ctx->vk_surface, NULL);
        ctx->vk_surface = VK_NULL_HANDLE;
    }

    av_buffer_unref(&ctx->hw_device_ref);
    // No-op when the instance was created by avutil (placebo_instance NULL).
    pl_vk_inst_destroy(&ctx->placebo_instance);

    pl_log_destroy(&ctx->vk_log);
}
833
// AVClass so the renderer can serve as an av_log() context (see the
// `class` member of VkRenderer).
static const AVClass vulkan_renderer_class = {
    .class_name = "Vulkan Renderer",
    .item_name = av_default_item_name,
    .version = LIBAVUTIL_VERSION_INT,
};
839
840 VkRenderer *vk_get_renderer(void)
841 {
842 RendererContext *ctx = av_mallocz(sizeof(*ctx));
843 VkRenderer *renderer;
844
845 if (!ctx)
846 return NULL;
847
848 renderer = &ctx->api;
849 renderer->class = &vulkan_renderer_class;
850 renderer->get_hw_dev = get_hw_dev;
851 renderer->create = create;
852 renderer->display = display;
853 renderer->resize = resize;
854 renderer->destroy = destroy;
855
856 return renderer;
857 }
858
859 #else
860
// Stub when built without Vulkan renderer support (no libplacebo >= 278 or
// SDL too old): callers treat NULL as "Vulkan unavailable".
VkRenderer *vk_get_renderer(void)
{
    return NULL;
}
865
866 #endif
867
// Public entry point: dispatch to the backend's create implementation.
int vk_renderer_create(VkRenderer *renderer, SDL_Window *window,
                       AVDictionary *opt)
{
    return renderer->create(renderer, window, opt);
}
873
// Public entry point: borrow the backend's hwdevice reference (not owned
// by the caller).
int vk_renderer_get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
{
    return renderer->get_hw_dev(renderer, dev);
}
878
// Public entry point: render one frame through the backend.
int vk_renderer_display(VkRenderer *renderer, AVFrame *frame, RenderParams *render_params)
{
    return renderer->display(renderer, frame, render_params);
}
883
// Public entry point: propagate a drawable size change to the backend.
int vk_renderer_resize(VkRenderer *renderer, int width, int height)
{
    return renderer->resize(renderer, width, height);
}
888
// Public entry point: release backend resources (the VkRenderer allocation
// itself is freed separately by the caller).
void vk_renderer_destroy(VkRenderer *renderer)
{
    renderer->destroy(renderer);
}
893