FFmpeg coverage


Directory: ../../../ffmpeg/
File: src/fftools/ffplay_renderer.c
Date: 2025-07-01 21:35:40
Exec Total Coverage
Lines: 0 13 0.0%
Functions: 0 6 0.0%
Branches: 0 0 -%

Line Branch Exec Source
1 /*
2 * This file is part of FFmpeg.
3 *
4 * FFmpeg is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
8 *
9 * FFmpeg is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
13 *
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with FFmpeg; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 */
18
19 #define VK_NO_PROTOTYPES
20 #define VK_ENABLE_BETA_EXTENSIONS
21
22 #include "config.h"
23 #include "ffplay_renderer.h"
24
25 #if (SDL_VERSION_ATLEAST(2, 0, 6) && CONFIG_LIBPLACEBO)
26 /* Get PL_API_VER */
27 #include <libplacebo/config.h>
28 #define HAVE_VULKAN_RENDERER (PL_API_VER >= 278)
29 #else
30 #define HAVE_VULKAN_RENDERER 0
31 #endif
32
33 #if HAVE_VULKAN_RENDERER
34
35 #if defined(_WIN32) && !defined(VK_USE_PLATFORM_WIN32_KHR)
36 #define VK_USE_PLATFORM_WIN32_KHR
37 #endif
38
39 #include <libplacebo/vulkan.h>
40 #include <libplacebo/utils/frame_queue.h>
41 #include <libplacebo/utils/libav.h>
42 #include <SDL_vulkan.h>
43
44 #include "libavutil/bprint.h"
45 #include "libavutil/mem.h"
46
47 #endif
48
/*
 * Abstract renderer interface. The public vk_renderer_*() wrappers below
 * dispatch through these function pointers.
 */
struct VkRenderer {
    const AVClass *class;

    /* Initialize the renderer for the given SDL window.
     * dict carries user options; returns 0 or a negative AVERROR. */
    int (*create)(VkRenderer *renderer, SDL_Window *window, AVDictionary *dict);

    /* Return the Vulkan hwdevice in *dev (borrowed reference — the
     * renderer keeps ownership). */
    int (*get_hw_dev)(VkRenderer *renderer, AVBufferRef **dev);

    /* Render and present one frame. */
    int (*display)(VkRenderer *renderer, AVFrame *frame);

    /* Notify the renderer that the drawable size changed. */
    int (*resize)(VkRenderer *renderer, int width, int height);

    /* Release all resources held by the renderer. */
    void (*destroy)(VkRenderer *renderer);
};
62
63 #if HAVE_VULKAN_RENDERER
64
typedef struct RendererContext {
    // Must be first so that a VkRenderer* can be cast to RendererContext*.
    VkRenderer api;

    // Can be NULL when vulkan instance is created by avutil
    pl_vk_inst placebo_instance;
    pl_vulkan placebo_vulkan;
    pl_swapchain swapchain;
    VkSurfaceKHR vk_surface;
    pl_renderer renderer;
    // Scratch textures handed to pl_map_avframe_ex() and destroyed in destroy().
    pl_tex tex[4];

    pl_log vk_log;

    // Vulkan hwdevice shared with the decoder via get_hw_dev().
    AVBufferRef *hw_device_ref;
    // Lazily (re)created hwframes context used for map/transfer conversion.
    AVBufferRef *hw_frame_ref;
    // AV_PIX_FMT_NONE-terminated list of formats uploadable into hw_frame_ref.
    enum AVPixelFormat *transfer_formats;
    // Cached device constraints, queried once in create_hw_frame().
    AVHWFramesConstraints *constraints;

    PFN_vkGetInstanceProcAddr get_proc_addr;
    // This field is a copy from pl_vk_inst->instance or hw_device_ref instance.
    VkInstance inst;

    // Scratch frame that receives the converted (Vulkan) frame data.
    AVFrame *vk_frame;
} RendererContext;
89
90 static void vk_log_cb(void *log_priv, enum pl_log_level level,
91 const char *msg)
92 {
93 static const int level_map[] = {
94 AV_LOG_QUIET,
95 AV_LOG_FATAL,
96 AV_LOG_ERROR,
97 AV_LOG_WARNING,
98 AV_LOG_INFO,
99 AV_LOG_DEBUG,
100 AV_LOG_TRACE,
101 };
102
103 if (level > 0 && level < FF_ARRAY_ELEMS(level_map))
104 av_log(log_priv, level_map[level], "%s\n", msg);
105 }
106
// Optional device extensions enabled when available.
// Should keep sync with optional_device_exts inside hwcontext_vulkan.c
static const char *optional_device_exts[] = {
    /* Misc or required by other extensions */
    VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME,
    VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME,
    VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
    VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME,
    VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME,
    VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME,
    VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME,

    /* Imports/exports */
    VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
    VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,
    VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,
    VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME,
#ifdef _WIN32
    VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME,
#endif

    /* Video encoding/decoding */
    VK_KHR_VIDEO_QUEUE_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME,
    // Spelled out because the header macro may not exist in older Mesa/Vulkan
    // headers — presumably; TODO confirm against the headers in use.
    "VK_MESA_video_decode_av1",
};
136
137 static inline int enable_debug(const AVDictionary *opt)
138 {
139 AVDictionaryEntry *entry = av_dict_get(opt, "debug", NULL, 0);
140 int debug = entry && strtol(entry->value, NULL, 10);
141 return debug;
142 }
143
144 static void hwctx_lock_queue(void *priv, uint32_t qf, uint32_t qidx)
145 {
146 AVHWDeviceContext *avhwctx = priv;
147 const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
148 hwctx->lock_queue(avhwctx, qf, qidx);
149 }
150
151 static void hwctx_unlock_queue(void *priv, uint32_t qf, uint32_t qidx)
152 {
153 AVHWDeviceContext *avhwctx = priv;
154 const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
155 hwctx->unlock_queue(avhwctx, qf, qidx);
156 }
157
158 static int add_instance_extension(const char **ext, unsigned num_ext,
159 const AVDictionary *opt,
160 AVDictionary **dict)
161 {
162 const char *inst_ext_key = "instance_extensions";
163 AVDictionaryEntry *entry;
164 AVBPrint buf;
165 char *ext_list = NULL;
166 int ret;
167
168 av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
169 for (int i = 0; i < num_ext; i++) {
170 if (i)
171 av_bprintf(&buf, "+%s", ext[i]);
172 else
173 av_bprintf(&buf, "%s", ext[i]);
174 }
175
176 entry = av_dict_get(opt, inst_ext_key, NULL, 0);
177 if (entry && entry->value && entry->value[0]) {
178 if (num_ext)
179 av_bprintf(&buf, "+");
180 av_bprintf(&buf, "%s", entry->value);
181 }
182
183 ret = av_bprint_finalize(&buf, &ext_list);
184 if (ret < 0)
185 return ret;
186 return av_dict_set(dict, inst_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
187 }
188
189 static int add_device_extension(const AVDictionary *opt,
190 AVDictionary **dict)
191 {
192 const char *dev_ext_key = "device_extensions";
193 AVDictionaryEntry *entry;
194 AVBPrint buf;
195 char *ext_list = NULL;
196 int ret;
197
198 av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
199 av_bprintf(&buf, "%s", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
200 for (int i = 0; i < pl_vulkan_num_recommended_extensions; i++)
201 av_bprintf(&buf, "+%s", pl_vulkan_recommended_extensions[i]);
202
203 entry = av_dict_get(opt, dev_ext_key, NULL, 0);
204 if (entry && entry->value && entry->value[0])
205 av_bprintf(&buf, "+%s", entry->value);
206
207 ret = av_bprint_finalize(&buf, &ext_list);
208 if (ret < 0)
209 return ret;
210 return av_dict_set(dict, dev_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
211 }
212
213 static const char *select_device(const AVDictionary *opt)
214 {
215 const AVDictionaryEntry *entry;
216
217 entry = av_dict_get(opt, "device", NULL, 0);
218 if (entry)
219 return entry->value;
220 return NULL;
221 }
222
/*
 * Create the Vulkan device through an FFmpeg Vulkan hwdevice context,
 * then import that device into libplacebo.
 *
 * ext/num_ext: instance extensions required by SDL.
 * opt:         user options (-vulkan_params).
 *
 * Returns 0 on success or a negative AVERROR.  On failure,
 * ctx->hw_device_ref may already be set; destroy() releases it.
 */
static int create_vk_by_hwcontext(VkRenderer *renderer,
                                  const char **ext, unsigned num_ext,
                                  const AVDictionary *opt)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWDeviceContext *dev;
    AVVulkanDeviceContext *hwctx;
    AVDictionary *dict = NULL;
    int ret;

    /* Pass the needed instance/device extensions to the hwdevice. */
    ret = add_instance_extension(ext, num_ext, opt, &dict);
    if (ret < 0)
        return ret;
    ret = add_device_extension(opt, &dict);
    if (ret) {
        av_dict_free(&dict);
        return ret;
    }

    ret = av_hwdevice_ctx_create(&ctx->hw_device_ref, AV_HWDEVICE_TYPE_VULKAN,
                                 select_device(opt), dict, 0);
    av_dict_free(&dict);
    if (ret < 0)
        return ret;

    dev = (AVHWDeviceContext *) ctx->hw_device_ref->data;
    hwctx = dev->hwctx;

    // There is no way to pass SDL GetInstanceProcAddr to hwdevice.
    // Check the result and return error if they don't match.
    if (hwctx->get_proc_addr != SDL_Vulkan_GetVkGetInstanceProcAddr()) {
        av_log(renderer, AV_LOG_ERROR,
               "hwdevice and SDL use different get_proc_addr. "
               "Try -vulkan_params create_by_placebo=1\n");
        return AVERROR_PATCHWELCOME;
    }

    ctx->get_proc_addr = hwctx->get_proc_addr;
    ctx->inst = hwctx->inst;

    /* Describe the already-created device so libplacebo can reuse it
     * instead of creating its own. */
    struct pl_vulkan_import_params import_params = {
        .instance = hwctx->inst,
        .get_proc_addr = hwctx->get_proc_addr,
        .phys_device = hwctx->phys_dev,
        .device = hwctx->act_dev,
        .extensions = hwctx->enabled_dev_extensions,
        .num_extensions = hwctx->nb_enabled_dev_extensions,
        .features = &hwctx->device_features,
        .lock_queue = hwctx_lock_queue,
        .unlock_queue = hwctx_unlock_queue,
        .queue_ctx = dev,
        /* Defaults in case the hwdevice exposes no such queue family. */
        .queue_graphics = {
            .index = VK_QUEUE_FAMILY_IGNORED,
            .count = 0,
        },
        .queue_compute = {
            .index = VK_QUEUE_FAMILY_IGNORED,
            .count = 0,
        },
        .queue_transfer = {
            .index = VK_QUEUE_FAMILY_IGNORED,
            .count = 0,
        },
    };
    /* Map the hwdevice's queue families onto the import parameters;
     * a later matching family overwrites an earlier one. */
    for (int i = 0; i < hwctx->nb_qf; i++) {
        const AVVulkanDeviceQueueFamily *qf = &hwctx->qf[i];

        if (qf->flags & VK_QUEUE_GRAPHICS_BIT) {
            import_params.queue_graphics.index = qf->idx;
            import_params.queue_graphics.count = qf->num;
        }
        if (qf->flags & VK_QUEUE_COMPUTE_BIT) {
            import_params.queue_compute.index = qf->idx;
            import_params.queue_compute.count = qf->num;
        }
        if (qf->flags & VK_QUEUE_TRANSFER_BIT) {
            import_params.queue_transfer.index = qf->idx;
            import_params.queue_transfer.count = qf->num;
        }
    }

    ctx->placebo_vulkan = pl_vulkan_import(ctx->vk_log, &import_params);
    if (!ctx->placebo_vulkan)
        return AVERROR_EXTERNAL;

    return 0;
}
310
311 static void placebo_lock_queue(struct AVHWDeviceContext *dev_ctx,
312 uint32_t queue_family, uint32_t index)
313 {
314 RendererContext *ctx = dev_ctx->user_opaque;
315 pl_vulkan vk = ctx->placebo_vulkan;
316 vk->lock_queue(vk, queue_family, index);
317 }
318
319 static void placebo_unlock_queue(struct AVHWDeviceContext *dev_ctx,
320 uint32_t queue_family,
321 uint32_t index)
322 {
323 RendererContext *ctx = dev_ctx->user_opaque;
324 pl_vulkan vk = ctx->placebo_vulkan;
325 vk->unlock_queue(vk, queue_family, index);
326 }
327
328 static int get_decode_queue(VkRenderer *renderer, int *index, int *count)
329 {
330 RendererContext *ctx = (RendererContext *) renderer;
331 VkQueueFamilyProperties *queue_family_prop = NULL;
332 uint32_t num_queue_family_prop = 0;
333 PFN_vkGetPhysicalDeviceQueueFamilyProperties get_queue_family_prop;
334 PFN_vkGetInstanceProcAddr get_proc_addr = ctx->get_proc_addr;
335
336 *index = -1;
337 *count = 0;
338 get_queue_family_prop = (PFN_vkGetPhysicalDeviceQueueFamilyProperties)
339 get_proc_addr(ctx->placebo_instance->instance,
340 "vkGetPhysicalDeviceQueueFamilyProperties");
341 get_queue_family_prop(ctx->placebo_vulkan->phys_device,
342 &num_queue_family_prop, NULL);
343 if (!num_queue_family_prop)
344 return AVERROR_EXTERNAL;
345
346 queue_family_prop = av_calloc(num_queue_family_prop,
347 sizeof(*queue_family_prop));
348 if (!queue_family_prop)
349 return AVERROR(ENOMEM);
350
351 get_queue_family_prop(ctx->placebo_vulkan->phys_device,
352 &num_queue_family_prop,
353 queue_family_prop);
354
355 for (int i = 0; i < num_queue_family_prop; i++) {
356 if (queue_family_prop[i].queueFlags & VK_QUEUE_VIDEO_DECODE_BIT_KHR) {
357 *index = i;
358 *count = queue_family_prop[i].queueCount;
359 break;
360 }
361 }
362 av_free(queue_family_prop);
363
364 return 0;
365 }
366
/*
 * Create the Vulkan instance and device through libplacebo, then wrap
 * them in an FFmpeg Vulkan hwdevice context so the decoder can use the
 * same device.  This is the inverse handoff of create_vk_by_hwcontext().
 *
 * Returns 0 on success or a negative AVERROR.  Partially created state
 * is left in ctx and released by destroy().
 */
static int create_vk_by_placebo(VkRenderer *renderer,
                                const char **ext, unsigned num_ext,
                                const AVDictionary *opt)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWDeviceContext *device_ctx;
    AVVulkanDeviceContext *vk_dev_ctx;
    int decode_index;
    int decode_count;
    int ret;

    ctx->get_proc_addr = SDL_Vulkan_GetVkGetInstanceProcAddr();

    ctx->placebo_instance = pl_vk_inst_create(ctx->vk_log, pl_vk_inst_params(
            .get_proc_addr = ctx->get_proc_addr,
            .debug = enable_debug(opt),
            .extensions = ext,
            .num_extensions = num_ext
    ));
    if (!ctx->placebo_instance) {
        return AVERROR_EXTERNAL;
    }
    ctx->inst = ctx->placebo_instance->instance;

    /* Ask for an extra decode queue so hardware decoding can share the
     * device.  ctx->vk_surface is still VK_NULL_HANDLE here (the surface
     * is created later in create()) — presumably intentional; confirm
     * against pl_vulkan_create() semantics. */
    ctx->placebo_vulkan = pl_vulkan_create(ctx->vk_log, pl_vulkan_params(
            .instance = ctx->placebo_instance->instance,
            .get_proc_addr = ctx->placebo_instance->get_proc_addr,
            .surface = ctx->vk_surface,
            .allow_software = false,
            .opt_extensions = optional_device_exts,
            .num_opt_extensions = FF_ARRAY_ELEMS(optional_device_exts),
            .extra_queues = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
            .device_name = select_device(opt),
    ));
    if (!ctx->placebo_vulkan)
        return AVERROR_EXTERNAL;
    ctx->hw_device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VULKAN);
    if (!ctx->hw_device_ref) {
        return AVERROR(ENOMEM);
    }

    device_ctx = (AVHWDeviceContext *) ctx->hw_device_ref->data;
    device_ctx->user_opaque = ctx;

    /* Expose libplacebo's device to FFmpeg: copy handles, extension
     * lists, features and install queue-locking callbacks. */
    vk_dev_ctx = device_ctx->hwctx;
    vk_dev_ctx->lock_queue = placebo_lock_queue;
    vk_dev_ctx->unlock_queue = placebo_unlock_queue;

    vk_dev_ctx->get_proc_addr = ctx->placebo_instance->get_proc_addr;

    vk_dev_ctx->inst = ctx->placebo_instance->instance;
    vk_dev_ctx->phys_dev = ctx->placebo_vulkan->phys_device;
    vk_dev_ctx->act_dev = ctx->placebo_vulkan->device;

    vk_dev_ctx->device_features = *ctx->placebo_vulkan->features;

    vk_dev_ctx->enabled_inst_extensions = ctx->placebo_instance->extensions;
    vk_dev_ctx->nb_enabled_inst_extensions = ctx->placebo_instance->num_extensions;

    vk_dev_ctx->enabled_dev_extensions = ctx->placebo_vulkan->extensions;
    vk_dev_ctx->nb_enabled_dev_extensions = ctx->placebo_vulkan->num_extensions;

    /* Publish the graphics, transfer, compute and decode queue families
     * to the hwdevice. */
    int nb_qf = 0;
    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = ctx->placebo_vulkan->queue_graphics.index,
        .num = ctx->placebo_vulkan->queue_graphics.count,
        .flags = VK_QUEUE_GRAPHICS_BIT,
    };
    nb_qf++;
    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = ctx->placebo_vulkan->queue_transfer.index,
        .num = ctx->placebo_vulkan->queue_transfer.count,
        .flags = VK_QUEUE_TRANSFER_BIT,
    };
    nb_qf++;
    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = ctx->placebo_vulkan->queue_compute.index,
        .num = ctx->placebo_vulkan->queue_compute.count,
        .flags = VK_QUEUE_COMPUTE_BIT,
    };
    nb_qf++;

    ret = get_decode_queue(renderer, &decode_index, &decode_count);
    if (ret < 0)
        return ret;

    vk_dev_ctx->qf[nb_qf] = (AVVulkanDeviceQueueFamily) {
        .idx = decode_index,
        .num = decode_count,
        .flags = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
    };
    nb_qf++;

    vk_dev_ctx->nb_qf = nb_qf;

    ret = av_hwdevice_ctx_init(ctx->hw_device_ref);
    if (ret < 0)
        return ret;

    return 0;
}
468
469 static int create(VkRenderer *renderer, SDL_Window *window, AVDictionary *opt)
470 {
471 int ret = 0;
472 unsigned num_ext = 0;
473 const char **ext = NULL;
474 int w, h;
475 struct pl_log_params vk_log_params = {
476 .log_cb = vk_log_cb,
477 .log_level = PL_LOG_DEBUG,
478 .log_priv = renderer,
479 };
480 RendererContext *ctx = (RendererContext *) renderer;
481 AVDictionaryEntry *entry;
482
483 ctx->vk_log = pl_log_create(PL_API_VER, &vk_log_params);
484
485 if (!SDL_Vulkan_GetInstanceExtensions(window, &num_ext, NULL)) {
486 av_log(NULL, AV_LOG_FATAL, "Failed to get vulkan extensions: %s\n",
487 SDL_GetError());
488 return AVERROR_EXTERNAL;
489 }
490
491 ext = av_calloc(num_ext, sizeof(*ext));
492 if (!ext) {
493 ret = AVERROR(ENOMEM);
494 goto out;
495 }
496
497 SDL_Vulkan_GetInstanceExtensions(window, &num_ext, ext);
498
499 entry = av_dict_get(opt, "create_by_placebo", NULL, 0);
500 if (entry && strtol(entry->value, NULL, 10))
501 ret = create_vk_by_placebo(renderer, ext, num_ext, opt);
502 else
503 ret = create_vk_by_hwcontext(renderer, ext, num_ext, opt);
504 if (ret < 0)
505 goto out;
506
507 if (!SDL_Vulkan_CreateSurface(window, ctx->inst, &ctx->vk_surface)) {
508 ret = AVERROR_EXTERNAL;
509 goto out;
510 }
511
512 ctx->swapchain = pl_vulkan_create_swapchain(
513 ctx->placebo_vulkan,
514 pl_vulkan_swapchain_params(
515 .surface = ctx->vk_surface,
516 .present_mode = VK_PRESENT_MODE_FIFO_KHR));
517 if (!ctx->swapchain) {
518 ret = AVERROR_EXTERNAL;
519 goto out;
520 }
521
522 SDL_Vulkan_GetDrawableSize(window, &w, &h);
523 pl_swapchain_resize(ctx->swapchain, &w, &h);
524
525 ctx->renderer = pl_renderer_create(ctx->vk_log, ctx->placebo_vulkan->gpu);
526 if (!ctx->renderer) {
527 ret = AVERROR_EXTERNAL;
528 goto out;
529 }
530
531 ctx->vk_frame = av_frame_alloc();
532 if (!ctx->vk_frame) {
533 ret = AVERROR(ENOMEM);
534 goto out;
535 }
536
537 ret = 0;
538
539 out:
540 av_free(ext);
541 return ret;
542 }
543
544 static int get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
545 {
546 RendererContext *ctx = (RendererContext *) renderer;
547
548 *dev = ctx->hw_device_ref;
549 return 0;
550 }
551
/*
 * Ensure ctx->hw_frame_ref is a Vulkan hwframes context matching the
 * incoming frame's size and software format, (re)creating it if needed.
 *
 * Failing the device constraints is NOT an error: the context is simply
 * not created and the caller falls back to a CPU copy path.
 * Returns 0 on success/benign skip, a negative AVERROR on real failure.
 */
static int create_hw_frame(VkRenderer *renderer, AVFrame *frame)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWFramesContext *src_hw_frame = (AVHWFramesContext *)
            frame->hw_frames_ctx->data;
    AVHWFramesContext *hw_frame;
    AVVulkanFramesContext *vk_frame_ctx;
    int ret;

    /* Reuse the existing context when it already matches the frame. */
    if (ctx->hw_frame_ref) {
        hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;

        if (hw_frame->width == frame->width &&
            hw_frame->height == frame->height &&
            hw_frame->sw_format == src_hw_frame->sw_format)
            return 0;

        av_buffer_unref(&ctx->hw_frame_ref);
    }

    /* Query device constraints once and cache them. */
    if (!ctx->constraints) {
        ctx->constraints = av_hwdevice_get_hwframe_constraints(
                ctx->hw_device_ref, NULL);
        if (!ctx->constraints)
            return AVERROR(ENOMEM);
    }

    // Check constraints and skip create hwframe. Don't take it as error since
    // we can fallback to memory copy from GPU to CPU.
    if ((ctx->constraints->max_width &&
         ctx->constraints->max_width < frame->width) ||
        (ctx->constraints->max_height &&
         ctx->constraints->max_height < frame->height) ||
        (ctx->constraints->min_width &&
         ctx->constraints->min_width > frame->width) ||
        (ctx->constraints->min_height &&
         ctx->constraints->min_height > frame->height))
        return 0;

    /* Likewise skip (not fail) if the software format is unsupported. */
    if (ctx->constraints->valid_sw_formats) {
        enum AVPixelFormat *sw_formats = ctx->constraints->valid_sw_formats;
        while (*sw_formats != AV_PIX_FMT_NONE) {
            if (*sw_formats == src_hw_frame->sw_format)
                break;
            sw_formats++;
        }
        if (*sw_formats == AV_PIX_FMT_NONE)
            return 0;
    }

    ctx->hw_frame_ref = av_hwframe_ctx_alloc(ctx->hw_device_ref);
    if (!ctx->hw_frame_ref)
        return AVERROR(ENOMEM);

    hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;
    hw_frame->format = AV_PIX_FMT_VULKAN;
    hw_frame->sw_format = src_hw_frame->sw_format;
    hw_frame->width = frame->width;
    hw_frame->height = frame->height;

    if (frame->format == AV_PIX_FMT_CUDA) {
        vk_frame_ctx = hw_frame->hwctx;
        /* Multiplane is disabled for CUDA sources — presumably required
         * for CUDA->Vulkan interop; TODO confirm. */
        vk_frame_ctx->flags = AV_VK_FRAME_FLAG_DISABLE_MULTIPLANE;
    }

    ret = av_hwframe_ctx_init(ctx->hw_frame_ref);
    if (ret < 0) {
        av_log(renderer, AV_LOG_ERROR, "Create hwframe context failed, %s\n",
               av_err2str(ret));
        return ret;
    }

    /* Best-effort: on failure transfer_formats stays NULL and
     * check_hw_transfer() simply reports "no hw transfer". */
    av_hwframe_transfer_get_formats(ctx->hw_frame_ref,
                                    AV_HWFRAME_TRANSFER_DIRECTION_TO,
                                    &ctx->transfer_formats, 0);

    return 0;
}
630
631 static inline int check_hw_transfer(RendererContext *ctx, AVFrame *frame)
632 {
633 if (!ctx->hw_frame_ref || !ctx->transfer_formats)
634 return 0;
635
636 for (int i = 0; ctx->transfer_formats[i] != AV_PIX_FMT_NONE; i++)
637 if (ctx->transfer_formats[i] == frame->format)
638 return 1;
639
640 return 0;
641 }
642
643 static inline int move_to_output_frame(RendererContext *ctx, AVFrame *frame)
644 {
645 int ret = av_frame_copy_props(ctx->vk_frame, frame);
646 if (ret < 0)
647 return ret;
648 av_frame_unref(frame);
649 av_frame_move_ref(frame, ctx->vk_frame);
650 return 0;
651 }
652
653 static int map_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
654 {
655 RendererContext *ctx = (RendererContext *) renderer;
656 int ret;
657
658 if (use_hw_frame && !ctx->hw_frame_ref)
659 return AVERROR(ENOSYS);
660
661 // Try map data first
662 av_frame_unref(ctx->vk_frame);
663 if (use_hw_frame) {
664 ctx->vk_frame->hw_frames_ctx = av_buffer_ref(ctx->hw_frame_ref);
665 ctx->vk_frame->format = AV_PIX_FMT_VULKAN;
666 }
667 ret = av_hwframe_map(ctx->vk_frame, frame, 0);
668 if (!ret)
669 return move_to_output_frame(ctx, frame);
670
671 if (ret != AVERROR(ENOSYS))
672 av_log(NULL, AV_LOG_FATAL, "Map frame failed: %s\n", av_err2str(ret));
673 return ret;
674 }
675
676 static int transfer_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
677 {
678 RendererContext *ctx = (RendererContext *) renderer;
679 int ret;
680
681 if (use_hw_frame && !check_hw_transfer(ctx, frame))
682 return AVERROR(ENOSYS);
683
684 av_frame_unref(ctx->vk_frame);
685 if (use_hw_frame)
686 av_hwframe_get_buffer(ctx->hw_frame_ref, ctx->vk_frame, 0);
687 ret = av_hwframe_transfer_data(ctx->vk_frame, frame, 1);
688 if (!ret)
689 return move_to_output_frame(ctx, frame);
690
691 if (ret != AVERROR(ENOSYS))
692 av_log(NULL, AV_LOG_FATAL, "Transfer frame failed: %s\n",
693 av_err2str(ret));
694 return ret;
695 }
696
697 static int convert_frame(VkRenderer *renderer, AVFrame *frame)
698 {
699 int ret;
700
701 if (!frame->hw_frames_ctx)
702 return 0;
703
704 if (frame->format == AV_PIX_FMT_VULKAN)
705 return 0;
706
707 ret = create_hw_frame(renderer, frame);
708 if (ret < 0)
709 return ret;
710
711 for (int use_hw = 1; use_hw >=0; use_hw--) {
712 ret = map_frame(renderer, frame, use_hw);
713 if (!ret)
714 return 0;
715 if (ret != AVERROR(ENOSYS))
716 return ret;
717
718 ret = transfer_frame(renderer, frame, use_hw);
719 if (!ret)
720 return 0;
721 if (ret != AVERROR(ENOSYS))
722 return ret;
723 }
724
725 return ret;
726 }
727
/*
 * Render and present one frame: convert it to a renderable format,
 * map it into libplacebo, render into the next swapchain image and
 * present.  Returns 0 on success or a negative AVERROR.
 */
static int display(VkRenderer *renderer, AVFrame *frame)
{
    struct pl_swapchain_frame swap_frame = {0};
    struct pl_frame pl_frame = {0};
    struct pl_frame target = {0};
    RendererContext *ctx = (RendererContext *) renderer;
    int ret = 0;
    struct pl_color_space hint = {0};

    ret = convert_frame(renderer, frame);
    if (ret < 0)
        return ret;

    if (!pl_map_avframe_ex(ctx->placebo_vulkan->gpu, &pl_frame, pl_avframe_params(
            .frame = frame,
            .tex = ctx->tex))) {
        av_log(NULL, AV_LOG_ERROR, "pl_map_avframe_ex failed\n");
        return AVERROR_EXTERNAL;
    }

    /* Pass the frame's colorspace to the swapchain before starting. */
    pl_color_space_from_avframe(&hint, frame);
    pl_swapchain_colorspace_hint(ctx->swapchain, &hint);
    if (!pl_swapchain_start_frame(ctx->swapchain, &swap_frame)) {
        av_log(NULL, AV_LOG_ERROR, "start frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    pl_frame_from_swapchain(&target, &swap_frame);
    if (!pl_render_image(ctx->renderer, &pl_frame, &target,
                         &pl_render_default_params)) {
        av_log(NULL, AV_LOG_ERROR, "pl_render_image failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    if (!pl_swapchain_submit_frame(ctx->swapchain)) {
        av_log(NULL, AV_LOG_ERROR, "pl_swapchain_submit_frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }
    pl_swapchain_swap_buffers(ctx->swapchain);

out:
    /* Always unmap, even on error, to release the frame's textures. */
    pl_unmap_avframe(ctx->placebo_vulkan->gpu, &pl_frame);
    return ret;
}
775
776 static int resize(VkRenderer *renderer, int width, int height)
777 {
778 RendererContext *ctx = (RendererContext *) renderer;
779
780 if (!pl_swapchain_resize(ctx->swapchain, &width, &height))
781 return AVERROR_EXTERNAL;
782 return 0;
783 }
784
/*
 * Tear down everything create() built.  Safe on a partially initialized
 * context: each step is guarded by a NULL/handle check or is a no-op on
 * NULL.  Does not free the RendererContext allocation itself.
 */
static void destroy(VkRenderer *renderer)
{
    RendererContext *ctx = (RendererContext *) renderer;
    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;

    av_frame_free(&ctx->vk_frame);
    av_freep(&ctx->transfer_formats);
    av_hwframe_constraints_free(&ctx->constraints);
    av_buffer_unref(&ctx->hw_frame_ref);

    if (ctx->placebo_vulkan) {
        for (int i = 0; i < FF_ARRAY_ELEMS(ctx->tex); i++)
            pl_tex_destroy(ctx->placebo_vulkan->gpu, &ctx->tex[i]);
        pl_renderer_destroy(&ctx->renderer);
        pl_swapchain_destroy(&ctx->swapchain);
        pl_vulkan_destroy(&ctx->placebo_vulkan);
    }

    /* The surface was created via SDL but must be destroyed through the
     * Vulkan instance that owns it. */
    if (ctx->vk_surface) {
        vkDestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)
                ctx->get_proc_addr(ctx->inst, "vkDestroySurfaceKHR");
        vkDestroySurfaceKHR(ctx->inst, ctx->vk_surface, NULL);
        ctx->vk_surface = VK_NULL_HANDLE;
    }

    av_buffer_unref(&ctx->hw_device_ref);
    pl_vk_inst_destroy(&ctx->placebo_instance);

    pl_log_destroy(&ctx->vk_log);
}
815
/* AVClass so the renderer can be used as an av_log() context. */
static const AVClass vulkan_renderer_class = {
    .class_name = "Vulkan Renderer",
    .item_name = av_default_item_name,
    .version = LIBAVUTIL_VERSION_INT,
};
821
822 VkRenderer *vk_get_renderer(void)
823 {
824 RendererContext *ctx = av_mallocz(sizeof(*ctx));
825 VkRenderer *renderer;
826
827 if (!ctx)
828 return NULL;
829
830 renderer = &ctx->api;
831 renderer->class = &vulkan_renderer_class;
832 renderer->get_hw_dev = get_hw_dev;
833 renderer->create = create;
834 renderer->display = display;
835 renderer->resize = resize;
836 renderer->destroy = destroy;
837
838 return renderer;
839 }
840
841 #else
842
/* Stub used when the Vulkan renderer is compiled out
 * (!HAVE_VULKAN_RENDERER): callers get NULL and must fall back. */
VkRenderer *vk_get_renderer(void)
{
    return NULL;
}
847
848 #endif
849
/* Public dispatcher: initialize the renderer for a window. */
int vk_renderer_create(VkRenderer *renderer, SDL_Window *window,
                       AVDictionary *opt)
{
    return renderer->create(renderer, window, opt);
}
855
/* Public dispatcher: fetch the renderer's hwdevice (borrowed reference). */
int vk_renderer_get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
{
    return renderer->get_hw_dev(renderer, dev);
}
860
/* Public dispatcher: render and present one frame. */
int vk_renderer_display(VkRenderer *renderer, AVFrame *frame)
{
    return renderer->display(renderer, frame);
}
865
/* Public dispatcher: propagate a drawable-size change. */
int vk_renderer_resize(VkRenderer *renderer, int width, int height)
{
    return renderer->resize(renderer, width, height);
}
870
/* Public dispatcher: release all renderer resources. */
void vk_renderer_destroy(VkRenderer *renderer)
{
    renderer->destroy(renderer);
}
875