FFmpeg coverage

Directory: ../../../ffmpeg/
File:      src/fftools/ffplay_renderer.c
Date:      2023-12-04 05:51:44

            Exec  Total  Coverage
Lines:         0     13      0.0%
Functions:     0      6      0.0%
Branches:      0      0         -

Source:
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#define VK_NO_PROTOTYPES
#define VK_ENABLE_BETA_EXTENSIONS

#include "config.h"
#include "ffplay_renderer.h"

#if (SDL_VERSION_ATLEAST(2, 0, 6) && CONFIG_LIBPLACEBO)
/* Get PL_API_VER */
#include <libplacebo/config.h>
#define HAVE_VULKAN_RENDERER (PL_API_VER >= 278)
#else
#define HAVE_VULKAN_RENDERER 0
#endif

#if HAVE_VULKAN_RENDERER

#if defined(_WIN32) && !defined(VK_USE_PLATFORM_WIN32_KHR)
#define VK_USE_PLATFORM_WIN32_KHR
#endif

#include <libplacebo/vulkan.h>
#include <libplacebo/utils/frame_queue.h>
#include <libplacebo/utils/libav.h>
#include <SDL_vulkan.h>

#include "libavutil/bprint.h"

#endif

struct VkRenderer {
    const AVClass *class;

    int (*create)(VkRenderer *renderer, SDL_Window *window, AVDictionary *dict);

    int (*get_hw_dev)(VkRenderer *renderer, AVBufferRef **dev);

    int (*display)(VkRenderer *renderer, AVFrame *frame);

    int (*resize)(VkRenderer *renderer, int width, int height);

    void (*destroy)(VkRenderer *renderer);
};
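
/*
 * Note: VkRenderer is deliberately a small vtable. The public
 * vk_renderer_*() wrappers at the end of this file just dispatch through
 * these pointers, so callers stay independent of the Vulkan-specific
 * RendererContext defined below.
 */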

#if HAVE_VULKAN_RENDERER

typedef struct RendererContext {
    VkRenderer api;

    // Can be NULL when the Vulkan instance is created by avutil
    pl_vk_inst placebo_instance;
    pl_vulkan placebo_vulkan;
    pl_swapchain swapchain;
    VkSurfaceKHR vk_surface;
    pl_renderer renderer;
    pl_tex tex[4];

    pl_log vk_log;

    AVBufferRef *hw_device_ref;
    AVBufferRef *hw_frame_ref;
    enum AVPixelFormat *transfer_formats;
    AVHWFramesConstraints *constraints;

    PFN_vkGetInstanceProcAddr get_proc_addr;
    // A copy of pl_vk_inst->instance, or of the instance owned by hw_device_ref
    VkInstance inst;

    AVFrame *vk_frame;
} RendererContext;

static void vk_log_cb(void *log_priv, enum pl_log_level level,
                      const char *msg)
{
    static const int level_map[] = {
        AV_LOG_QUIET,
        AV_LOG_FATAL,
        AV_LOG_ERROR,
        AV_LOG_WARNING,
        AV_LOG_INFO,
        AV_LOG_DEBUG,
        AV_LOG_TRACE,
    };

    if (level > 0 && level < FF_ARRAY_ELEMS(level_map))
        av_log(log_priv, level_map[level], "%s\n", msg);
}

// Should be kept in sync with optional_device_exts inside hwcontext_vulkan.c
static const char *optional_device_exts[] = {
    /* Misc or required by other extensions */
    VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME,
    VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME,
    VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
    VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME,
    VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME,
    VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME,
    VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME,

    /* Imports/exports */
    VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
    VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,
    VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,
    VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME,
#ifdef _WIN32
    VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME,
#endif

    /* Video encoding/decoding */
    VK_KHR_VIDEO_QUEUE_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME,
    VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME,
    "VK_MESA_video_decode_av1",
};

static inline int enable_debug(const AVDictionary *opt)
{
    AVDictionaryEntry *entry = av_dict_get(opt, "debug", NULL, 0);
    int debug = entry && strtol(entry->value, NULL, 10);
    return debug;
}
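
// Illustrative: with ffplay's -vulkan_params option (referenced by the error
// message in create_vk_by_hwcontext() below), "-vulkan_params debug=1" makes
// enable_debug() return 1, turning on libplacebo's Vulkan debug layer in
// create_vk_by_placebo().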

static void hwctx_lock_queue(void *priv, uint32_t qf, uint32_t qidx)
{
    AVHWDeviceContext *avhwctx = priv;
    const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
    hwctx->lock_queue(avhwctx, qf, qidx);
}

static void hwctx_unlock_queue(void *priv, uint32_t qf, uint32_t qidx)
{
    AVHWDeviceContext *avhwctx = priv;
    const AVVulkanDeviceContext *hwctx = avhwctx->hwctx;
    hwctx->unlock_queue(avhwctx, qf, qidx);
}

static int add_instance_extension(const char **ext, unsigned num_ext,
                                  const AVDictionary *opt,
                                  AVDictionary **dict)
{
    const char *inst_ext_key = "instance_extensions";
    AVDictionaryEntry *entry;
    AVBPrint buf;
    char *ext_list = NULL;
    int ret;

    av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
    for (int i = 0; i < num_ext; i++) {
        if (i)
            av_bprintf(&buf, "+%s", ext[i]);
        else
            av_bprintf(&buf, "%s", ext[i]);
    }

    entry = av_dict_get(opt, inst_ext_key, NULL, 0);
    if (entry && entry->value && entry->value[0]) {
        if (num_ext)
            av_bprintf(&buf, "+");
        av_bprintf(&buf, "%s", entry->value);
    }

    ret = av_bprint_finalize(&buf, &ext_list);
    if (ret < 0)
        return ret;
    return av_dict_set(dict, inst_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
}
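
// The Vulkan hwcontext parses a '+'-separated extension list, so for the
// instance extensions SDL typically reports this produces a dictionary value
// like (illustrative names only):
//   instance_extensions=VK_KHR_surface+VK_KHR_xlib_surface
// with any user-supplied extensions appended after another '+'.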

static int add_device_extension(const AVDictionary *opt,
                                AVDictionary **dict)
{
    const char *dev_ext_key = "device_extensions";
    AVDictionaryEntry *entry;
    AVBPrint buf;
    char *ext_list = NULL;
    int ret;

    av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
    av_bprintf(&buf, "%s", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
    for (int i = 0; i < pl_vulkan_num_recommended_extensions; i++)
        av_bprintf(&buf, "+%s", pl_vulkan_recommended_extensions[i]);

    entry = av_dict_get(opt, dev_ext_key, NULL, 0);
    if (entry && entry->value && entry->value[0])
        av_bprintf(&buf, "+%s", entry->value);

    ret = av_bprint_finalize(&buf, &ext_list);
    if (ret < 0)
        return ret;
    return av_dict_set(dict, dev_ext_key, ext_list, AV_DICT_DONT_STRDUP_VAL);
}

static int create_vk_by_hwcontext(VkRenderer *renderer,
                                  const char **ext, unsigned num_ext,
                                  const AVDictionary *opt)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWDeviceContext *dev;
    AVVulkanDeviceContext *hwctx;
    AVDictionary *dict = NULL;
    int ret;

    ret = add_instance_extension(ext, num_ext, opt, &dict);
    if (ret < 0)
        return ret;
    ret = add_device_extension(opt, &dict);
    if (ret) {
        av_dict_free(&dict);
        return ret;
    }

    ret = av_hwdevice_ctx_create(&ctx->hw_device_ref, AV_HWDEVICE_TYPE_VULKAN,
                                 NULL, dict, 0);
    av_dict_free(&dict);
    if (ret < 0)
        return ret;

    dev = (AVHWDeviceContext *) ctx->hw_device_ref->data;
    hwctx = dev->hwctx;

    // There is no way to pass SDL's GetInstanceProcAddr to the hwdevice,
    // so check the result and return an error if the two don't match.
    if (hwctx->get_proc_addr != SDL_Vulkan_GetVkGetInstanceProcAddr()) {
        av_log(renderer, AV_LOG_ERROR,
               "hwdevice and SDL use different get_proc_addr. "
               "Try -vulkan_params create_by_placebo=1\n");
        return AVERROR_PATCHWELCOME;
    }

    ctx->get_proc_addr = hwctx->get_proc_addr;
    ctx->inst = hwctx->inst;
    ctx->placebo_vulkan = pl_vulkan_import(ctx->vk_log,
        pl_vulkan_import_params(
            .instance = hwctx->inst,
            .get_proc_addr = hwctx->get_proc_addr,
            .phys_device = hwctx->phys_dev,
            .device = hwctx->act_dev,
            .extensions = hwctx->enabled_dev_extensions,
            .num_extensions = hwctx->nb_enabled_dev_extensions,
            .features = &hwctx->device_features,
            .lock_queue = hwctx_lock_queue,
            .unlock_queue = hwctx_unlock_queue,
            .queue_ctx = dev,
            .queue_graphics = {
                .index = hwctx->queue_family_index,
                .count = hwctx->nb_graphics_queues,
            },
            .queue_compute = {
                .index = hwctx->queue_family_comp_index,
                .count = hwctx->nb_comp_queues,
            },
            .queue_transfer = {
                .index = hwctx->queue_family_tx_index,
                .count = hwctx->nb_tx_queues,
            },
        ));
    if (!ctx->placebo_vulkan)
        return AVERROR_EXTERNAL;

    return 0;
}
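
/*
 * Design note: importing the hwdevice's VkDevice into libplacebo means the
 * decoder (via lavu's Vulkan hwcontext) and the presentation path share a
 * single logical device, with hwctx_lock_queue()/hwctx_unlock_queue() above
 * bridging the two sides' queue locking.
 */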

static void placebo_lock_queue(struct AVHWDeviceContext *dev_ctx,
                               uint32_t queue_family, uint32_t index)
{
    RendererContext *ctx = dev_ctx->user_opaque;
    pl_vulkan vk = ctx->placebo_vulkan;
    vk->lock_queue(vk, queue_family, index);
}

static void placebo_unlock_queue(struct AVHWDeviceContext *dev_ctx,
                                 uint32_t queue_family,
                                 uint32_t index)
{
    RendererContext *ctx = dev_ctx->user_opaque;
    pl_vulkan vk = ctx->placebo_vulkan;
    vk->unlock_queue(vk, queue_family, index);
}

static int get_decode_queue(VkRenderer *renderer, int *index, int *count)
{
    RendererContext *ctx = (RendererContext *) renderer;
    VkQueueFamilyProperties *queue_family_prop = NULL;
    uint32_t num_queue_family_prop = 0;
    PFN_vkGetPhysicalDeviceQueueFamilyProperties get_queue_family_prop;
    PFN_vkGetInstanceProcAddr get_proc_addr = ctx->get_proc_addr;

    *index = -1;
    *count = 0;
    get_queue_family_prop = (PFN_vkGetPhysicalDeviceQueueFamilyProperties)
        get_proc_addr(ctx->placebo_instance->instance,
                      "vkGetPhysicalDeviceQueueFamilyProperties");
    get_queue_family_prop(ctx->placebo_vulkan->phys_device,
                          &num_queue_family_prop, NULL);
    if (!num_queue_family_prop)
        return AVERROR_EXTERNAL;

    queue_family_prop = av_calloc(num_queue_family_prop,
                                  sizeof(*queue_family_prop));
    if (!queue_family_prop)
        return AVERROR(ENOMEM);

    get_queue_family_prop(ctx->placebo_vulkan->phys_device,
                          &num_queue_family_prop,
                          queue_family_prop);

    for (int i = 0; i < num_queue_family_prop; i++) {
        if (queue_family_prop[i].queueFlags & VK_QUEUE_VIDEO_DECODE_BIT_KHR) {
            *index = i;
            *count = queue_family_prop[i].queueCount;
            break;
        }
    }
    av_free(queue_family_prop);

    return 0;
}

static int create_vk_by_placebo(VkRenderer *renderer,
                                const char **ext, unsigned num_ext,
                                const AVDictionary *opt)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWDeviceContext *device_ctx;
    AVVulkanDeviceContext *vk_dev_ctx;
    int decode_index;
    int decode_count;
    int ret;

    ctx->get_proc_addr = SDL_Vulkan_GetVkGetInstanceProcAddr();

    ctx->placebo_instance = pl_vk_inst_create(ctx->vk_log, pl_vk_inst_params(
        .get_proc_addr = ctx->get_proc_addr,
        .debug = enable_debug(opt),
        .extensions = ext,
        .num_extensions = num_ext
    ));
    if (!ctx->placebo_instance) {
        return AVERROR_EXTERNAL;
    }
    ctx->inst = ctx->placebo_instance->instance;

    ctx->placebo_vulkan = pl_vulkan_create(ctx->vk_log, pl_vulkan_params(
        .instance = ctx->placebo_instance->instance,
        .get_proc_addr = ctx->placebo_instance->get_proc_addr,
        .surface = ctx->vk_surface,
        .allow_software = false,
        .opt_extensions = optional_device_exts,
        .num_opt_extensions = FF_ARRAY_ELEMS(optional_device_exts),
        .extra_queues = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
    ));
    if (!ctx->placebo_vulkan)
        return AVERROR_EXTERNAL;
    ctx->hw_device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VULKAN);
    if (!ctx->hw_device_ref) {
        return AVERROR(ENOMEM);
    }

    device_ctx = (AVHWDeviceContext *) ctx->hw_device_ref->data;
    device_ctx->user_opaque = ctx;

    vk_dev_ctx = device_ctx->hwctx;
    vk_dev_ctx->lock_queue = placebo_lock_queue;
    vk_dev_ctx->unlock_queue = placebo_unlock_queue;

    vk_dev_ctx->get_proc_addr = ctx->placebo_instance->get_proc_addr;

    vk_dev_ctx->inst = ctx->placebo_instance->instance;
    vk_dev_ctx->phys_dev = ctx->placebo_vulkan->phys_device;
    vk_dev_ctx->act_dev = ctx->placebo_vulkan->device;

    vk_dev_ctx->device_features = *ctx->placebo_vulkan->features;

    vk_dev_ctx->enabled_inst_extensions = ctx->placebo_instance->extensions;
    vk_dev_ctx->nb_enabled_inst_extensions = ctx->placebo_instance->num_extensions;

    vk_dev_ctx->enabled_dev_extensions = ctx->placebo_vulkan->extensions;
    vk_dev_ctx->nb_enabled_dev_extensions = ctx->placebo_vulkan->num_extensions;

    vk_dev_ctx->queue_family_index = ctx->placebo_vulkan->queue_graphics.index;
    vk_dev_ctx->nb_graphics_queues = ctx->placebo_vulkan->queue_graphics.count;

    vk_dev_ctx->queue_family_tx_index = ctx->placebo_vulkan->queue_transfer.index;
    vk_dev_ctx->nb_tx_queues = ctx->placebo_vulkan->queue_transfer.count;

    vk_dev_ctx->queue_family_comp_index = ctx->placebo_vulkan->queue_compute.index;
    vk_dev_ctx->nb_comp_queues = ctx->placebo_vulkan->queue_compute.count;

    ret = get_decode_queue(renderer, &decode_index, &decode_count);
    if (ret < 0)
        return ret;

    vk_dev_ctx->queue_family_decode_index = decode_index;
    vk_dev_ctx->nb_decode_queues = decode_count;

    ret = av_hwdevice_ctx_init(ctx->hw_device_ref);
    if (ret < 0)
        return ret;

    return 0;
}
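
// Selecting this path is done through the same option dictionary, e.g.
// (hypothetical invocation; the exact plumbing lives in ffplay.c):
//   ffplay -vulkan_params create_by_placebo=1 input.mkv
// Here libplacebo owns instance and device creation and the result is
// exported into an FFmpeg hwdevice, the reverse of create_vk_by_hwcontext().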

static int create(VkRenderer *renderer, SDL_Window *window, AVDictionary *opt)
{
    int ret = 0;
    unsigned num_ext = 0;
    const char **ext = NULL;
    int w, h;
    struct pl_log_params vk_log_params = {
        .log_cb = vk_log_cb,
        .log_level = PL_LOG_DEBUG,
        .log_priv = renderer,
    };
    RendererContext *ctx = (RendererContext *) renderer;
    AVDictionaryEntry *entry;

    ctx->vk_log = pl_log_create(PL_API_VER, &vk_log_params);

    if (!SDL_Vulkan_GetInstanceExtensions(window, &num_ext, NULL)) {
        av_log(NULL, AV_LOG_FATAL, "Failed to get vulkan extensions: %s\n",
               SDL_GetError());
        return AVERROR_EXTERNAL;
    }

    ext = av_calloc(num_ext, sizeof(*ext));
    if (!ext) {
        ret = AVERROR(ENOMEM);
        goto out;
    }

    SDL_Vulkan_GetInstanceExtensions(window, &num_ext, ext);

    entry = av_dict_get(opt, "create_by_placebo", NULL, 0);
    if (entry && strtol(entry->value, NULL, 10))
        ret = create_vk_by_placebo(renderer, ext, num_ext, opt);
    else
        ret = create_vk_by_hwcontext(renderer, ext, num_ext, opt);
    if (ret < 0)
        goto out;

    if (!SDL_Vulkan_CreateSurface(window, ctx->inst, &ctx->vk_surface)) {
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    ctx->swapchain = pl_vulkan_create_swapchain(
        ctx->placebo_vulkan,
        pl_vulkan_swapchain_params(
            .surface = ctx->vk_surface,
            .present_mode = VK_PRESENT_MODE_FIFO_KHR));
    if (!ctx->swapchain) {
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    SDL_Vulkan_GetDrawableSize(window, &w, &h);
    pl_swapchain_resize(ctx->swapchain, &w, &h);

    ctx->renderer = pl_renderer_create(ctx->vk_log, ctx->placebo_vulkan->gpu);
    if (!ctx->renderer) {
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    ctx->vk_frame = av_frame_alloc();
    if (!ctx->vk_frame) {
        ret = AVERROR(ENOMEM);
        goto out;
    }

    ret = 0;

out:
    av_free(ext);
    return ret;
}

static int get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
{
    RendererContext *ctx = (RendererContext *) renderer;

    *dev = ctx->hw_device_ref;
    return 0;
}

static int create_hw_frame(VkRenderer *renderer, AVFrame *frame)
{
    RendererContext *ctx = (RendererContext *) renderer;
    AVHWFramesContext *src_hw_frame = (AVHWFramesContext *)
        frame->hw_frames_ctx->data;
    AVHWFramesContext *hw_frame;
    AVVulkanFramesContext *vk_frame_ctx;
    int ret;

    if (ctx->hw_frame_ref) {
        hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;

        if (hw_frame->width == frame->width &&
            hw_frame->height == frame->height &&
            hw_frame->sw_format == src_hw_frame->sw_format)
            return 0;

        av_buffer_unref(&ctx->hw_frame_ref);
    }

    if (!ctx->constraints) {
        ctx->constraints = av_hwdevice_get_hwframe_constraints(
            ctx->hw_device_ref, NULL);
        if (!ctx->constraints)
            return AVERROR(ENOMEM);
    }

    // Check the constraints and skip creating the hwframe context if they
    // are not met. Don't treat that as an error, since we can fall back to
    // a memory copy from GPU to CPU.
    if ((ctx->constraints->max_width &&
         ctx->constraints->max_width < frame->width) ||
        (ctx->constraints->max_height &&
         ctx->constraints->max_height < frame->height) ||
        (ctx->constraints->min_width &&
         ctx->constraints->min_width > frame->width) ||
        (ctx->constraints->min_height &&
         ctx->constraints->min_height > frame->height))
        return 0;

    if (ctx->constraints->valid_sw_formats) {
        enum AVPixelFormat *sw_formats = ctx->constraints->valid_sw_formats;
        while (*sw_formats != AV_PIX_FMT_NONE) {
            if (*sw_formats == src_hw_frame->sw_format)
                break;
            sw_formats++;
        }
        if (*sw_formats == AV_PIX_FMT_NONE)
            return 0;
    }

    ctx->hw_frame_ref = av_hwframe_ctx_alloc(ctx->hw_device_ref);
    if (!ctx->hw_frame_ref)
        return AVERROR(ENOMEM);

    hw_frame = (AVHWFramesContext *) ctx->hw_frame_ref->data;
    hw_frame->format = AV_PIX_FMT_VULKAN;
    hw_frame->sw_format = src_hw_frame->sw_format;
    hw_frame->width = frame->width;
    hw_frame->height = frame->height;

    if (frame->format == AV_PIX_FMT_CUDA) {
        vk_frame_ctx = hw_frame->hwctx;
        vk_frame_ctx->flags = AV_VK_FRAME_FLAG_DISABLE_MULTIPLANE;
    }

    ret = av_hwframe_ctx_init(ctx->hw_frame_ref);
    if (ret < 0) {
        av_log(renderer, AV_LOG_ERROR, "Create hwframe context failed, %s\n",
               av_err2str(ret));
        return ret;
    }

    av_hwframe_transfer_get_formats(ctx->hw_frame_ref,
                                    AV_HWFRAME_TRANSFER_DIRECTION_TO,
                                    &ctx->transfer_formats, 0);

    return 0;
}

static inline int check_hw_transfer(RendererContext *ctx, AVFrame *frame)
{
    if (!ctx->hw_frame_ref || !ctx->transfer_formats)
        return 0;

    for (int i = 0; ctx->transfer_formats[i] != AV_PIX_FMT_NONE; i++)
        if (ctx->transfer_formats[i] == frame->format)
            return 1;

    return 0;
}

static inline int move_to_output_frame(RendererContext *ctx, AVFrame *frame)
{
    int ret = av_frame_copy_props(ctx->vk_frame, frame);
    if (ret < 0)
        return ret;
    av_frame_unref(frame);
    av_frame_move_ref(frame, ctx->vk_frame);
    return 0;
}

static int map_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
{
    RendererContext *ctx = (RendererContext *) renderer;
    int ret;

    if (use_hw_frame && !ctx->hw_frame_ref)
        return AVERROR(ENOSYS);

    // Try to map the data first
    av_frame_unref(ctx->vk_frame);
    if (use_hw_frame) {
        ctx->vk_frame->hw_frames_ctx = av_buffer_ref(ctx->hw_frame_ref);
        ctx->vk_frame->format = AV_PIX_FMT_VULKAN;
    }
    ret = av_hwframe_map(ctx->vk_frame, frame, 0);
    if (!ret)
        return move_to_output_frame(ctx, frame);

    if (ret != AVERROR(ENOSYS))
        av_log(NULL, AV_LOG_FATAL, "Map frame failed: %s\n", av_err2str(ret));
    return ret;
}

static int transfer_frame(VkRenderer *renderer, AVFrame *frame, int use_hw_frame)
{
    RendererContext *ctx = (RendererContext *) renderer;
    int ret;

    if (use_hw_frame && !check_hw_transfer(ctx, frame))
        return AVERROR(ENOSYS);

    av_frame_unref(ctx->vk_frame);
    if (use_hw_frame)
        av_hwframe_get_buffer(ctx->hw_frame_ref, ctx->vk_frame, 0);
    ret = av_hwframe_transfer_data(ctx->vk_frame, frame, 1);
    if (!ret)
        return move_to_output_frame(ctx, frame);

    if (ret != AVERROR(ENOSYS))
        av_log(NULL, AV_LOG_FATAL, "Transfer frame failed: %s\n",
               av_err2str(ret));
    return ret;
}

static int convert_frame(VkRenderer *renderer, AVFrame *frame)
{
    int ret;

    if (!frame->hw_frames_ctx)
        return 0;

    if (frame->format == AV_PIX_FMT_VULKAN)
        return 0;

    ret = create_hw_frame(renderer, frame);
    if (ret < 0)
        return ret;

    for (int use_hw = 1; use_hw >= 0; use_hw--) {
        ret = map_frame(renderer, frame, use_hw);
        if (!ret)
            return 0;
        if (ret != AVERROR(ENOSYS))
            return ret;

        ret = transfer_frame(renderer, frame, use_hw);
        if (!ret)
            return 0;
        if (ret != AVERROR(ENOSYS))
            return ret;
    }

    return ret;
}
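
/*
 * In effect convert_frame() walks a fallback ladder, treating ENOSYS as
 * "try the next rung":
 *   1. av_hwframe_map() into the Vulkan hwframe context   (use_hw = 1)
 *   2. av_hwframe_transfer_data() into that context       (use_hw = 1)
 *   3. av_hwframe_map() without a hwframe context         (use_hw = 0)
 *   4. av_hwframe_transfer_data() down to system memory   (use_hw = 0)
 * The first rung that succeeds moves its result into the caller's frame.
 */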

static int display(VkRenderer *renderer, AVFrame *frame)
{
    struct pl_swapchain_frame swap_frame = {0};
    struct pl_frame pl_frame = {0};
    struct pl_frame target = {0};
    RendererContext *ctx = (RendererContext *) renderer;
    int ret = 0;

    ret = convert_frame(renderer, frame);
    if (ret < 0)
        return ret;

    if (!pl_map_avframe_ex(ctx->placebo_vulkan->gpu, &pl_frame, pl_avframe_params(
            .frame = frame,
            .tex = ctx->tex))) {
        av_log(NULL, AV_LOG_ERROR, "pl_map_avframe_ex failed\n");
        return AVERROR_EXTERNAL;
    }

    if (!pl_swapchain_start_frame(ctx->swapchain, &swap_frame)) {
        av_log(NULL, AV_LOG_ERROR, "start frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    pl_frame_from_swapchain(&target, &swap_frame);
    if (!pl_render_image(ctx->renderer, &pl_frame, &target,
                         &pl_render_default_params)) {
        av_log(NULL, AV_LOG_ERROR, "pl_render_image failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }

    if (!pl_swapchain_submit_frame(ctx->swapchain)) {
        av_log(NULL, AV_LOG_ERROR, "pl_swapchain_submit_frame failed\n");
        ret = AVERROR_EXTERNAL;
        goto out;
    }
    pl_swapchain_swap_buffers(ctx->swapchain);

out:
    pl_unmap_avframe(ctx->placebo_vulkan->gpu, &pl_frame);
    return ret;
}
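
/*
 * This is the usual libplacebo present sequence: map the AVFrame to a
 * pl_frame, acquire a swapchain image, render into it, submit, then swap.
 * pl_unmap_avframe() runs on every exit path, so the mapping is always
 * released while the textures cached in ctx->tex can be reused.
 */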

static int resize(VkRenderer *renderer, int width, int height)
{
    RendererContext *ctx = (RendererContext *) renderer;

    if (!pl_swapchain_resize(ctx->swapchain, &width, &height))
        return AVERROR_EXTERNAL;
    return 0;
}

static void destroy(VkRenderer *renderer)
{
    RendererContext *ctx = (RendererContext *) renderer;
    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;

    av_frame_free(&ctx->vk_frame);
    av_freep(&ctx->transfer_formats);
    av_hwframe_constraints_free(&ctx->constraints);
    av_buffer_unref(&ctx->hw_frame_ref);

    if (ctx->placebo_vulkan) {
        for (int i = 0; i < FF_ARRAY_ELEMS(ctx->tex); i++)
            pl_tex_destroy(ctx->placebo_vulkan->gpu, &ctx->tex[i]);
        pl_renderer_destroy(&ctx->renderer);
        pl_swapchain_destroy(&ctx->swapchain);
        pl_vulkan_destroy(&ctx->placebo_vulkan);
    }

    if (ctx->vk_surface) {
        vkDestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)
            ctx->get_proc_addr(ctx->inst, "vkDestroySurfaceKHR");
        vkDestroySurfaceKHR(ctx->inst, ctx->vk_surface, NULL);
        ctx->vk_surface = NULL;
    }

    av_buffer_unref(&ctx->hw_device_ref);
    pl_vk_inst_destroy(&ctx->placebo_instance);

    pl_log_destroy(&ctx->vk_log);
}

static const AVClass vulkan_renderer_class = {
    .class_name = "Vulkan Renderer",
    .item_name = av_default_item_name,
    .version = LIBAVUTIL_VERSION_INT,
};

VkRenderer *vk_get_renderer(void)
{
    RendererContext *ctx = av_mallocz(sizeof(*ctx));
    VkRenderer *renderer;

    if (!ctx)
        return NULL;

    renderer = &ctx->api;
    renderer->class = &vulkan_renderer_class;
    renderer->get_hw_dev = get_hw_dev;
    renderer->create = create;
    renderer->display = display;
    renderer->resize = resize;
    renderer->destroy = destroy;

    return renderer;
}

#else

VkRenderer *vk_get_renderer(void)
{
    return NULL;
}

#endif

int vk_renderer_create(VkRenderer *renderer, SDL_Window *window,
                       AVDictionary *opt)
{
    return renderer->create(renderer, window, opt);
}

int vk_renderer_get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
{
    return renderer->get_hw_dev(renderer, dev);
}

int vk_renderer_display(VkRenderer *renderer, AVFrame *frame)
{
    return renderer->display(renderer, frame);
}

int vk_renderer_resize(VkRenderer *renderer, int width, int height)
{
    return renderer->resize(renderer, width, height);
}

void vk_renderer_destroy(VkRenderer *renderer)
{
    renderer->destroy(renderer);
}
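
/*
 * A minimal caller sketch (hypothetical; ffplay.c's real integration also
 * wires the hw device into the decoder and drives this from its event loop):
 *
 *     SDL_Window *win = SDL_CreateWindow("ffplay", SDL_WINDOWPOS_UNDEFINED,
 *                                        SDL_WINDOWPOS_UNDEFINED, 1280, 720,
 *                                        SDL_WINDOW_VULKAN);
 *     VkRenderer *vk = vk_get_renderer();  // NULL when built without support
 *     if (vk && vk_renderer_create(vk, win, NULL) >= 0) {
 *         // for each decoded AVFrame *frame:
 *         vk_renderer_display(vk, frame);
 *         // on window size changes:
 *         //     vk_renderer_resize(vk, new_w, new_h);
 *         vk_renderer_destroy(vk);
 *     }
 */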