/*
 * Copyright 2020 Google LLC
 * SPDX-License-Identifier: MIT
 */

#include "vkr_renderer.h"

#include <assert.h>
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

#include "c11/threads.h"
#include "pipe/p_state.h"
#include "util/u_double_list.h"
#include "util/u_hash_table.h"
#include "util/u_math.h"
#include "util/u_memory.h"
#include "util/u_pointer.h"

#include "venus-protocol/vn_protocol_renderer.h"
#include "virgl_context.h"
#include "virgl_protocol.h" /* for transfer_mode */
#include "virgl_resource.h"
#include "virgl_util.h"
#include "virglrenderer.h"
#include "virglrenderer_hw.h"
#include "vkr_cs.h"
#include "vkr_object.h"
#include "vkr_ring.h"
#include "vrend_debug.h"
#include "vrend_iov.h"

/*
 * TODO what extensions do we need from the host driver?
 *
 * We don't check vkGetPhysicalDeviceExternalBufferProperties, etc. yet. Even
 * if we did, silently adding external memory info to vkCreateBuffer or
 * vkCreateImage could change the results of vkGetBufferMemoryRequirements or
 * vkGetImageMemoryRequirements and confuse the guest.
 */
#define FORCE_ENABLE_DMABUF

/*
 * TODO Most of the functions are generated. Some of them are then
 * hand-edited. Find a better/cleaner way to reduce manual work.
 */
#define CREATE_OBJECT(obj, vkr_type, vk_obj, vk_cmd, vk_arg) \
   struct vkr_ ## vkr_type *obj = calloc(1, sizeof(*obj)); \
   if (!obj) { \
      args->ret = VK_ERROR_OUT_OF_HOST_MEMORY; \
      return; \
   } \
   obj->base.type = VK_OBJECT_TYPE_ ## vk_obj; \
   obj->base.id = vkr_cs_handle_load_id( \
      (const void **)args->vk_arg, obj->base.type); \
   \
   vn_replace_ ## vk_cmd ## _args_handle(args); \
   args->ret = vk_cmd(args->device, args->pCreateInfo, NULL, \
                      &obj->base.handle.vkr_type); \
   if (args->ret != VK_SUCCESS) { \
      free(obj); \
      return; \
   } \
   (void)obj

#define DESTROY_OBJECT(obj, vkr_type, vk_obj, vk_cmd, vk_arg) \
   struct vkr_ ## vkr_type *obj = \
      (struct vkr_ ## vkr_type *)(uintptr_t)args->vk_arg; \
   if (!obj || obj->base.type != VK_OBJECT_TYPE_ ## vk_obj) { \
      if (obj) \
         vkr_cs_decoder_set_fatal(&ctx->decoder); \
      return; \
   } \
   \
   vn_replace_ ## vk_cmd ## _args_handle(args); \
   vk_cmd(args->device, args->vk_arg, NULL)

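/*
 * For reference, a dispatch function built from these macros expands
 * roughly as the hypothetical sampler entry points below (an illustrative
 * sketch, not code generated from the protocol):
 *
 *    static void
 *    vkr_dispatch_vkCreateSampler(struct vn_dispatch_context *dispatch,
 *                                 struct vn_command_vkCreateSampler *args)
 *    {
 *       struct vkr_context *ctx = dispatch->data;
 *       CREATE_OBJECT(sampler, sampler, SAMPLER, vkCreateSampler, pSampler);
 *       util_hash_table_set_u64(ctx->object_table, sampler->base.id, sampler);
 *    }
 *
 *    static void
 *    vkr_dispatch_vkDestroySampler(struct vn_dispatch_context *dispatch,
 *                                  struct vn_command_vkDestroySampler *args)
 *    {
 *       struct vkr_context *ctx = dispatch->data;
 *       DESTROY_OBJECT(sampler, sampler, SAMPLER, vkDestroySampler, sampler);
 *       util_hash_table_remove_u64(ctx->object_table, sampler->base.id);
 *    }
 */
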
struct vkr_physical_device;

struct vkr_instance {
   struct vkr_object base;

   uint32_t api_version;
   PFN_vkGetMemoryFdKHR get_memory_fd;
   PFN_vkGetFenceFdKHR get_fence_fd;

   uint32_t physical_device_count;
   VkPhysicalDevice *physical_device_handles;
   struct vkr_physical_device **physical_devices;
};

struct vkr_physical_device {
   struct vkr_object base;

   VkPhysicalDeviceProperties properties;
   uint32_t api_version;

   VkExtensionProperties *extensions;
   uint32_t extension_count;

   VkPhysicalDeviceMemoryProperties memory_properties;

   bool KHR_external_memory_fd;
   bool EXT_external_memory_dma_buf;

   bool KHR_external_fence_fd;
};

struct vkr_queue_sync {
   VkFence fence;

   uint32_t flags;
   void *fence_cookie;

   struct list_head head;
};

struct vkr_device {
   struct vkr_object base;

   struct vkr_physical_device *physical_device;

   /* Vulkan 1.2 */
   PFN_vkGetSemaphoreCounterValue GetSemaphoreCounterValue;
   PFN_vkWaitSemaphores WaitSemaphores;
   PFN_vkSignalSemaphore SignalSemaphore;
   PFN_vkGetDeviceMemoryOpaqueCaptureAddress GetDeviceMemoryOpaqueCaptureAddress;
   PFN_vkGetBufferOpaqueCaptureAddress GetBufferOpaqueCaptureAddress;
   PFN_vkGetBufferDeviceAddress GetBufferDeviceAddress;
   PFN_vkResetQueryPool ResetQueryPool;
   PFN_vkCreateRenderPass2 CreateRenderPass2;
   PFN_vkCmdBeginRenderPass2 CmdBeginRenderPass2;
   PFN_vkCmdNextSubpass2 CmdNextSubpass2;
   PFN_vkCmdEndRenderPass2 CmdEndRenderPass2;
   PFN_vkCmdDrawIndirectCount CmdDrawIndirectCount;
   PFN_vkCmdDrawIndexedIndirectCount CmdDrawIndexedIndirectCount;

   PFN_vkCmdBindTransformFeedbackBuffersEXT cmd_bind_transform_feedback_buffers;
   PFN_vkCmdBeginTransformFeedbackEXT cmd_begin_transform_feedback;
   PFN_vkCmdEndTransformFeedbackEXT cmd_end_transform_feedback;
   PFN_vkCmdBeginQueryIndexedEXT cmd_begin_query_indexed;
   PFN_vkCmdEndQueryIndexedEXT cmd_end_query_indexed;
   PFN_vkCmdDrawIndirectByteCountEXT cmd_draw_indirect_byte_count;

   PFN_vkGetImageDrmFormatModifierPropertiesEXT get_image_drm_format_modifier_properties;

   PFN_vkGetMemoryFdPropertiesKHR get_memory_fd_properties;

   struct list_head queues;

   struct list_head free_syncs;
};

struct vkr_queue {
   struct vkr_object base;

   struct vkr_device *device;

   uint32_t family;
   uint32_t index;

   bool has_thread;
   int eventfd;
   thrd_t thread;
   mtx_t mutex;
   cnd_t cond;
   bool join;
   struct list_head pending_syncs;
   struct list_head signaled_syncs;

   struct list_head head;
   struct list_head busy_head;
};

struct vkr_device_memory {
   struct vkr_object base;

   VkDevice device;
   uint32_t property_flags;
   uint32_t valid_fd_types;

   bool exported;
   uint32_t exported_res_id;
   struct list_head head;
};

struct vkr_fence {
   struct vkr_object base;
};

struct vkr_semaphore {
   struct vkr_object base;
};

struct vkr_buffer {
   struct vkr_object base;
};

struct vkr_buffer_view {
   struct vkr_object base;
};

struct vkr_image {
   struct vkr_object base;
};

struct vkr_image_view {
   struct vkr_object base;
};

struct vkr_sampler {
   struct vkr_object base;
};

struct vkr_sampler_ycbcr_conversion {
   struct vkr_object base;
};

struct vkr_descriptor_set_layout {
   struct vkr_object base;
};

struct vkr_descriptor_pool {
   struct vkr_object base;

   struct list_head descriptor_sets;
};

struct vkr_descriptor_set {
   struct vkr_object base;

   struct list_head head;
};

struct vkr_descriptor_update_template {
   struct vkr_object base;
};

struct vkr_render_pass {
   struct vkr_object base;
};

struct vkr_framebuffer {
   struct vkr_object base;
};

struct vkr_event {
   struct vkr_object base;
};

struct vkr_query_pool {
   struct vkr_object base;
};

struct vkr_shader_module {
   struct vkr_object base;
};

struct vkr_pipeline_layout {
   struct vkr_object base;
};

struct vkr_pipeline_cache {
   struct vkr_object base;
};

struct vkr_pipeline {
   struct vkr_object base;
};

struct vkr_command_pool {
   struct vkr_object base;

   struct list_head command_buffers;
};

struct vkr_command_buffer {
   struct vkr_object base;

   struct vkr_device *device;

   struct list_head head;
};

/*
 * When a virgl_resource is attached in vkr_context_attach_resource, a
 * vkr_resource_attachment is created. A vkr_resource_attachment is valid
 * until the resource it tracks is detached.
 *
 * To support transfers to resources not backed by coherent dma-bufs, we
 * associate a vkr_resource_attachment with a (list of) vkr_device_memory.
 * This way, we can find a vkr_device_memory from a vkr_resource_attachment
 * and do transfers using VkDeviceMemory.
 */
struct vkr_resource_attachment {
   struct virgl_resource *resource;
   struct list_head memories;
};
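
/*
 * Sketch (not the actual transfer path): a transfer to such a resource can
 * pick any memory on the list and map it with standard Vulkan calls, e.g.
 *
 *    struct vkr_device_memory *mem =
 *       LIST_ENTRY(struct vkr_device_memory, att->memories.next, head);
 *    void *ptr;
 *    vkMapMemory(mem->device, mem->base.handle.device_memory,
 *                offset, size, 0, &ptr);
 */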

struct vkr_context {
   struct virgl_context base;

   char *debug_name;

   mtx_t mutex;

   struct list_head rings;
   struct util_hash_table_u64 *object_table;
   struct util_hash_table *resource_table;
   struct list_head newly_exported_memories;

   struct vkr_cs_encoder encoder;
   struct vkr_cs_decoder decoder;
   struct vn_dispatch_context dispatch;

   int fence_eventfd;
   struct list_head busy_queues;

   struct vkr_instance *instance;
};

static uint32_t vkr_renderer_flags;

struct object_array {
   uint32_t count;
   void **objects;
   void *handle_storage;

   /* true if the ownership of the objects has been transferred (to
    * vkr_context::object_table)
    */
   bool objects_stolen;
};

static void
object_array_fini(struct object_array *arr)
{
   if (!arr->objects_stolen) {
      for (uint32_t i = 0; i < arr->count; i++)
         free(arr->objects[i]);
   }

   free(arr->objects);
   free(arr->handle_storage);
}

static bool
object_array_init(struct object_array *arr,
                  uint32_t count,
                  VkObjectType obj_type,
                  size_t obj_size,
                  size_t handle_size,
                  const void *handles)
{
   arr->count = count;

   arr->objects = malloc(sizeof(*arr->objects) * count);
   if (!arr->objects)
      return false;

   arr->handle_storage = malloc(handle_size * count);
   if (!arr->handle_storage) {
      free(arr->objects);
      return false;
   }

   arr->objects_stolen = false;
   for (uint32_t i = 0; i < count; i++) {
      struct vkr_object *obj = calloc(1, obj_size);
      if (!obj) {
         arr->count = i;
         object_array_fini(arr);
         return false;
      }

      obj->type = obj_type;
      obj->id = vkr_cs_handle_load_id(
         (const void **)((char *)handles + handle_size * i), obj->type);

      arr->objects[i] = obj;
   }

   return true;
}

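/*
 * Typical consumer (sketch): pool allocations such as
 * vkAllocateCommandBuffers initialize an object_array from the
 * guest-provided handles, call into the driver, and then either transfer
 * ownership of the objects to vkr_context::object_table (setting
 * objects_stolen) or let object_array_fini() free everything on failure.
 */
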
static void
vkr_dispatch_vkSetReplyCommandStreamMESA(struct vn_dispatch_context *dispatch, struct vn_command_vkSetReplyCommandStreamMESA *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_resource_attachment *att;

   att = util_hash_table_get(ctx->resource_table,
                             uintptr_to_pointer(args->pStream->resourceId));
   if (!att)
      return;

   vkr_cs_encoder_set_stream(&ctx->encoder,
                             att->resource->iov,
                             att->resource->iov_count,
                             args->pStream->offset,
                             args->pStream->size);
}

static void
vkr_dispatch_vkSeekReplyCommandStreamMESA(struct vn_dispatch_context *dispatch, struct vn_command_vkSeekReplyCommandStreamMESA *args)
{
   struct vkr_context *ctx = dispatch->data;
   vkr_cs_encoder_seek_stream(&ctx->encoder, args->position);
}

static void *
copy_command_stream(struct vkr_context *ctx,
                    const VkCommandStreamDescriptionMESA *stream)
{
   struct vkr_resource_attachment *att;
   struct virgl_resource *res;

   att = util_hash_table_get(ctx->resource_table,
                             uintptr_to_pointer(stream->resourceId));
   if (!att)
      return NULL;
   res = att->resource;

   /* seek to offset */
   size_t iov_offset = stream->offset;
   const struct iovec *iov = NULL;
   for (int i = 0; i < res->iov_count; i++) {
      if (iov_offset < res->iov[i].iov_len) {
         iov = &res->iov[i];
         break;
      }
      iov_offset -= res->iov[i].iov_len;
   }
   if (!iov)
      return NULL;

   /* XXX until the decoder supports scatter-gather and is robust enough,
    * always make a copy in case the caller modifies the commands while we
    * parse
    */
   uint8_t *data = malloc(stream->size);
   if (!data)
      return NULL;

   uint32_t copied = 0;
   while (true) {
      const size_t s = MIN2(stream->size - copied, iov->iov_len - iov_offset);
      memcpy(data + copied, (const uint8_t *)iov->iov_base + iov_offset, s);

      copied += s;
      if (copied == stream->size) {
         break;
      } else if (iov == &res->iov[res->iov_count - 1]) {
         free(data);
         return NULL;
      }

      iov++;
      iov_offset = 0;
   }

   return data;
}

static void
vkr_dispatch_vkExecuteCommandStreamsMESA(struct vn_dispatch_context *dispatch, struct vn_command_vkExecuteCommandStreamsMESA *args)
{
   struct vkr_context *ctx = dispatch->data;

   /* note that nested vkExecuteCommandStreamsMESA is not allowed */
   if (!vkr_cs_decoder_push_state(&ctx->decoder)) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   for (uint32_t i = 0; i < args->streamCount; i++) {
      const VkCommandStreamDescriptionMESA *stream = &args->pStreams[i];

      if (args->pReplyPositions)
         vkr_cs_encoder_seek_stream(&ctx->encoder, args->pReplyPositions[i]);

      if (!stream->size)
         continue;

      void *data = copy_command_stream(ctx, stream);
      if (!data) {
         vkr_cs_decoder_set_fatal(&ctx->decoder);
         break;
      }

      vkr_cs_decoder_set_stream(&ctx->decoder, data, stream->size);
      while (vkr_cs_decoder_has_command(&ctx->decoder)) {
         vn_dispatch_command(&ctx->dispatch);
         if (vkr_cs_decoder_get_fatal(&ctx->decoder))
            break;
      }

      free(data);

      if (vkr_cs_decoder_get_fatal(&ctx->decoder))
         break;
   }

   vkr_cs_decoder_pop_state(&ctx->decoder);
}

static struct vkr_ring *
lookup_ring(struct vkr_context *ctx, uint64_t ring_id)
{
   struct vkr_ring *ring;
   LIST_FOR_EACH_ENTRY(ring, &ctx->rings, head) {
      if (ring->id == ring_id)
         return ring;
   }
   return NULL;
}

static void
vkr_dispatch_vkCreateRingMESA(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateRingMESA *args)
{
   struct vkr_context *ctx = dispatch->data;
   const VkRingCreateInfoMESA *info = args->pCreateInfo;
   const struct vkr_resource_attachment *att;
   uint8_t *shared;
   size_t size;
   struct vkr_ring *ring;

   att = util_hash_table_get(ctx->resource_table,
                             uintptr_to_pointer(info->resourceId));
   if (!att) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   /* TODO support scatter-gather or require logically contiguous resources */
   if (att->resource->iov_count != 1) {
      vrend_printf("vkr: no scatter-gather support for ring buffers (TODO)\n");
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   shared = att->resource->iov[0].iov_base;
   size = att->resource->iov[0].iov_len;
   if (info->offset > size || info->size > size - info->offset) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   shared += info->offset;
   size = info->size;
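   /*
    * The bounds checks below are deliberately written as "a > size - b"
    * rather than "a + b > size" so that untrusted offsets and sizes from
    * the guest cannot wrap around.
    */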
   if (info->headOffset > size ||
       info->tailOffset > size ||
       info->statusOffset > size ||
       info->bufferOffset > size ||
       info->extraOffset > size) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }
   if (sizeof(uint32_t) > size - info->headOffset ||
       sizeof(uint32_t) > size - info->tailOffset ||
       sizeof(uint32_t) > size - info->statusOffset ||
       info->bufferSize > size - info->bufferOffset ||
       info->extraSize > size - info->extraOffset) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }
   if (!info->bufferSize || !util_is_power_of_two(info->bufferSize)) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   const struct vkr_ring_layout layout = {
      .head_offset = info->headOffset,
      .tail_offset = info->tailOffset,
      .status_offset = info->statusOffset,
      .buffer_offset = info->bufferOffset,
      .buffer_size = info->bufferSize,
      .extra_offset = info->extraOffset,
      .extra_size = info->extraSize,
   };

   ring = vkr_ring_create(&layout, shared, &ctx->base, info->idleTimeout);
   if (!ring) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   ring->id = args->ring;
   list_addtail(&ring->head, &ctx->rings);

   vkr_ring_start(ring);
}

static void
vkr_dispatch_vkDestroyRingMESA(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyRingMESA *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_ring *ring = lookup_ring(ctx, args->ring);
   if (!ring || !vkr_ring_stop(ring)) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   list_del(&ring->head);
   vkr_ring_destroy(ring);
}

static void
vkr_dispatch_vkNotifyRingMESA(struct vn_dispatch_context *dispatch, struct vn_command_vkNotifyRingMESA *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_ring *ring = lookup_ring(ctx, args->ring);
   if (!ring) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   vkr_ring_notify(ring);
}

static void
vkr_dispatch_vkWriteRingExtraMESA(struct vn_dispatch_context *dispatch, struct vn_command_vkWriteRingExtraMESA *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_ring *ring = lookup_ring(ctx, args->ring);
   if (!ring) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   if (!vkr_ring_write_extra(ring, args->offset, args->value))
      vkr_cs_decoder_set_fatal(&ctx->decoder);
}

static void
vkr_dispatch_vkEnumerateInstanceVersion(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkEnumerateInstanceVersion *args)
{
   vn_replace_vkEnumerateInstanceVersion_args_handle(args);
   args->ret = vkEnumerateInstanceVersion(args->pApiVersion);
}

static void
vkr_dispatch_vkEnumerateInstanceExtensionProperties(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkEnumerateInstanceExtensionProperties *args)
{
   VkExtensionProperties private_extensions[] = {
      {
         .extensionName = "VK_EXT_command_serialization",
      },
      {
         .extensionName = "VK_MESA_venus_protocol",
      },
   };

   if (!args->pProperties) {
      *args->pPropertyCount = ARRAY_SIZE(private_extensions);
      args->ret = VK_SUCCESS;
      return;
   }

   for (uint32_t i = 0; i < ARRAY_SIZE(private_extensions); i++) {
      VkExtensionProperties *props = &private_extensions[i];
      props->specVersion = vn_info_extension_spec_version(props->extensionName);
   }

   const uint32_t count =
      MIN2(*args->pPropertyCount, ARRAY_SIZE(private_extensions));
   memcpy(args->pProperties, private_extensions, sizeof(*args->pProperties) * count);
   *args->pPropertyCount = count;
   args->ret = count == ARRAY_SIZE(private_extensions) ? VK_SUCCESS : VK_INCOMPLETE;
}

static void
vkr_dispatch_vkCreateInstance(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateInstance *args)
{
   struct vkr_context *ctx = dispatch->data;

   if (ctx->instance) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   if (args->pCreateInfo->enabledExtensionCount) {
      args->ret = VK_ERROR_EXTENSION_NOT_PRESENT;
      return;
   }

   struct vkr_instance *instance = calloc(1, sizeof(*instance));
   if (!instance) {
      args->ret = VK_ERROR_OUT_OF_HOST_MEMORY;
      return;
   }

   uint32_t instance_version;
   args->ret = vkEnumerateInstanceVersion(&instance_version);
   if (args->ret != VK_SUCCESS) {
      free(instance);
      return;
   }

   /* require Vulkan 1.1 */
   if (instance_version < VK_API_VERSION_1_1) {
      free(instance);
      args->ret = VK_ERROR_INITIALIZATION_FAILED;
      return;
   }

   VkApplicationInfo app_info = {
      .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
      .apiVersion = VK_API_VERSION_1_1,
   };
   if (args->pCreateInfo->pApplicationInfo) {
      app_info = *args->pCreateInfo->pApplicationInfo;
      if (app_info.apiVersion < VK_API_VERSION_1_1)
         app_info.apiVersion = VK_API_VERSION_1_1;
   }
   ((VkInstanceCreateInfo *)args->pCreateInfo)->pApplicationInfo = &app_info;

   instance->base.type = VK_OBJECT_TYPE_INSTANCE;
   instance->base.id = vkr_cs_handle_load_id((const void **)args->pInstance,
                                             instance->base.type);
   instance->api_version = app_info.apiVersion;

   vn_replace_vkCreateInstance_args_handle(args);
   args->ret = vkCreateInstance(args->pCreateInfo, NULL, &instance->base.handle.instance);
   if (args->ret != VK_SUCCESS) {
      free(instance);
      return;
   }

   instance->get_memory_fd = (PFN_vkGetMemoryFdKHR)
      vkGetInstanceProcAddr(instance->base.handle.instance, "vkGetMemoryFdKHR");
   instance->get_fence_fd = (PFN_vkGetFenceFdKHR)
      vkGetInstanceProcAddr(instance->base.handle.instance, "vkGetFenceFdKHR");

   util_hash_table_set_u64(ctx->object_table, instance->base.id, instance);

   ctx->instance = instance;
}

static void
vkr_dispatch_vkDestroyInstance(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyInstance *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_instance *instance = (struct vkr_instance *)args->instance;

   if (ctx->instance != instance) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   vn_replace_vkDestroyInstance_args_handle(args);
   vkDestroyInstance(args->instance, NULL);

   /* TODO cleanup all objects instead? */
   for (uint32_t i = 0; i < instance->physical_device_count; i++) {
      struct vkr_physical_device *physical_dev = instance->physical_devices[i];
      if (!physical_dev)
         break;
      free(physical_dev->extensions);
      util_hash_table_remove_u64(ctx->object_table, physical_dev->base.id);
   }
   free(instance->physical_device_handles);
   free(instance->physical_devices);

   util_hash_table_remove_u64(ctx->object_table, instance->base.id);

   ctx->instance = NULL;
}

static VkResult
vkr_instance_enumerate_physical_devices(struct vkr_instance *instance)
{
   if (instance->physical_device_count)
      return VK_SUCCESS;

   uint32_t count;
   VkResult result = vkEnumeratePhysicalDevices(instance->base.handle.instance, &count, NULL);
   if (result != VK_SUCCESS)
      return result;

   VkPhysicalDevice *handles = calloc(count, sizeof(*handles));
   struct vkr_physical_device **physical_devs = calloc(count, sizeof(*physical_devs));
   if (!handles || !physical_devs) {
      free(physical_devs);
      free(handles);
      return VK_ERROR_OUT_OF_HOST_MEMORY;
   }

   result = vkEnumeratePhysicalDevices(instance->base.handle.instance, &count, handles);
   if (result != VK_SUCCESS) {
      free(physical_devs);
      free(handles);
      return result;
   }

   instance->physical_device_count = count;
   instance->physical_device_handles = handles;
   instance->physical_devices = physical_devs;

   return VK_SUCCESS;
}

static struct vkr_physical_device *
vkr_instance_lookup_physical_device(struct vkr_instance *instance, VkPhysicalDevice handle)
{
   for (uint32_t i = 0; i < instance->physical_device_count; i++) {
      /* XXX this assumes VkPhysicalDevice handles are unique */
      if (instance->physical_device_handles[i] == handle)
         return instance->physical_devices[i];
   }
   return NULL;
}

static void
vkr_physical_device_init_memory_properties(struct vkr_physical_device *physical_dev)
{
   VkPhysicalDevice handle = physical_dev->base.handle.physical_device;
   vkGetPhysicalDeviceMemoryProperties(handle, &physical_dev->memory_properties);
}

static void
vkr_physical_device_init_extensions(struct vkr_physical_device *physical_dev)
{
   VkPhysicalDevice handle = physical_dev->base.handle.physical_device;

   VkExtensionProperties *exts;
   uint32_t count;
   VkResult result = vkEnumerateDeviceExtensionProperties(handle, NULL, &count, NULL);
   if (result != VK_SUCCESS)
      return;

   exts = malloc(sizeof(*exts) * count);
   if (!exts)
      return;

   result = vkEnumerateDeviceExtensionProperties(handle, NULL, &count, exts);
   if (result != VK_SUCCESS) {
      free(exts);
      return;
   }

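   /*
    * Advertise only the extensions known to the venus protocol, and clamp
    * each specVersion to the version the protocol was generated against,
    * so the guest never sees commands we cannot decode.
    */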
   uint32_t advertised_count = 0;
   for (uint32_t i = 0; i < count; i++) {
      VkExtensionProperties *props = &exts[i];

      if (!strcmp(props->extensionName, "VK_KHR_external_memory_fd"))
         physical_dev->KHR_external_memory_fd = true;
      else if (!strcmp(props->extensionName, "VK_EXT_external_memory_dma_buf"))
         physical_dev->EXT_external_memory_dma_buf = true;
      else if (!strcmp(props->extensionName, "VK_KHR_external_fence_fd"))
         physical_dev->KHR_external_fence_fd = true;

      const uint32_t spec_ver = vn_info_extension_spec_version(props->extensionName);
      if (spec_ver) {
         if (props->specVersion > spec_ver)
            props->specVersion = spec_ver;
         exts[advertised_count++] = exts[i];
      }
   }

   physical_dev->extensions = exts;
   physical_dev->extension_count = advertised_count;
}

static void
vkr_physical_device_init_properties(struct vkr_physical_device *physical_dev)
{
   VkPhysicalDevice handle = physical_dev->base.handle.physical_device;
   vkGetPhysicalDeviceProperties(handle, &physical_dev->properties);

   VkPhysicalDeviceProperties *props = &physical_dev->properties;
   props->driverVersion = 0;
   props->vendorID = 0;
   props->deviceID = 0;
   props->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
   memset(props->deviceName, 0, sizeof(props->deviceName));

   /* TODO lie about props->pipelineCacheUUID and patch cache header */
}

static void
vkr_dispatch_vkEnumeratePhysicalDevices(struct vn_dispatch_context *dispatch, struct vn_command_vkEnumeratePhysicalDevices *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_instance *instance = (struct vkr_instance *)args->instance;
   if (instance != ctx->instance) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   args->ret = vkr_instance_enumerate_physical_devices(instance);
   if (args->ret != VK_SUCCESS)
      return;

   uint32_t count = instance->physical_device_count;
   if (!args->pPhysicalDevices) {
      *args->pPhysicalDeviceCount = count;
      args->ret = VK_SUCCESS;
      return;
   }

   if (count > *args->pPhysicalDeviceCount) {
      count = *args->pPhysicalDeviceCount;
      args->ret = VK_INCOMPLETE;
   } else {
      *args->pPhysicalDeviceCount = count;
      args->ret = VK_SUCCESS;
   }

   uint32_t i;
   for (i = 0; i < count; i++) {
      struct vkr_physical_device *physical_dev = instance->physical_devices[i];
      const vkr_object_id id = vkr_cs_handle_load_id(
         (const void **)&args->pPhysicalDevices[i],
         VK_OBJECT_TYPE_PHYSICAL_DEVICE);

      if (physical_dev) {
         if (physical_dev->base.id != id) {
            vkr_cs_decoder_set_fatal(&ctx->decoder);
            break;
         }
         continue;
      }

      physical_dev = calloc(1, sizeof(*physical_dev));
      if (!physical_dev) {
         args->ret = VK_ERROR_OUT_OF_HOST_MEMORY;
         break;
      }

      physical_dev->base.type = VK_OBJECT_TYPE_PHYSICAL_DEVICE;
      physical_dev->base.id = id;
      physical_dev->base.handle.physical_device = instance->physical_device_handles[i];

      vkr_physical_device_init_properties(physical_dev);
      physical_dev->api_version = MIN2(physical_dev->properties.apiVersion,
                                       instance->api_version);
      vkr_physical_device_init_extensions(physical_dev);
      vkr_physical_device_init_memory_properties(physical_dev);

      instance->physical_devices[i] = physical_dev;

      util_hash_table_set_u64(ctx->object_table, physical_dev->base.id, physical_dev);
   }
   /* remove all physical devices on errors */
   if (i < count) {
      for (i = 0; i < instance->physical_device_count; i++) {
         struct vkr_physical_device *physical_dev = instance->physical_devices[i];
         if (!physical_dev)
            break;
         free(physical_dev->extensions);
         util_hash_table_remove_u64(ctx->object_table, physical_dev->base.id);
         instance->physical_devices[i] = NULL;
      }
   }
}

static void
vkr_dispatch_vkEnumeratePhysicalDeviceGroups(struct vn_dispatch_context *dispatch, struct vn_command_vkEnumeratePhysicalDeviceGroups *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_instance *instance = (struct vkr_instance *)args->instance;
   if (instance != ctx->instance) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   args->ret = vkr_instance_enumerate_physical_devices(instance);
   if (args->ret != VK_SUCCESS)
      return;

   VkPhysicalDeviceGroupProperties *orig_props = args->pPhysicalDeviceGroupProperties;
   if (orig_props) {
      args->pPhysicalDeviceGroupProperties =
         malloc(sizeof(*orig_props) * *args->pPhysicalDeviceGroupCount);
      if (!args->pPhysicalDeviceGroupProperties) {
         args->ret = VK_ERROR_OUT_OF_HOST_MEMORY;
         return;
      }
   }

   vn_replace_vkEnumeratePhysicalDeviceGroups_args_handle(args);
   args->ret = vkEnumeratePhysicalDeviceGroups(args->instance, args->pPhysicalDeviceGroupCount, args->pPhysicalDeviceGroupProperties);
   if (args->ret != VK_SUCCESS) {
      if (orig_props) {
         free(args->pPhysicalDeviceGroupProperties);
         args->pPhysicalDeviceGroupProperties = orig_props;
      }
      return;
   }

   if (!orig_props)
      return;

   /* XXX this assumes vkEnumeratePhysicalDevices is called first */
   /* replace VkPhysicalDevice handles by object ids */
   for (uint32_t i = 0; i < *args->pPhysicalDeviceGroupCount; i++) {
      const VkPhysicalDeviceGroupProperties *props = &args->pPhysicalDeviceGroupProperties[i];
      VkPhysicalDeviceGroupProperties *out = &orig_props[i];

      out->physicalDeviceCount = props->physicalDeviceCount;
      out->subsetAllocation = props->subsetAllocation;
      for (uint32_t j = 0; j < props->physicalDeviceCount; j++) {
         const struct vkr_physical_device *physical_dev =
            vkr_instance_lookup_physical_device(instance, props->physicalDevices[j]);
         vkr_cs_handle_store_id((void **)&out->physicalDevices[j], physical_dev->base.id,
                                VK_OBJECT_TYPE_PHYSICAL_DEVICE);
      }
   }

   free(args->pPhysicalDeviceGroupProperties);
   args->pPhysicalDeviceGroupProperties = orig_props;
}

static void
vkr_dispatch_vkEnumerateDeviceExtensionProperties(struct vn_dispatch_context *dispatch, struct vn_command_vkEnumerateDeviceExtensionProperties *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_physical_device *physical_dev = (struct vkr_physical_device *)args->physicalDevice;
   if (!physical_dev || physical_dev->base.type != VK_OBJECT_TYPE_PHYSICAL_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }
   if (args->pLayerName) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   if (!args->pProperties) {
      *args->pPropertyCount = physical_dev->extension_count;
      args->ret = VK_SUCCESS;
      return;
   }

   uint32_t count = physical_dev->extension_count;
   if (count > *args->pPropertyCount) {
      count = *args->pPropertyCount;
      args->ret = VK_INCOMPLETE;
   } else {
      *args->pPropertyCount = count;
      args->ret = VK_SUCCESS;
   }

   memcpy(args->pProperties, physical_dev->extensions, sizeof(*args->pProperties) * count);
}

static void
vkr_dispatch_vkGetPhysicalDeviceFeatures(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceFeatures *args)
{
   vn_replace_vkGetPhysicalDeviceFeatures_args_handle(args);
   vkGetPhysicalDeviceFeatures(args->physicalDevice, args->pFeatures);
}

static void
vkr_dispatch_vkGetPhysicalDeviceProperties(struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceProperties *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_physical_device *physical_dev = (struct vkr_physical_device *)args->physicalDevice;
   if (!physical_dev || physical_dev->base.type != VK_OBJECT_TYPE_PHYSICAL_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   *args->pProperties = physical_dev->properties;
}

static void
vkr_dispatch_vkGetPhysicalDeviceQueueFamilyProperties(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceQueueFamilyProperties *args)
{
   vn_replace_vkGetPhysicalDeviceQueueFamilyProperties_args_handle(args);
   vkGetPhysicalDeviceQueueFamilyProperties(args->physicalDevice, args->pQueueFamilyPropertyCount, args->pQueueFamilyProperties);
}

static void
vkr_dispatch_vkGetPhysicalDeviceMemoryProperties(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceMemoryProperties *args)
{
   /* TODO lie about this */
   vn_replace_vkGetPhysicalDeviceMemoryProperties_args_handle(args);
   vkGetPhysicalDeviceMemoryProperties(args->physicalDevice, args->pMemoryProperties);
}

static void
vkr_dispatch_vkGetPhysicalDeviceFormatProperties(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceFormatProperties *args)
{
   vn_replace_vkGetPhysicalDeviceFormatProperties_args_handle(args);
   vkGetPhysicalDeviceFormatProperties(args->physicalDevice, args->format, args->pFormatProperties);
}

static void
vkr_dispatch_vkGetPhysicalDeviceImageFormatProperties(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceImageFormatProperties *args)
{
   vn_replace_vkGetPhysicalDeviceImageFormatProperties_args_handle(args);
   args->ret = vkGetPhysicalDeviceImageFormatProperties(args->physicalDevice, args->format, args->type, args->tiling, args->usage, args->flags, args->pImageFormatProperties);
}

static void
vkr_dispatch_vkGetPhysicalDeviceSparseImageFormatProperties(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceSparseImageFormatProperties *args)
{
   vn_replace_vkGetPhysicalDeviceSparseImageFormatProperties_args_handle(args);
   vkGetPhysicalDeviceSparseImageFormatProperties(args->physicalDevice, args->format, args->type, args->samples, args->usage, args->tiling, args->pPropertyCount, args->pProperties);
}

static void
vkr_dispatch_vkGetPhysicalDeviceFeatures2(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceFeatures2 *args)
{
   vn_replace_vkGetPhysicalDeviceFeatures2_args_handle(args);
   vkGetPhysicalDeviceFeatures2(args->physicalDevice, args->pFeatures);
}

static void
vkr_dispatch_vkGetPhysicalDeviceProperties2(struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceProperties2 *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_physical_device *physical_dev = (struct vkr_physical_device *)args->physicalDevice;
   if (!physical_dev || physical_dev->base.type != VK_OBJECT_TYPE_PHYSICAL_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   vn_replace_vkGetPhysicalDeviceProperties2_args_handle(args);
   vkGetPhysicalDeviceProperties2(args->physicalDevice, args->pProperties);

   union {
      VkBaseOutStructure *pnext;
      VkPhysicalDeviceProperties2 *props;
      VkPhysicalDeviceVulkan11Properties *vk11;
      VkPhysicalDeviceVulkan12Properties *vk12;
      VkPhysicalDeviceIDProperties *id;
      VkPhysicalDeviceDriverProperties *driver;
   } u;

   u.pnext = (VkBaseOutStructure *)args->pProperties;
   while (u.pnext) {
      switch (u.pnext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2:
         u.props->properties = physical_dev->properties;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
         memset(u.vk11->deviceUUID, 0, sizeof(u.vk11->deviceUUID));
         memset(u.vk11->driverUUID, 0, sizeof(u.vk11->driverUUID));
         memset(u.vk11->deviceLUID, 0, sizeof(u.vk11->deviceLUID));
         u.vk11->deviceNodeMask = 0;
         u.vk11->deviceLUIDValid = false;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
         u.vk12->driverID = 0;
         memset(u.vk12->driverName, 0, sizeof(u.vk12->driverName));
         memset(u.vk12->driverInfo, 0, sizeof(u.vk12->driverInfo));
         memset(&u.vk12->conformanceVersion, 0, sizeof(u.vk12->conformanceVersion));
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
         memset(u.id->deviceUUID, 0, sizeof(u.id->deviceUUID));
         memset(u.id->driverUUID, 0, sizeof(u.id->driverUUID));
         memset(u.id->deviceLUID, 0, sizeof(u.id->deviceLUID));
         u.id->deviceNodeMask = 0;
         u.id->deviceLUIDValid = false;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES:
         u.driver->driverID = 0;
         memset(u.driver->driverName, 0, sizeof(u.driver->driverName));
         memset(u.driver->driverInfo, 0, sizeof(u.driver->driverInfo));
         memset(&u.driver->conformanceVersion, 0, sizeof(u.driver->conformanceVersion));
         break;
      default:
         break;
      }

      u.pnext = u.pnext->pNext;
   }
}

static void
vkr_dispatch_vkGetPhysicalDeviceQueueFamilyProperties2(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceQueueFamilyProperties2 *args)
{
   vn_replace_vkGetPhysicalDeviceQueueFamilyProperties2_args_handle(args);
   vkGetPhysicalDeviceQueueFamilyProperties2(args->physicalDevice, args->pQueueFamilyPropertyCount, args->pQueueFamilyProperties);
}

static void
vkr_dispatch_vkGetPhysicalDeviceMemoryProperties2(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceMemoryProperties2 *args)
{
   /* TODO lie about this */
   vn_replace_vkGetPhysicalDeviceMemoryProperties2_args_handle(args);
   vkGetPhysicalDeviceMemoryProperties2(args->physicalDevice, args->pMemoryProperties);
}

static void
vkr_dispatch_vkGetPhysicalDeviceFormatProperties2(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceFormatProperties2 *args)
{
   vn_replace_vkGetPhysicalDeviceFormatProperties2_args_handle(args);
   vkGetPhysicalDeviceFormatProperties2(args->physicalDevice, args->format, args->pFormatProperties);
}

static void
vkr_dispatch_vkGetPhysicalDeviceImageFormatProperties2(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceImageFormatProperties2 *args)
{
   vn_replace_vkGetPhysicalDeviceImageFormatProperties2_args_handle(args);
   args->ret = vkGetPhysicalDeviceImageFormatProperties2(args->physicalDevice, args->pImageFormatInfo, args->pImageFormatProperties);
}

static void
vkr_dispatch_vkGetPhysicalDeviceSparseImageFormatProperties2(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPhysicalDeviceSparseImageFormatProperties2 *args)
{
   vn_replace_vkGetPhysicalDeviceSparseImageFormatProperties2_args_handle(args);
   vkGetPhysicalDeviceSparseImageFormatProperties2(args->physicalDevice, args->pFormatInfo, args->pPropertyCount, args->pProperties);
}

static void
vkr_queue_retire_syncs(struct vkr_queue *queue,
                       struct list_head *retired_syncs,
                       bool *queue_empty)
{
   struct vkr_device *dev = queue->device;
   struct vkr_queue_sync *sync, *tmp;

   list_inithead(retired_syncs);

   if (queue->has_thread) {
      mtx_lock(&queue->mutex);

      LIST_FOR_EACH_ENTRY_SAFE(sync, tmp, &queue->signaled_syncs, head) {
         if (sync->head.next == &queue->signaled_syncs ||
             !(sync->flags & VIRGL_RENDERER_FENCE_FLAG_MERGEABLE))
            list_addtail(&sync->head, retired_syncs);
         else
            list_addtail(&sync->head, &dev->free_syncs);
      }
      list_inithead(&queue->signaled_syncs);

      *queue_empty = LIST_IS_EMPTY(&queue->pending_syncs);

      mtx_unlock(&queue->mutex);
   } else {
      LIST_FOR_EACH_ENTRY_SAFE(sync, tmp, &queue->pending_syncs, head) {
         VkResult result = vkGetFenceStatus(dev->base.handle.device, sync->fence);
         if (result == VK_NOT_READY)
            break;

         list_del(&sync->head);
         if (sync->head.next == &queue->pending_syncs ||
             !(sync->flags & VIRGL_RENDERER_FENCE_FLAG_MERGEABLE))
            list_addtail(&sync->head, retired_syncs);
         else
            list_addtail(&sync->head, &dev->free_syncs);
      }

      *queue_empty = LIST_IS_EMPTY(&queue->pending_syncs);
   }
}

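/*
 * A note on VIRGL_RENDERER_FENCE_FLAG_MERGEABLE above: a signaled sync is
 * retired (reported to the caller) only when it is the last signaled sync
 * or is not mergeable; older mergeable syncs are recycled to free_syncs
 * without being reported, since signaling the newest one implies them.
 */
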
static int
vkr_queue_thread(void *arg)
{
   struct vkr_queue *queue = arg;
   struct vkr_device *dev = queue->device;
   const uint64_t ns_per_sec = 1000000000llu;

   mtx_lock(&queue->mutex);
   while (true) {
      while (LIST_IS_EMPTY(&queue->pending_syncs) && !queue->join)
         cnd_wait(&queue->cond, &queue->mutex);

      if (queue->join)
         break;

      struct vkr_queue_sync *sync =
         LIST_ENTRY(struct vkr_queue_sync, queue->pending_syncs.next, head);

      mtx_unlock(&queue->mutex);

      VkResult result = vkWaitForFences(dev->base.handle.device, 1, &sync->fence,
                                        false, ns_per_sec * 3);

      mtx_lock(&queue->mutex);

      if (result != VK_TIMEOUT) {
         list_del(&sync->head);
         list_addtail(&sync->head, &queue->signaled_syncs);
         write_eventfd(queue->eventfd, 1);
      }
   }
   mtx_unlock(&queue->mutex);

   return 0;
}

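/*
 * The 3-second timeout above only bounds a single vkWaitForFences call; on
 * VK_TIMEOUT the loop simply waits on the same sync again.  Signaled syncs
 * are moved to signaled_syncs and the context is notified through the
 * eventfd shared with every queue thread.
 */
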
static void
vkr_queue_destroy(struct vkr_context *ctx,
                  struct vkr_queue *queue)
{
   struct vkr_queue_sync *sync, *tmp;

   if (queue->has_thread) {
      mtx_lock(&queue->mutex);
      queue->join = true;
      mtx_unlock(&queue->mutex);

      cnd_signal(&queue->cond);
      thrd_join(queue->thread, NULL);

      LIST_FOR_EACH_ENTRY_SAFE(sync, tmp, &queue->signaled_syncs, head)
         list_addtail(&sync->head, &queue->device->free_syncs);
   } else {
      assert(LIST_IS_EMPTY(&queue->signaled_syncs));
   }

   LIST_FOR_EACH_ENTRY_SAFE(sync, tmp, &queue->pending_syncs, head)
      list_addtail(&sync->head, &queue->device->free_syncs);

   mtx_destroy(&queue->mutex);
   cnd_destroy(&queue->cond);

   list_del(&queue->head);
   list_del(&queue->busy_head);

   util_hash_table_remove_u64(ctx->object_table, queue->base.id);
}

static struct vkr_queue *
vkr_queue_create(struct vkr_context *ctx,
                 struct vkr_device *dev,
                 vkr_object_id id,
                 VkQueue handle,
                 uint32_t family,
                 uint32_t index)
{
   struct vkr_queue *queue;
   int ret;

   LIST_FOR_EACH_ENTRY(queue, &dev->queues, head) {
      if (queue->family == family && queue->index == index)
         return queue;
   }

   queue = calloc(1, sizeof(*queue));
   if (!queue)
      return NULL;

   queue->base.type = VK_OBJECT_TYPE_QUEUE;
   queue->base.id = id;
   queue->base.handle.queue = handle;

   queue->device = dev;
   queue->family = family;
   queue->index = index;

   list_inithead(&queue->pending_syncs);
   list_inithead(&queue->signaled_syncs);

   ret = mtx_init(&queue->mutex, mtx_plain);
   if (ret != thrd_success) {
      free(queue);
      return NULL;
   }
   ret = cnd_init(&queue->cond);
   if (ret != thrd_success) {
      mtx_destroy(&queue->mutex);
      free(queue);
      return NULL;
   }

   if (ctx->fence_eventfd >= 0) {
      ret = thrd_create(&queue->thread, vkr_queue_thread, queue);
      if (ret != thrd_success) {
         mtx_destroy(&queue->mutex);
         cnd_destroy(&queue->cond);
         free(queue);
         return NULL;
      }
      queue->has_thread = true;
      queue->eventfd = ctx->fence_eventfd;
   }

   list_addtail(&queue->head, &dev->queues);
   list_inithead(&queue->busy_head);

   util_hash_table_set_u64(ctx->object_table, queue->base.id, queue);

   return queue;
}

static void
vkr_dispatch_vkCreateDevice(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateDevice *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_physical_device *physical_dev = (struct vkr_physical_device *)args->physicalDevice;
   if (!physical_dev || physical_dev->base.type != VK_OBJECT_TYPE_PHYSICAL_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   /* append extensions for our own use */
   const char **exts = NULL;
   uint32_t ext_count = args->pCreateInfo->enabledExtensionCount;
   ext_count += physical_dev->KHR_external_memory_fd;
   ext_count += physical_dev->EXT_external_memory_dma_buf;
   ext_count += physical_dev->KHR_external_fence_fd;
   if (ext_count > args->pCreateInfo->enabledExtensionCount) {
      exts = malloc(sizeof(*exts) * ext_count);
      if (!exts) {
         args->ret = VK_ERROR_OUT_OF_HOST_MEMORY;
         return;
      }
      for (uint32_t i = 0; i < args->pCreateInfo->enabledExtensionCount; i++)
         exts[i] = args->pCreateInfo->ppEnabledExtensionNames[i];

      ext_count = args->pCreateInfo->enabledExtensionCount;
      if (physical_dev->KHR_external_memory_fd)
         exts[ext_count++] = "VK_KHR_external_memory_fd";
      if (physical_dev->EXT_external_memory_dma_buf)
         exts[ext_count++] = "VK_EXT_external_memory_dma_buf";
      if (physical_dev->KHR_external_fence_fd)
         exts[ext_count++] = "VK_KHR_external_fence_fd";

      ((VkDeviceCreateInfo *)args->pCreateInfo)->ppEnabledExtensionNames = exts;
      ((VkDeviceCreateInfo *)args->pCreateInfo)->enabledExtensionCount = ext_count;
   }

   struct vkr_device *dev = calloc(1, sizeof(*dev));
   if (!dev) {
      args->ret = VK_ERROR_OUT_OF_HOST_MEMORY;
      free(exts);
      return;
   }

   dev->base.type = VK_OBJECT_TYPE_DEVICE;
   dev->base.id = vkr_cs_handle_load_id((const void **)args->pDevice, dev->base.type);

   vn_replace_vkCreateDevice_args_handle(args);
   args->ret = vkCreateDevice(args->physicalDevice, args->pCreateInfo, NULL, &dev->base.handle.device);
   if (args->ret != VK_SUCCESS) {
      free(exts);
      free(dev);
      return;
   }

   free(exts);

   dev->physical_device = physical_dev;

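   /*
    * Resolve Vulkan 1.2 entry points.  On a 1.1 driver the core names are
    * unavailable, so fall back to the equivalent KHR/EXT aliases, which
    * share the same function signatures.
    */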
   VkDevice handle = dev->base.handle.device;
   if (physical_dev->api_version >= VK_API_VERSION_1_2) {
      dev->GetSemaphoreCounterValue = (PFN_vkGetSemaphoreCounterValue)
         vkGetDeviceProcAddr(handle, "vkGetSemaphoreCounterValue");
      dev->WaitSemaphores = (PFN_vkWaitSemaphores)
         vkGetDeviceProcAddr(handle, "vkWaitSemaphores");
      dev->SignalSemaphore = (PFN_vkSignalSemaphore)
         vkGetDeviceProcAddr(handle, "vkSignalSemaphore");
      dev->GetDeviceMemoryOpaqueCaptureAddress = (PFN_vkGetDeviceMemoryOpaqueCaptureAddress)
         vkGetDeviceProcAddr(handle, "vkGetDeviceMemoryOpaqueCaptureAddress");
      dev->GetBufferOpaqueCaptureAddress = (PFN_vkGetBufferOpaqueCaptureAddress)
         vkGetDeviceProcAddr(handle, "vkGetBufferOpaqueCaptureAddress");
      dev->GetBufferDeviceAddress = (PFN_vkGetBufferDeviceAddress)
         vkGetDeviceProcAddr(handle, "vkGetBufferDeviceAddress");
      dev->ResetQueryPool = (PFN_vkResetQueryPool)
         vkGetDeviceProcAddr(handle, "vkResetQueryPool");
      dev->CreateRenderPass2 = (PFN_vkCreateRenderPass2)
         vkGetDeviceProcAddr(handle, "vkCreateRenderPass2");
      dev->CmdBeginRenderPass2 = (PFN_vkCmdBeginRenderPass2)
         vkGetDeviceProcAddr(handle, "vkCmdBeginRenderPass2");
      dev->CmdNextSubpass2 = (PFN_vkCmdNextSubpass2)
         vkGetDeviceProcAddr(handle, "vkCmdNextSubpass2");
      dev->CmdEndRenderPass2 = (PFN_vkCmdEndRenderPass2)
         vkGetDeviceProcAddr(handle, "vkCmdEndRenderPass2");
      dev->CmdDrawIndirectCount = (PFN_vkCmdDrawIndirectCount)
         vkGetDeviceProcAddr(handle, "vkCmdDrawIndirectCount");
      dev->CmdDrawIndexedIndirectCount = (PFN_vkCmdDrawIndexedIndirectCount)
         vkGetDeviceProcAddr(handle, "vkCmdDrawIndexedIndirectCount");
   } else {
      dev->GetSemaphoreCounterValue = (PFN_vkGetSemaphoreCounterValue)
         vkGetDeviceProcAddr(handle, "vkGetSemaphoreCounterValueKHR");
      dev->WaitSemaphores = (PFN_vkWaitSemaphores)
         vkGetDeviceProcAddr(handle, "vkWaitSemaphoresKHR");
      dev->SignalSemaphore = (PFN_vkSignalSemaphore)
         vkGetDeviceProcAddr(handle, "vkSignalSemaphoreKHR");
      dev->GetDeviceMemoryOpaqueCaptureAddress = (PFN_vkGetDeviceMemoryOpaqueCaptureAddress)
         vkGetDeviceProcAddr(handle, "vkGetDeviceMemoryOpaqueCaptureAddressKHR");
      dev->GetBufferOpaqueCaptureAddress = (PFN_vkGetBufferOpaqueCaptureAddress)
         vkGetDeviceProcAddr(handle, "vkGetBufferOpaqueCaptureAddressKHR");
      dev->GetBufferDeviceAddress = (PFN_vkGetBufferDeviceAddress)
         vkGetDeviceProcAddr(handle, "vkGetBufferDeviceAddressKHR");
      dev->ResetQueryPool = (PFN_vkResetQueryPool)
         vkGetDeviceProcAddr(handle, "vkResetQueryPoolEXT");
      dev->CreateRenderPass2 = (PFN_vkCreateRenderPass2)
         vkGetDeviceProcAddr(handle, "vkCreateRenderPass2KHR");
      dev->CmdBeginRenderPass2 = (PFN_vkCmdBeginRenderPass2)
         vkGetDeviceProcAddr(handle, "vkCmdBeginRenderPass2KHR");
      dev->CmdNextSubpass2 = (PFN_vkCmdNextSubpass2)
         vkGetDeviceProcAddr(handle, "vkCmdNextSubpass2KHR");
      dev->CmdEndRenderPass2 = (PFN_vkCmdEndRenderPass2)
         vkGetDeviceProcAddr(handle, "vkCmdEndRenderPass2KHR");
      dev->CmdDrawIndirectCount = (PFN_vkCmdDrawIndirectCount)
         vkGetDeviceProcAddr(handle, "vkCmdDrawIndirectCountKHR");
      dev->CmdDrawIndexedIndirectCount = (PFN_vkCmdDrawIndexedIndirectCount)
         vkGetDeviceProcAddr(handle, "vkCmdDrawIndexedIndirectCountKHR");
   }

   dev->cmd_bind_transform_feedback_buffers = (PFN_vkCmdBindTransformFeedbackBuffersEXT)
      vkGetDeviceProcAddr(handle, "vkCmdBindTransformFeedbackBuffersEXT");
   dev->cmd_begin_transform_feedback = (PFN_vkCmdBeginTransformFeedbackEXT)
      vkGetDeviceProcAddr(handle, "vkCmdBeginTransformFeedbackEXT");
   dev->cmd_end_transform_feedback = (PFN_vkCmdEndTransformFeedbackEXT)
      vkGetDeviceProcAddr(handle, "vkCmdEndTransformFeedbackEXT");
   dev->cmd_begin_query_indexed = (PFN_vkCmdBeginQueryIndexedEXT)
      vkGetDeviceProcAddr(handle, "vkCmdBeginQueryIndexedEXT");
   dev->cmd_end_query_indexed = (PFN_vkCmdEndQueryIndexedEXT)
      vkGetDeviceProcAddr(handle, "vkCmdEndQueryIndexedEXT");
   dev->cmd_draw_indirect_byte_count = (PFN_vkCmdDrawIndirectByteCountEXT)
      vkGetDeviceProcAddr(handle, "vkCmdDrawIndirectByteCountEXT");

   dev->get_image_drm_format_modifier_properties = (PFN_vkGetImageDrmFormatModifierPropertiesEXT)
      vkGetDeviceProcAddr(handle, "vkGetImageDrmFormatModifierPropertiesEXT");

   dev->get_memory_fd_properties = (PFN_vkGetMemoryFdPropertiesKHR)
      vkGetDeviceProcAddr(handle, "vkGetMemoryFdPropertiesKHR");

   list_inithead(&dev->queues);
   list_inithead(&dev->free_syncs);

   util_hash_table_set_u64(ctx->object_table, dev->base.id, dev);
}

static void
vkr_dispatch_vkDestroyDevice(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyDevice *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_device *dev = (struct vkr_device *)args->device;
   if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
      if (dev)
         vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   /* TODO cleanup all objects here? */
   struct vkr_queue *queue, *queue_tmp;
   LIST_FOR_EACH_ENTRY_SAFE(queue, queue_tmp, &dev->queues, head)
      vkr_queue_destroy(ctx, queue);

   struct vkr_queue_sync *sync, *sync_tmp;
   LIST_FOR_EACH_ENTRY_SAFE(sync, sync_tmp, &dev->free_syncs, head) {
      vkDestroyFence(dev->base.handle.device, sync->fence, NULL);
      free(sync);
   }

   vn_replace_vkDestroyDevice_args_handle(args);
   vkDestroyDevice(args->device, NULL);

   util_hash_table_remove_u64(ctx->object_table, dev->base.id);
}

static void
vkr_dispatch_vkGetDeviceGroupPeerMemoryFeatures(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetDeviceGroupPeerMemoryFeatures *args)
{
   vn_replace_vkGetDeviceGroupPeerMemoryFeatures_args_handle(args);
   vkGetDeviceGroupPeerMemoryFeatures(args->device, args->heapIndex, args->localDeviceIndex, args->remoteDeviceIndex, args->pPeerMemoryFeatures);
}

static void
vkr_dispatch_vkDeviceWaitIdle(struct vn_dispatch_context *dispatch, UNUSED struct vn_command_vkDeviceWaitIdle *args)
{
   struct vkr_context *ctx = dispatch->data;
   /* no blocking call */
   vkr_cs_decoder_set_fatal(&ctx->decoder);
}

static void
vkr_dispatch_vkGetDeviceQueue(struct vn_dispatch_context *dispatch, struct vn_command_vkGetDeviceQueue *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_device *dev = (struct vkr_device *)args->device;
   if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   const vkr_object_id id =
      vkr_cs_handle_load_id((const void **)args->pQueue, VK_OBJECT_TYPE_QUEUE);

   VkQueue handle;
   vn_replace_vkGetDeviceQueue_args_handle(args);
   vkGetDeviceQueue(args->device, args->queueFamilyIndex, args->queueIndex, &handle);

   struct vkr_queue *queue = vkr_queue_create(ctx, dev, id, handle,
                                              args->queueFamilyIndex, args->queueIndex);
   /* TODO create queues with device and deal with failures there */
   if (!queue)
      vrend_printf("failed to create queue\n");
}

static void
vkr_dispatch_vkGetDeviceQueue2(struct vn_dispatch_context *dispatch, struct vn_command_vkGetDeviceQueue2 *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_device *dev = (struct vkr_device *)args->device;
   if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   const vkr_object_id id =
      vkr_cs_handle_load_id((const void **)args->pQueue, VK_OBJECT_TYPE_QUEUE);

   VkQueue handle;
   vn_replace_vkGetDeviceQueue2_args_handle(args);
   vkGetDeviceQueue2(args->device, args->pQueueInfo, &handle);

   /* TODO deal with errors */
   vkr_queue_create(ctx, dev, id, handle, args->pQueueInfo->queueFamilyIndex,
                    args->pQueueInfo->queueIndex);
}

static void
vkr_dispatch_vkQueueSubmit(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkQueueSubmit *args)
{
   vn_replace_vkQueueSubmit_args_handle(args);
   args->ret = vkQueueSubmit(args->queue, args->submitCount, args->pSubmits, args->fence);
}

static void
vkr_dispatch_vkQueueBindSparse(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkQueueBindSparse *args)
{
   vn_replace_vkQueueBindSparse_args_handle(args);
   args->ret = vkQueueBindSparse(args->queue, args->bindInfoCount, args->pBindInfo, args->fence);
}

static void
vkr_dispatch_vkQueueWaitIdle(struct vn_dispatch_context *dispatch, UNUSED struct vn_command_vkQueueWaitIdle *args)
{
   struct vkr_context *ctx = dispatch->data;
   /* no blocking call */
   vkr_cs_decoder_set_fatal(&ctx->decoder);
}

static bool
vkr_get_fd_handle_type_from_virgl_fd_type(struct vkr_physical_device *dev, enum virgl_resource_fd_type fd_type, VkExternalMemoryHandleTypeFlagBits *out_handle_type)
{
   assert(dev);
   assert(out_handle_type);

   switch (fd_type) {
   case VIRGL_RESOURCE_FD_DMABUF:
      if (!dev->EXT_external_memory_dma_buf)
         return false;
      *out_handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
      break;
   case VIRGL_RESOURCE_FD_OPAQUE:
      if (!dev->KHR_external_memory_fd)
         return false;
      *out_handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
      break;
   default:
      return false;
   }

   return true;
}

static void
vkr_dispatch_vkAllocateMemory(struct vn_dispatch_context *dispatch, struct vn_command_vkAllocateMemory *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_device *dev = (struct vkr_device *)args->device;
   if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

#ifdef FORCE_ENABLE_DMABUF
   const VkExportMemoryAllocateInfo export_info = {
      .sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
      .pNext = args->pAllocateInfo->pNext,
      .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
   };
   if (dev->physical_device->EXT_external_memory_dma_buf)
      ((VkMemoryAllocateInfo *)args->pAllocateInfo)->pNext = &export_info;
#endif

   /* translate VkImportMemoryResourceInfoMESA into VkImportMemoryFdInfoKHR */
   VkImportMemoryResourceInfoMESA *import_resource_info = NULL;
   VkImportMemoryFdInfoKHR import_fd_info = {
      .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
      .fd = -1,
   };
   VkBaseInStructure *pprev = (VkBaseInStructure *)args->pAllocateInfo;
   while (pprev->pNext) {
      if (pprev->pNext->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_RESOURCE_INFO_MESA) {
         import_resource_info = (VkImportMemoryResourceInfoMESA *)pprev->pNext;
         import_fd_info.pNext = pprev->pNext->pNext;
         pprev->pNext = (const struct VkBaseInStructure *)&import_fd_info;
         break;
      }
      pprev = (VkBaseInStructure *)pprev->pNext;
   }
   if (import_resource_info) {
      uint32_t res_id = import_resource_info->resourceId;
      struct vkr_resource_attachment *att = util_hash_table_get(ctx->resource_table, uintptr_to_pointer(res_id));
      if (!att || !att->resource) {
         args->ret = VK_ERROR_INVALID_EXTERNAL_HANDLE;
         return;
      }

      enum virgl_resource_fd_type fd_type = virgl_resource_export_fd(att->resource, &import_fd_info.fd);
      if (!vkr_get_fd_handle_type_from_virgl_fd_type(dev->physical_device, fd_type, &import_fd_info.handleType)) {
         close(import_fd_info.fd);
         args->ret = VK_ERROR_INVALID_EXTERNAL_HANDLE;
         return;
      }
   }

Chia-I Wu9b2d22b2020-04-17 15:29:05 -07001713 struct vkr_device_memory *mem = calloc(1, sizeof(*mem));
1714 if (!mem) {
Yiwei Zhang508ff682021-04-13 06:47:38 +00001715 if (import_resource_info)
1716 close(import_fd_info.fd);
Chia-I Wu9b2d22b2020-04-17 15:29:05 -07001717 args->ret = VK_ERROR_OUT_OF_HOST_MEMORY;
1718 return;
1719 }
1720
1721 mem->base.type = VK_OBJECT_TYPE_DEVICE_MEMORY;
1722 mem->base.id = vkr_cs_handle_load_id((const void **)args->pMemory, mem->base.type);
1723
1724 vn_replace_vkAllocateMemory_args_handle(args);
1725 args->ret = vkAllocateMemory(args->device, args->pAllocateInfo, NULL, &mem->base.handle.device_memory);
1726 if (args->ret != VK_SUCCESS) {
Yiwei Zhang508ff682021-04-13 06:47:38 +00001727 if (import_resource_info)
1728 close(import_fd_info.fd);
Chia-I Wu9b2d22b2020-04-17 15:29:05 -07001729 free(mem);
1730 return;
1731 }
1732
1733 const VkPhysicalDeviceMemoryProperties *mem_props = &dev->physical_device->memory_properties;
1734 const uint32_t mt_index = args->pAllocateInfo->memoryTypeIndex;
1735 const uint32_t property_flags = mem_props->memoryTypes[mt_index].propertyFlags;
1736
1737 /* get valid fd types */
1738 uint32_t valid_fd_types = 0;
1739 const VkBaseInStructure *pnext = args->pAllocateInfo->pNext;
1740 while (pnext) {
1741 if (pnext->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO) {
1742 const VkExportMemoryAllocateInfo *export = (const void *)pnext;
1743
1744 if (export->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1745 valid_fd_types |= 1 << VIRGL_RESOURCE_FD_OPAQUE;
1746 if (export->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT)
1747 valid_fd_types |= 1 << VIRGL_RESOURCE_FD_DMABUF;
1748
1749 break;
1750 }
1751 pnext = pnext->pNext;
1752 }
1753
1754 mem->device = args->device;
1755 mem->property_flags = property_flags;
1756 mem->valid_fd_types = valid_fd_types;
1757 list_inithead(&mem->head);
1758
1759 util_hash_table_set_u64(ctx->object_table, mem->base.id, mem);
1760}
1761
1762static void
1763vkr_dispatch_vkFreeMemory(struct vn_dispatch_context *dispatch, struct vn_command_vkFreeMemory *args)
1764{
1765 struct vkr_context *ctx = dispatch->data;
1766
1767 struct vkr_device_memory *mem = (struct vkr_device_memory *)(uintptr_t)args->memory;
1768 if (!mem || mem->base.type != VK_OBJECT_TYPE_DEVICE_MEMORY) {
1769 if (mem)
1770 vkr_cs_decoder_set_fatal(&ctx->decoder);
1771 return;
1772 }
1773
1774 vn_replace_vkFreeMemory_args_handle(args);
1775 vkFreeMemory(args->device, args->memory, NULL);
1776
1777 list_del(&mem->head);
1778
1779 util_hash_table_remove_u64(ctx->object_table, mem->base.id);
1780}
1781
1782static void
1783vkr_dispatch_vkGetDeviceMemoryCommitment(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetDeviceMemoryCommitment *args)
1784{
1785 vn_replace_vkGetDeviceMemoryCommitment_args_handle(args);
1786 vkGetDeviceMemoryCommitment(args->device, args->memory, args->pCommittedMemoryInBytes);
1787}
1788
1789static void
1790vkr_dispatch_vkGetDeviceMemoryOpaqueCaptureAddress(struct vn_dispatch_context *dispatch, struct vn_command_vkGetDeviceMemoryOpaqueCaptureAddress *args)
1791{
1792 struct vkr_context *ctx = dispatch->data;
1793 struct vkr_device *dev = (struct vkr_device *)args->device;
1794 if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
1795 vkr_cs_decoder_set_fatal(&ctx->decoder);
1796 return;
1797 }
1798
1799 vn_replace_vkGetDeviceMemoryOpaqueCaptureAddress_args_handle(args);
1800 args->ret = dev->GetDeviceMemoryOpaqueCaptureAddress(args->device, args->pInfo);
1801}
1802
1803static void
1804vkr_dispatch_vkGetBufferMemoryRequirements(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetBufferMemoryRequirements *args)
1805{
1806 vn_replace_vkGetBufferMemoryRequirements_args_handle(args);
1807 vkGetBufferMemoryRequirements(args->device, args->buffer, args->pMemoryRequirements);
1808}
1809
1810static void
1811vkr_dispatch_vkGetBufferMemoryRequirements2(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetBufferMemoryRequirements2 *args)
1812{
1813 vn_replace_vkGetBufferMemoryRequirements2_args_handle(args);
1814 vkGetBufferMemoryRequirements2(args->device, args->pInfo, args->pMemoryRequirements);
1815}
1816
1817static void
1818vkr_dispatch_vkBindBufferMemory(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkBindBufferMemory *args)
1819{
1820 vn_replace_vkBindBufferMemory_args_handle(args);
1821 args->ret = vkBindBufferMemory(args->device, args->buffer, args->memory, args->memoryOffset);
1822}
1823
1824static void
1825vkr_dispatch_vkBindBufferMemory2(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkBindBufferMemory2 *args)
1826{
1827 vn_replace_vkBindBufferMemory2_args_handle(args);
1828 args->ret = vkBindBufferMemory2(args->device, args->bindInfoCount, args->pBindInfos);
1829}
1830
1831static void
1832vkr_dispatch_vkGetBufferOpaqueCaptureAddress(struct vn_dispatch_context *dispatch, struct vn_command_vkGetBufferOpaqueCaptureAddress *args)
1833{
1834 struct vkr_context *ctx = dispatch->data;
1835 struct vkr_device *dev = (struct vkr_device *)args->device;
1836 if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
1837 vkr_cs_decoder_set_fatal(&ctx->decoder);
1838 return;
1839 }
1840
1841 vn_replace_vkGetBufferOpaqueCaptureAddress_args_handle(args);
1842 args->ret = dev->GetBufferOpaqueCaptureAddress(args->device, args->pInfo);
1843}
1844
1845static void
1846vkr_dispatch_vkGetBufferDeviceAddress(struct vn_dispatch_context *dispatch, struct vn_command_vkGetBufferDeviceAddress *args)
1847{
1848 struct vkr_context *ctx = dispatch->data;
1849 struct vkr_device *dev = (struct vkr_device *)args->device;
1850 if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
1851 vkr_cs_decoder_set_fatal(&ctx->decoder);
1852 return;
1853 }
1854
1855 vn_replace_vkGetBufferDeviceAddress_args_handle(args);
1856 args->ret = dev->GetBufferDeviceAddress(args->device, args->pInfo);
1857}
1858
1859static void
1860vkr_dispatch_vkGetImageMemoryRequirements(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetImageMemoryRequirements *args)
1861{
1862 vn_replace_vkGetImageMemoryRequirements_args_handle(args);
1863 vkGetImageMemoryRequirements(args->device, args->image, args->pMemoryRequirements);
1864}
1865
1866static void
1867vkr_dispatch_vkGetImageMemoryRequirements2(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetImageMemoryRequirements2 *args)
1868{
1869 vn_replace_vkGetImageMemoryRequirements2_args_handle(args);
1870 vkGetImageMemoryRequirements2(args->device, args->pInfo, args->pMemoryRequirements);
1871}
1872
1873static void
1874vkr_dispatch_vkGetImageSparseMemoryRequirements(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetImageSparseMemoryRequirements *args)
1875{
1876 vn_replace_vkGetImageSparseMemoryRequirements_args_handle(args);
1877 vkGetImageSparseMemoryRequirements(args->device, args->image, args->pSparseMemoryRequirementCount, args->pSparseMemoryRequirements);
1878}
1879
1880static void
1881vkr_dispatch_vkGetImageSparseMemoryRequirements2(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetImageSparseMemoryRequirements2 *args)
1882{
1883 vn_replace_vkGetImageSparseMemoryRequirements2_args_handle(args);
1884 vkGetImageSparseMemoryRequirements2(args->device, args->pInfo, args->pSparseMemoryRequirementCount, args->pSparseMemoryRequirements);
1885}
1886
1887static void
1888vkr_dispatch_vkBindImageMemory(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkBindImageMemory *args)
1889{
1890 vn_replace_vkBindImageMemory_args_handle(args);
1891 args->ret = vkBindImageMemory(args->device, args->image, args->memory, args->memoryOffset);
1892}
1893
1894static void
1895vkr_dispatch_vkBindImageMemory2(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkBindImageMemory2 *args)
1896{
1897 vn_replace_vkBindImageMemory2_args_handle(args);
1898 args->ret = vkBindImageMemory2(args->device, args->bindInfoCount, args->pBindInfos);
1899}
1900
1901static void
1902vkr_dispatch_vkGetImageSubresourceLayout(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetImageSubresourceLayout *args)
1903{
1904 vn_replace_vkGetImageSubresourceLayout_args_handle(args);
1905 vkGetImageSubresourceLayout(args->device, args->image, args->pSubresource, args->pLayout);
1906}
1907
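/* Fence, semaphore, and event objects follow the generic pattern: the new
 * wrapper is registered in ctx->object_table under its client-visible id,
 * and removed again on destruction.
 */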
static void
vkr_dispatch_vkCreateFence(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateFence *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(fence, fence, FENCE, vkCreateFence, pFence);

   util_hash_table_set_u64(ctx->object_table, fence->base.id, fence);
}

static void
vkr_dispatch_vkDestroyFence(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyFence *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(fence, fence, FENCE, vkDestroyFence, fence);

   util_hash_table_remove_u64(ctx->object_table, fence->base.id);
}

static void
vkr_dispatch_vkResetFences(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkResetFences *args)
{
   vn_replace_vkResetFences_args_handle(args);
   args->ret = vkResetFences(args->device, args->fenceCount, args->pFences);
}

static void
vkr_dispatch_vkGetFenceStatus(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetFenceStatus *args)
{
   vn_replace_vkGetFenceStatus_args_handle(args);
   args->ret = vkGetFenceStatus(args->device, args->fence);
}

static void
vkr_dispatch_vkWaitForFences(struct vn_dispatch_context *dispatch, struct vn_command_vkWaitForFences *args)
{
   struct vkr_context *ctx = dispatch->data;

   /* Being single-threaded, we cannot afford potentially blocking calls.
    * They can also lead to a lost GPU context when a wait never returns and
    * can only be unblocked by a later command (e.g., a vkCmdWaitEvents that
    * is unblocked by a following vkSetEvent).
    */
   if (args->timeout) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   vn_replace_vkWaitForFences_args_handle(args);
   args->ret = vkWaitForFences(args->device, args->fenceCount, args->pFences, args->waitAll, args->timeout);
}

static void
vkr_dispatch_vkCreateSemaphore(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateSemaphore *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(sem, semaphore, SEMAPHORE, vkCreateSemaphore, pSemaphore);

   util_hash_table_set_u64(ctx->object_table, sem->base.id, sem);
}

static void
vkr_dispatch_vkDestroySemaphore(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroySemaphore *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(sem, semaphore, SEMAPHORE, vkDestroySemaphore, semaphore);

   util_hash_table_remove_u64(ctx->object_table, sem->base.id);
}

static void
vkr_dispatch_vkGetSemaphoreCounterValue(struct vn_dispatch_context *dispatch, struct vn_command_vkGetSemaphoreCounterValue *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_device *dev = (struct vkr_device *)args->device;
   if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   vn_replace_vkGetSemaphoreCounterValue_args_handle(args);
   args->ret = dev->GetSemaphoreCounterValue(args->device, args->semaphore, args->pValue);
}

static void
vkr_dispatch_vkWaitSemaphores(struct vn_dispatch_context *dispatch, struct vn_command_vkWaitSemaphores *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_device *dev = (struct vkr_device *)args->device;

   if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   /* no blocking call, for the same reason as vkWaitForFences above */
   if (args->timeout) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   vn_replace_vkWaitSemaphores_args_handle(args);
   args->ret = dev->WaitSemaphores(args->device, args->pWaitInfo, args->timeout);
}

static void
vkr_dispatch_vkSignalSemaphore(struct vn_dispatch_context *dispatch, struct vn_command_vkSignalSemaphore *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_device *dev = (struct vkr_device *)args->device;
   if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   vn_replace_vkSignalSemaphore_args_handle(args);
   args->ret = dev->SignalSemaphore(args->device, args->pSignalInfo);
}
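/* Like vkAllocateMemory above, buffer and image creation force-enable
 * dma-buf external memory by injecting a create-info struct at the head of
 * the guest-supplied pNext chain when the host driver supports it.
 */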
static void
vkr_dispatch_vkCreateBuffer(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateBuffer *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_device *dev = (struct vkr_device *)args->device;
   if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

#ifdef FORCE_ENABLE_DMABUF
   const VkExternalMemoryBufferCreateInfo external_info = {
      .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
      .pNext = args->pCreateInfo->pNext,
      .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
   };
   if (dev->physical_device->EXT_external_memory_dma_buf)
      ((VkBufferCreateInfo *)args->pCreateInfo)->pNext = &external_info;
#endif

   CREATE_OBJECT(buf, buffer, BUFFER, vkCreateBuffer, pBuffer);

   util_hash_table_set_u64(ctx->object_table, buf->base.id, buf);
}

static void
vkr_dispatch_vkDestroyBuffer(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyBuffer *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(buf, buffer, BUFFER, vkDestroyBuffer, buffer);

   util_hash_table_remove_u64(ctx->object_table, buf->base.id);
}

static void
vkr_dispatch_vkCreateBufferView(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateBufferView *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(view, buffer_view, BUFFER_VIEW, vkCreateBufferView, pView);

   util_hash_table_set_u64(ctx->object_table, view->base.id, view);
}

static void
vkr_dispatch_vkDestroyBufferView(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyBufferView *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(view, buffer_view, BUFFER_VIEW, vkDestroyBufferView, bufferView);

   util_hash_table_remove_u64(ctx->object_table, view->base.id);
}

static void
vkr_dispatch_vkCreateImage(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateImage *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_device *dev = (struct vkr_device *)args->device;
   if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

#ifdef FORCE_ENABLE_DMABUF
   const VkExternalMemoryImageCreateInfo external_info = {
      .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
      .pNext = args->pCreateInfo->pNext,
      .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
   };
   if (dev->physical_device->EXT_external_memory_dma_buf)
      ((VkImageCreateInfo *)args->pCreateInfo)->pNext = &external_info;
#endif

   CREATE_OBJECT(img, image, IMAGE, vkCreateImage, pImage);

   util_hash_table_set_u64(ctx->object_table, img->base.id, img);
}

static void
vkr_dispatch_vkDestroyImage(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyImage *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(img, image, IMAGE, vkDestroyImage, image);

   util_hash_table_remove_u64(ctx->object_table, img->base.id);
}

static void
vkr_dispatch_vkCreateImageView(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateImageView *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(view, image_view, IMAGE_VIEW, vkCreateImageView, pView);

   util_hash_table_set_u64(ctx->object_table, view->base.id, view);
}

static void
vkr_dispatch_vkDestroyImageView(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyImageView *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(view, image_view, IMAGE_VIEW, vkDestroyImageView, imageView);

   util_hash_table_remove_u64(ctx->object_table, view->base.id);
}

static void
vkr_dispatch_vkCreateSampler(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateSampler *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(sampler, sampler, SAMPLER, vkCreateSampler, pSampler);

   util_hash_table_set_u64(ctx->object_table, sampler->base.id, sampler);
}

static void
vkr_dispatch_vkDestroySampler(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroySampler *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(sampler, sampler, SAMPLER, vkDestroySampler, sampler);

   util_hash_table_remove_u64(ctx->object_table, sampler->base.id);
}

static void
vkr_dispatch_vkCreateSamplerYcbcrConversion(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateSamplerYcbcrConversion *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(conv, sampler_ycbcr_conversion, SAMPLER_YCBCR_CONVERSION, vkCreateSamplerYcbcrConversion, pYcbcrConversion);

   util_hash_table_set_u64(ctx->object_table, conv->base.id, conv);
}

static void
vkr_dispatch_vkDestroySamplerYcbcrConversion(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroySamplerYcbcrConversion *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(conv, sampler_ycbcr_conversion, SAMPLER_YCBCR_CONVERSION, vkDestroySamplerYcbcrConversion, ycbcrConversion);

   util_hash_table_remove_u64(ctx->object_table, conv->base.id);
}

static void
vkr_dispatch_vkGetDescriptorSetLayoutSupport(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetDescriptorSetLayoutSupport *args)
{
   vn_replace_vkGetDescriptorSetLayoutSupport_args_handle(args);
   vkGetDescriptorSetLayoutSupport(args->device, args->pCreateInfo, args->pSupport);
}

static void
vkr_dispatch_vkCreateDescriptorSetLayout(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateDescriptorSetLayout *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(layout, descriptor_set_layout, DESCRIPTOR_SET_LAYOUT, vkCreateDescriptorSetLayout, pSetLayout);

   util_hash_table_set_u64(ctx->object_table, layout->base.id, layout);
}

static void
vkr_dispatch_vkDestroyDescriptorSetLayout(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyDescriptorSetLayout *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(layout, descriptor_set_layout, DESCRIPTOR_SET_LAYOUT, vkDestroyDescriptorSetLayout, descriptorSetLayout);

   util_hash_table_remove_u64(ctx->object_table, layout->base.id);
}

static void
vkr_dispatch_vkCreateDescriptorPool(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateDescriptorPool *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(pool, descriptor_pool, DESCRIPTOR_POOL, vkCreateDescriptorPool, pDescriptorPool);

   list_inithead(&pool->descriptor_sets);

   util_hash_table_set_u64(ctx->object_table, pool->base.id, pool);
}
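/* Destroying a pool implicitly frees every descriptor set allocated from
 * it, so the tracked sets must also be dropped from the object table.
 */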
static void
vkr_dispatch_vkDestroyDescriptorPool(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyDescriptorPool *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(pool, descriptor_pool, DESCRIPTOR_POOL, vkDestroyDescriptorPool, descriptorPool);

   struct vkr_descriptor_set *set, *tmp;
   LIST_FOR_EACH_ENTRY_SAFE(set, tmp, &pool->descriptor_sets, head)
      util_hash_table_remove_u64(ctx->object_table, set->base.id);

   util_hash_table_remove_u64(ctx->object_table, pool->base.id);
}
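/* vkResetDescriptorPool likewise implicitly frees all sets in the pool;
 * drop them from the object table and restart tracking from an empty list.
 */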
static void
vkr_dispatch_vkResetDescriptorPool(struct vn_dispatch_context *dispatch, struct vn_command_vkResetDescriptorPool *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_descriptor_pool *pool = (struct vkr_descriptor_pool *)(uintptr_t)args->descriptorPool;
   if (!pool || pool->base.type != VK_OBJECT_TYPE_DESCRIPTOR_POOL) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   vn_replace_vkResetDescriptorPool_args_handle(args);
   args->ret = vkResetDescriptorPool(args->device, args->descriptorPool, args->flags);

   struct vkr_descriptor_set *set, *tmp;
   LIST_FOR_EACH_ENTRY_SAFE(set, tmp, &pool->descriptor_sets, head)
      util_hash_table_remove_u64(ctx->object_table, set->base.id);
   list_inithead(&pool->descriptor_sets);
}
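/* Descriptor sets are allocated in batches: object_array first reserves the
 * wrappers and handle storage, and only after the driver call succeeds are
 * the wrappers linked into the pool and published to the object table.
 */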
static void
vkr_dispatch_vkAllocateDescriptorSets(struct vn_dispatch_context *dispatch, struct vn_command_vkAllocateDescriptorSets *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct vkr_descriptor_pool *pool = (struct vkr_descriptor_pool *)(uintptr_t)args->pAllocateInfo->descriptorPool;
   if (!pool || pool->base.type != VK_OBJECT_TYPE_DESCRIPTOR_POOL) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   struct object_array arr;
   if (!object_array_init(&arr,
                          args->pAllocateInfo->descriptorSetCount,
                          VK_OBJECT_TYPE_DESCRIPTOR_SET,
                          sizeof(struct vkr_descriptor_set),
                          sizeof(VkDescriptorSet),
                          args->pDescriptorSets)) {
      args->ret = VK_ERROR_OUT_OF_HOST_MEMORY;
      return;
   }

   vn_replace_vkAllocateDescriptorSets_args_handle(args);
   args->ret = vkAllocateDescriptorSets(args->device, args->pAllocateInfo, arr.handle_storage);
   if (args->ret != VK_SUCCESS) {
      object_array_fini(&arr);
      return;
   }

   for (uint32_t i = 0; i < arr.count; i++) {
      struct vkr_descriptor_set *set = arr.objects[i];

      set->base.handle.descriptor_set = ((VkDescriptorSet *)arr.handle_storage)[i];
      list_add(&set->head, &pool->descriptor_sets);

      util_hash_table_set_u64(ctx->object_table, set->base.id, set);
   }

   arr.objects_stolen = true;
   object_array_fini(&arr);
}
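/* Move the sets being freed onto a local list first: this both validates
 * every handle before the driver call and unlinks the sets from their pool.
 */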
static void
vkr_dispatch_vkFreeDescriptorSets(struct vn_dispatch_context *dispatch, struct vn_command_vkFreeDescriptorSets *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct list_head free_sets;

   list_inithead(&free_sets);
   for (uint32_t i = 0; i < args->descriptorSetCount; i++) {
      struct vkr_descriptor_set *set = (struct vkr_descriptor_set *)(uintptr_t)args->pDescriptorSets[i];
      if (!set)
         continue;
      if (set->base.type != VK_OBJECT_TYPE_DESCRIPTOR_SET) {
         vkr_cs_decoder_set_fatal(&ctx->decoder);
         return;
      }

      list_del(&set->head);
      list_addtail(&set->head, &free_sets);
   }

   vn_replace_vkFreeDescriptorSets_args_handle(args);
   args->ret = vkFreeDescriptorSets(args->device, args->descriptorPool, args->descriptorSetCount, args->pDescriptorSets);

   struct vkr_descriptor_set *set, *tmp;
   LIST_FOR_EACH_ENTRY_SAFE(set, tmp, &free_sets, head)
      util_hash_table_remove_u64(ctx->object_table, set->base.id);
}

static void
vkr_dispatch_vkUpdateDescriptorSets(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkUpdateDescriptorSets *args)
{
   vn_replace_vkUpdateDescriptorSets_args_handle(args);
   vkUpdateDescriptorSets(args->device, args->descriptorWriteCount, args->pDescriptorWrites, args->descriptorCopyCount, args->pDescriptorCopies);
}

static void
vkr_dispatch_vkCreateDescriptorUpdateTemplate(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateDescriptorUpdateTemplate *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(templ, descriptor_update_template, DESCRIPTOR_UPDATE_TEMPLATE, vkCreateDescriptorUpdateTemplate, pDescriptorUpdateTemplate);

   util_hash_table_set_u64(ctx->object_table, templ->base.id, templ);
}

static void
vkr_dispatch_vkDestroyDescriptorUpdateTemplate(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyDescriptorUpdateTemplate *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(templ, descriptor_update_template, DESCRIPTOR_UPDATE_TEMPLATE, vkDestroyDescriptorUpdateTemplate, descriptorUpdateTemplate);

   util_hash_table_remove_u64(ctx->object_table, templ->base.id);
}

static void
vkr_dispatch_vkCreateRenderPass(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateRenderPass *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(pass, render_pass, RENDER_PASS, vkCreateRenderPass, pRenderPass);

   util_hash_table_set_u64(ctx->object_table, pass->base.id, pass);
}

static void
vkr_dispatch_vkCreateRenderPass2(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateRenderPass2 *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_device *dev = (struct vkr_device *)args->device;
   if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   struct vkr_render_pass *pass = calloc(1, sizeof(*pass));
   if (!pass) {
      args->ret = VK_ERROR_OUT_OF_HOST_MEMORY;
      return;
   }
   pass->base.type = VK_OBJECT_TYPE_RENDER_PASS;
   pass->base.id = vkr_cs_handle_load_id((const void **)args->pRenderPass, pass->base.type);

   vn_replace_vkCreateRenderPass2_args_handle(args);
   args->ret = dev->CreateRenderPass2(args->device, args->pCreateInfo,
                                      NULL, &pass->base.handle.render_pass);
   if (args->ret != VK_SUCCESS) {
      free(pass);
      return;
   }

   util_hash_table_set_u64(ctx->object_table, pass->base.id, pass);
}

static void
vkr_dispatch_vkDestroyRenderPass(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyRenderPass *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(pass, render_pass, RENDER_PASS, vkDestroyRenderPass, renderPass);

   util_hash_table_remove_u64(ctx->object_table, pass->base.id);
}

static void
vkr_dispatch_vkGetRenderAreaGranularity(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetRenderAreaGranularity *args)
{
   vn_replace_vkGetRenderAreaGranularity_args_handle(args);
   vkGetRenderAreaGranularity(args->device, args->renderPass, args->pGranularity);
}

static void
vkr_dispatch_vkCreateFramebuffer(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateFramebuffer *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(fb, framebuffer, FRAMEBUFFER, vkCreateFramebuffer, pFramebuffer);

   util_hash_table_set_u64(ctx->object_table, fb->base.id, fb);
}

static void
vkr_dispatch_vkDestroyFramebuffer(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyFramebuffer *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(fb, framebuffer, FRAMEBUFFER, vkDestroyFramebuffer, framebuffer);

   util_hash_table_remove_u64(ctx->object_table, fb->base.id);
}

static void
vkr_dispatch_vkCreateEvent(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateEvent *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(ev, event, EVENT, vkCreateEvent, pEvent);

   util_hash_table_set_u64(ctx->object_table, ev->base.id, ev);
}

static void
vkr_dispatch_vkDestroyEvent(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyEvent *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(ev, event, EVENT, vkDestroyEvent, event);

   util_hash_table_remove_u64(ctx->object_table, ev->base.id);
}

static void
vkr_dispatch_vkGetEventStatus(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetEventStatus *args)
{
   vn_replace_vkGetEventStatus_args_handle(args);
   args->ret = vkGetEventStatus(args->device, args->event);
}

static void
vkr_dispatch_vkSetEvent(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkSetEvent *args)
{
   vn_replace_vkSetEvent_args_handle(args);
   args->ret = vkSetEvent(args->device, args->event);
}

static void
vkr_dispatch_vkResetEvent(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkResetEvent *args)
{
   vn_replace_vkResetEvent_args_handle(args);
   args->ret = vkResetEvent(args->device, args->event);
}

static void
vkr_dispatch_vkCreateQueryPool(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateQueryPool *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(pool, query_pool, QUERY_POOL, vkCreateQueryPool, pQueryPool);

   util_hash_table_set_u64(ctx->object_table, pool->base.id, pool);
}

static void
vkr_dispatch_vkDestroyQueryPool(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyQueryPool *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(pool, query_pool, QUERY_POOL, vkDestroyQueryPool, queryPool);

   util_hash_table_remove_u64(ctx->object_table, pool->base.id);
}

static void
vkr_dispatch_vkGetQueryPoolResults(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetQueryPoolResults *args)
{
   vn_replace_vkGetQueryPoolResults_args_handle(args);
   args->ret = vkGetQueryPoolResults(args->device, args->queryPool, args->firstQuery, args->queryCount, args->dataSize, args->pData, args->stride, args->flags);
}

static void
vkr_dispatch_vkResetQueryPool(struct vn_dispatch_context *dispatch, struct vn_command_vkResetQueryPool *args)
{
   struct vkr_context *ctx = dispatch->data;
   struct vkr_device *dev = (struct vkr_device *)args->device;
   if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
      vkr_cs_decoder_set_fatal(&ctx->decoder);
      return;
   }

   vn_replace_vkResetQueryPool_args_handle(args);
   dev->ResetQueryPool(args->device, args->queryPool, args->firstQuery, args->queryCount);
}

static void
vkr_dispatch_vkCreateShaderModule(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateShaderModule *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(mod, shader_module, SHADER_MODULE, vkCreateShaderModule, pShaderModule);

   util_hash_table_set_u64(ctx->object_table, mod->base.id, mod);
}

static void
vkr_dispatch_vkDestroyShaderModule(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyShaderModule *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(mod, shader_module, SHADER_MODULE, vkDestroyShaderModule, shaderModule);

   util_hash_table_remove_u64(ctx->object_table, mod->base.id);
}

static void
vkr_dispatch_vkCreatePipelineLayout(struct vn_dispatch_context *dispatch, struct vn_command_vkCreatePipelineLayout *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(layout, pipeline_layout, PIPELINE_LAYOUT, vkCreatePipelineLayout, pPipelineLayout);

   util_hash_table_set_u64(ctx->object_table, layout->base.id, layout);
}

static void
vkr_dispatch_vkDestroyPipelineLayout(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyPipelineLayout *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(layout, pipeline_layout, PIPELINE_LAYOUT, vkDestroyPipelineLayout, pipelineLayout);

   util_hash_table_remove_u64(ctx->object_table, layout->base.id);
}

static void
vkr_dispatch_vkCreatePipelineCache(struct vn_dispatch_context *dispatch, struct vn_command_vkCreatePipelineCache *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(cache, pipeline_cache, PIPELINE_CACHE, vkCreatePipelineCache, pPipelineCache);

   util_hash_table_set_u64(ctx->object_table, cache->base.id, cache);
}

static void
vkr_dispatch_vkDestroyPipelineCache(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyPipelineCache *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(cache, pipeline_cache, PIPELINE_CACHE, vkDestroyPipelineCache, pipelineCache);

   util_hash_table_remove_u64(ctx->object_table, cache->base.id);
}

static void
vkr_dispatch_vkGetPipelineCacheData(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkGetPipelineCacheData *args)
{
   vn_replace_vkGetPipelineCacheData_args_handle(args);
   args->ret = vkGetPipelineCacheData(args->device, args->pipelineCache, args->pDataSize, args->pData);
}

static void
vkr_dispatch_vkMergePipelineCaches(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkMergePipelineCaches *args)
{
   vn_replace_vkMergePipelineCaches_args_handle(args);
   args->ret = vkMergePipelineCaches(args->device, args->dstCache, args->srcCacheCount, args->pSrcCaches);
}
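/* Pipeline creation mirrors descriptor-set allocation: wrappers for the
 * whole batch are reserved up front and adopted only on VK_SUCCESS, so a
 * failed driver call leaves no stale entries behind.
 */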
static void
vkr_dispatch_vkCreateGraphicsPipelines(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateGraphicsPipelines *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct object_array arr;
   if (!object_array_init(&arr,
                          args->createInfoCount,
                          VK_OBJECT_TYPE_PIPELINE,
                          sizeof(struct vkr_pipeline),
                          sizeof(VkPipeline),
                          args->pPipelines)) {
      args->ret = VK_ERROR_OUT_OF_HOST_MEMORY;
      return;
   }

   vn_replace_vkCreateGraphicsPipelines_args_handle(args);
   args->ret = vkCreateGraphicsPipelines(args->device, args->pipelineCache, args->createInfoCount, args->pCreateInfos, NULL, arr.handle_storage);
   if (args->ret != VK_SUCCESS) {
      object_array_fini(&arr);
      return;
   }

   for (uint32_t i = 0; i < arr.count; i++) {
      struct vkr_pipeline *pipeline = arr.objects[i];

      pipeline->base.handle.pipeline = ((VkPipeline *)arr.handle_storage)[i];

      util_hash_table_set_u64(ctx->object_table, pipeline->base.id, pipeline);
   }

   arr.objects_stolen = true;
   object_array_fini(&arr);
}

static void
vkr_dispatch_vkCreateComputePipelines(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateComputePipelines *args)
{
   struct vkr_context *ctx = dispatch->data;

   struct object_array arr;
   if (!object_array_init(&arr,
                          args->createInfoCount,
                          VK_OBJECT_TYPE_PIPELINE,
                          sizeof(struct vkr_pipeline),
                          sizeof(VkPipeline),
                          args->pPipelines)) {
      args->ret = VK_ERROR_OUT_OF_HOST_MEMORY;
      return;
   }

   vn_replace_vkCreateComputePipelines_args_handle(args);
   args->ret = vkCreateComputePipelines(args->device, args->pipelineCache, args->createInfoCount, args->pCreateInfos, NULL, arr.handle_storage);
   if (args->ret != VK_SUCCESS) {
      object_array_fini(&arr);
      return;
   }

   for (uint32_t i = 0; i < arr.count; i++) {
      struct vkr_pipeline *pipeline = arr.objects[i];

      pipeline->base.handle.pipeline = ((VkPipeline *)arr.handle_storage)[i];

      util_hash_table_set_u64(ctx->object_table, pipeline->base.id, pipeline);
   }

   arr.objects_stolen = true;
   object_array_fini(&arr);
}

static void
vkr_dispatch_vkDestroyPipeline(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyPipeline *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(pipeline, pipeline, PIPELINE, vkDestroyPipeline, pipeline);

   util_hash_table_remove_u64(ctx->object_table, pipeline->base.id);
}

static void
vkr_dispatch_vkCreateCommandPool(struct vn_dispatch_context *dispatch, struct vn_command_vkCreateCommandPool *args)
{
   struct vkr_context *ctx = dispatch->data;

   CREATE_OBJECT(pool, command_pool, COMMAND_POOL, vkCreateCommandPool, pCommandPool);

   list_inithead(&pool->command_buffers);

   util_hash_table_set_u64(ctx->object_table, pool->base.id, pool);
}
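/* As with descriptor pools, destroying a command pool implicitly frees its
 * command buffers, which must therefore be dropped from the object table.
 */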
static void
vkr_dispatch_vkDestroyCommandPool(struct vn_dispatch_context *dispatch, struct vn_command_vkDestroyCommandPool *args)
{
   struct vkr_context *ctx = dispatch->data;

   DESTROY_OBJECT(pool, command_pool, COMMAND_POOL, vkDestroyCommandPool, commandPool);

   struct vkr_command_buffer *cmd, *tmp;
   LIST_FOR_EACH_ENTRY_SAFE(cmd, tmp, &pool->command_buffers, head)
      util_hash_table_remove_u64(ctx->object_table, cmd->base.id);

   util_hash_table_remove_u64(ctx->object_table, pool->base.id);
}

static void
vkr_dispatch_vkResetCommandPool(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkResetCommandPool *args)
{
   vn_replace_vkResetCommandPool_args_handle(args);
   args->ret = vkResetCommandPool(args->device, args->commandPool, args->flags);
}

static void
vkr_dispatch_vkTrimCommandPool(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkTrimCommandPool *args)
{
   vn_replace_vkTrimCommandPool_args_handle(args);
   vkTrimCommandPool(args->device, args->commandPool, args->flags);
}
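/* Command buffers take the same batched-allocation path as descriptor
 * sets, with the extra step of binding each wrapper to its vkr_device so
 * that device-level Vulkan 1.2 entry points can be reached from the
 * dispatch functions below.
 */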
2704static void
2705vkr_dispatch_vkAllocateCommandBuffers(struct vn_dispatch_context *dispatch, struct vn_command_vkAllocateCommandBuffers *args)
2706{
2707 struct vkr_context *ctx = dispatch->data;
2708
2709 struct vkr_device *dev = (struct vkr_device *)args->device;
2710 if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
2711 vkr_cs_decoder_set_fatal(&ctx->decoder);
2712 return;
2713 }
2714
2715 struct vkr_command_pool *pool = (struct vkr_command_pool *)(uintptr_t)args->pAllocateInfo->commandPool;
2716 if (!pool || pool->base.type != VK_OBJECT_TYPE_COMMAND_POOL) {
2717 vkr_cs_decoder_set_fatal(&ctx->decoder);
2718 return;
2719 }
2720
2721 struct object_array arr;
2722 if (!object_array_init(&arr,
2723 args->pAllocateInfo->commandBufferCount,
2724 VK_OBJECT_TYPE_COMMAND_BUFFER,
2725 sizeof(struct vkr_command_buffer),
2726 sizeof(VkCommandBuffer),
2727 args->pCommandBuffers)) {
2728 args->ret = VK_ERROR_OUT_OF_HOST_MEMORY;
2729 return;
2730 }
2731
2732 vn_replace_vkAllocateCommandBuffers_args_handle(args);
2733 args->ret = vkAllocateCommandBuffers(args->device, args->pAllocateInfo, arr.handle_storage);
2734 if (args->ret != VK_SUCCESS) {
2735 object_array_fini(&arr);
2736 return;
2737 }
2738
2739 for (uint32_t i = 0; i < arr.count; i++) {
2740 struct vkr_command_buffer *cmd = arr.objects[i];
2741
2742 cmd->base.handle.command_buffer = ((VkCommandBuffer *)arr.handle_storage)[i];
2743 cmd->device = dev;
2744 list_add(&cmd->head, &pool->command_buffers);
2745
2746 util_hash_table_set_u64(ctx->object_table, cmd->base.id, cmd);
2747 }
2748
2749 arr.objects_stolen = true;
2750 object_array_fini(&arr);
2751}
2752
2753static void
2754vkr_dispatch_vkFreeCommandBuffers(struct vn_dispatch_context *dispatch, struct vn_command_vkFreeCommandBuffers *args)
2755{
2756 struct vkr_context *ctx = dispatch->data;
2757 struct list_head free_cmds;
2758
2759 list_inithead(&free_cmds);
2760 for (uint32_t i = 0; i < args->commandBufferCount; i++) {
2761 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->pCommandBuffers[i];
2762 if (!cmd)
2763 continue;
2764 if (cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
2765 vkr_cs_decoder_set_fatal(&ctx->decoder);
2766 return;
2767 }
2768
2769 list_del(&cmd->head);
2770 list_addtail(&cmd->head, &free_cmds);
2771 }
2772
2773 vn_replace_vkFreeCommandBuffers_args_handle(args);
2774 vkFreeCommandBuffers(args->device, args->commandPool, args->commandBufferCount, args->pCommandBuffers);
2775
2776 struct vkr_command_buffer *cmd, *tmp;
2777 LIST_FOR_EACH_ENTRY_SAFE(cmd, tmp, &free_cmds, head)
2778 util_hash_table_remove_u64(ctx->object_table, cmd->base.id);
2779}
2780
2781static void
2782vkr_dispatch_vkResetCommandBuffer(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkResetCommandBuffer *args)
2783{
2784 vn_replace_vkResetCommandBuffer_args_handle(args);
2785 args->ret = vkResetCommandBuffer(args->commandBuffer, args->flags);
2786}
2787
2788static void
2789vkr_dispatch_vkBeginCommandBuffer(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkBeginCommandBuffer *args)
2790{
2791 vn_replace_vkBeginCommandBuffer_args_handle(args);
2792 args->ret = vkBeginCommandBuffer(args->commandBuffer, args->pBeginInfo);
2793}
2794
2795static void
2796vkr_dispatch_vkEndCommandBuffer(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkEndCommandBuffer *args)
2797{
2798 vn_replace_vkEndCommandBuffer_args_handle(args);
2799 args->ret = vkEndCommandBuffer(args->commandBuffer);
2800}
2801
2802static void
2803vkr_dispatch_vkCmdBindPipeline(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdBindPipeline *args)
2804{
2805 vn_replace_vkCmdBindPipeline_args_handle(args);
2806 vkCmdBindPipeline(args->commandBuffer, args->pipelineBindPoint, args->pipeline);
2807}
2808
2809static void
2810vkr_dispatch_vkCmdSetViewport(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdSetViewport *args)
2811{
2812 vn_replace_vkCmdSetViewport_args_handle(args);
2813 vkCmdSetViewport(args->commandBuffer, args->firstViewport, args->viewportCount, args->pViewports);
2814}
2815
2816static void
2817vkr_dispatch_vkCmdSetScissor(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdSetScissor *args)
2818{
2819 vn_replace_vkCmdSetScissor_args_handle(args);
2820 vkCmdSetScissor(args->commandBuffer, args->firstScissor, args->scissorCount, args->pScissors);
2821}
2822
2823static void
2824vkr_dispatch_vkCmdSetLineWidth(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdSetLineWidth *args)
2825{
2826 vn_replace_vkCmdSetLineWidth_args_handle(args);
2827 vkCmdSetLineWidth(args->commandBuffer, args->lineWidth);
2828}
2829
2830static void
2831vkr_dispatch_vkCmdSetDepthBias(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdSetDepthBias *args)
2832{
2833 vn_replace_vkCmdSetDepthBias_args_handle(args);
2834 vkCmdSetDepthBias(args->commandBuffer, args->depthBiasConstantFactor, args->depthBiasClamp, args->depthBiasSlopeFactor);
2835}
2836
2837static void
2838vkr_dispatch_vkCmdSetBlendConstants(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdSetBlendConstants *args)
2839{
2840 vn_replace_vkCmdSetBlendConstants_args_handle(args);
2841 vkCmdSetBlendConstants(args->commandBuffer, args->blendConstants);
2842}
2843
2844static void
2845vkr_dispatch_vkCmdSetDepthBounds(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdSetDepthBounds *args)
2846{
2847 vn_replace_vkCmdSetDepthBounds_args_handle(args);
2848 vkCmdSetDepthBounds(args->commandBuffer, args->minDepthBounds, args->maxDepthBounds);
2849}
2850
2851static void
2852vkr_dispatch_vkCmdSetStencilCompareMask(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdSetStencilCompareMask *args)
2853{
2854 vn_replace_vkCmdSetStencilCompareMask_args_handle(args);
2855 vkCmdSetStencilCompareMask(args->commandBuffer, args->faceMask, args->compareMask);
2856}
2857
2858static void
2859vkr_dispatch_vkCmdSetStencilWriteMask(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdSetStencilWriteMask *args)
2860{
2861 vn_replace_vkCmdSetStencilWriteMask_args_handle(args);
2862 vkCmdSetStencilWriteMask(args->commandBuffer, args->faceMask, args->writeMask);
2863}
2864
2865static void
2866vkr_dispatch_vkCmdSetStencilReference(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdSetStencilReference *args)
2867{
2868 vn_replace_vkCmdSetStencilReference_args_handle(args);
2869 vkCmdSetStencilReference(args->commandBuffer, args->faceMask, args->reference);
2870}
2871
2872static void
2873vkr_dispatch_vkCmdBindDescriptorSets(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdBindDescriptorSets *args)
2874{
2875 vn_replace_vkCmdBindDescriptorSets_args_handle(args);
2876 vkCmdBindDescriptorSets(args->commandBuffer, args->pipelineBindPoint, args->layout, args->firstSet, args->descriptorSetCount, args->pDescriptorSets, args->dynamicOffsetCount, args->pDynamicOffsets);
2877}
2878
2879static void
2880vkr_dispatch_vkCmdBindIndexBuffer(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdBindIndexBuffer *args)
2881{
2882 vn_replace_vkCmdBindIndexBuffer_args_handle(args);
2883 vkCmdBindIndexBuffer(args->commandBuffer, args->buffer, args->offset, args->indexType);
2884}
2885
2886static void
2887vkr_dispatch_vkCmdBindVertexBuffers(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdBindVertexBuffers *args)
2888{
2889 vn_replace_vkCmdBindVertexBuffers_args_handle(args);
2890 vkCmdBindVertexBuffers(args->commandBuffer, args->firstBinding, args->bindingCount, args->pBuffers, args->pOffsets);
2891}
2892
2893static void
2894vkr_dispatch_vkCmdDraw(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdDraw *args)
2895{
2896 vn_replace_vkCmdDraw_args_handle(args);
2897 vkCmdDraw(args->commandBuffer, args->vertexCount, args->instanceCount, args->firstVertex, args->firstInstance);
2898}
2899
2900static void
2901vkr_dispatch_vkCmdDrawIndexed(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdDrawIndexed *args)
2902{
2903 vn_replace_vkCmdDrawIndexed_args_handle(args);
2904 vkCmdDrawIndexed(args->commandBuffer, args->indexCount, args->instanceCount, args->firstIndex, args->vertexOffset, args->firstInstance);
2905}
2906
2907static void
2908vkr_dispatch_vkCmdDrawIndirect(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdDrawIndirect *args)
2909{
2910 vn_replace_vkCmdDrawIndirect_args_handle(args);
2911 vkCmdDrawIndirect(args->commandBuffer, args->buffer, args->offset, args->drawCount, args->stride);
2912}
2913
2914static void
2915vkr_dispatch_vkCmdDrawIndexedIndirect(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdDrawIndexedIndirect *args)
2916{
2917 vn_replace_vkCmdDrawIndexedIndirect_args_handle(args);
2918 vkCmdDrawIndexedIndirect(args->commandBuffer, args->buffer, args->offset, args->drawCount, args->stride);
2919}
2920
2921static void
2922vkr_dispatch_vkCmdDispatch(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdDispatch *args)
2923{
2924 vn_replace_vkCmdDispatch_args_handle(args);
2925 vkCmdDispatch(args->commandBuffer, args->groupCountX, args->groupCountY, args->groupCountZ);
2926}
2927
2928static void
2929vkr_dispatch_vkCmdDispatchIndirect(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdDispatchIndirect *args)
2930{
2931 vn_replace_vkCmdDispatchIndirect_args_handle(args);
2932 vkCmdDispatchIndirect(args->commandBuffer, args->buffer, args->offset);
2933}
2934
2935static void
2936vkr_dispatch_vkCmdCopyBuffer(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdCopyBuffer *args)
2937{
2938 vn_replace_vkCmdCopyBuffer_args_handle(args);
2939 vkCmdCopyBuffer(args->commandBuffer, args->srcBuffer, args->dstBuffer, args->regionCount, args->pRegions);
2940}
2941
2942static void
2943vkr_dispatch_vkCmdCopyImage(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdCopyImage *args)
2944{
2945 vn_replace_vkCmdCopyImage_args_handle(args);
2946 vkCmdCopyImage(args->commandBuffer, args->srcImage, args->srcImageLayout, args->dstImage, args->dstImageLayout, args->regionCount, args->pRegions);
2947}
2948
2949static void
2950vkr_dispatch_vkCmdBlitImage(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdBlitImage *args)
2951{
2952 vn_replace_vkCmdBlitImage_args_handle(args);
2953 vkCmdBlitImage(args->commandBuffer, args->srcImage, args->srcImageLayout, args->dstImage, args->dstImageLayout, args->regionCount, args->pRegions, args->filter);
2954}
2955
2956static void
2957vkr_dispatch_vkCmdCopyBufferToImage(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdCopyBufferToImage *args)
2958{
2959 vn_replace_vkCmdCopyBufferToImage_args_handle(args);
2960 vkCmdCopyBufferToImage(args->commandBuffer, args->srcBuffer, args->dstImage, args->dstImageLayout, args->regionCount, args->pRegions);
2961}
2962
2963static void
2964vkr_dispatch_vkCmdCopyImageToBuffer(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdCopyImageToBuffer *args)
2965{
2966 vn_replace_vkCmdCopyImageToBuffer_args_handle(args);
2967 vkCmdCopyImageToBuffer(args->commandBuffer, args->srcImage, args->srcImageLayout, args->dstBuffer, args->regionCount, args->pRegions);
2968}
2969
2970static void
2971vkr_dispatch_vkCmdUpdateBuffer(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdUpdateBuffer *args)
2972{
2973 vn_replace_vkCmdUpdateBuffer_args_handle(args);
2974 vkCmdUpdateBuffer(args->commandBuffer, args->dstBuffer, args->dstOffset, args->dataSize, args->pData);
2975}
2976
2977static void
2978vkr_dispatch_vkCmdFillBuffer(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdFillBuffer *args)
2979{
2980 vn_replace_vkCmdFillBuffer_args_handle(args);
2981 vkCmdFillBuffer(args->commandBuffer, args->dstBuffer, args->dstOffset, args->size, args->data);
2982}
2983
2984static void
2985vkr_dispatch_vkCmdClearColorImage(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdClearColorImage *args)
2986{
2987 vn_replace_vkCmdClearColorImage_args_handle(args);
2988 vkCmdClearColorImage(args->commandBuffer, args->image, args->imageLayout, args->pColor, args->rangeCount, args->pRanges);
2989}
2990
2991static void
2992vkr_dispatch_vkCmdClearDepthStencilImage(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdClearDepthStencilImage *args)
2993{
2994 vn_replace_vkCmdClearDepthStencilImage_args_handle(args);
2995 vkCmdClearDepthStencilImage(args->commandBuffer, args->image, args->imageLayout, args->pDepthStencil, args->rangeCount, args->pRanges);
2996}
2997
2998static void
2999vkr_dispatch_vkCmdClearAttachments(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdClearAttachments *args)
3000{
3001 vn_replace_vkCmdClearAttachments_args_handle(args);
3002 vkCmdClearAttachments(args->commandBuffer, args->attachmentCount, args->pAttachments, args->rectCount, args->pRects);
3003}
3004
3005static void
3006vkr_dispatch_vkCmdResolveImage(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdResolveImage *args)
3007{
3008 vn_replace_vkCmdResolveImage_args_handle(args);
3009 vkCmdResolveImage(args->commandBuffer, args->srcImage, args->srcImageLayout, args->dstImage, args->dstImageLayout, args->regionCount, args->pRegions);
3010}
3011
3012static void
3013vkr_dispatch_vkCmdSetEvent(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdSetEvent *args)
3014{
3015 vn_replace_vkCmdSetEvent_args_handle(args);
3016 vkCmdSetEvent(args->commandBuffer, args->event, args->stageMask);
3017}
3018
3019static void
3020vkr_dispatch_vkCmdResetEvent(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdResetEvent *args)
3021{
3022 vn_replace_vkCmdResetEvent_args_handle(args);
3023 vkCmdResetEvent(args->commandBuffer, args->event, args->stageMask);
3024}
3025
3026static void
3027vkr_dispatch_vkCmdWaitEvents(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdWaitEvents *args)
3028{
3029 vn_replace_vkCmdWaitEvents_args_handle(args);
3030 vkCmdWaitEvents(args->commandBuffer, args->eventCount, args->pEvents, args->srcStageMask, args->dstStageMask, args->memoryBarrierCount, args->pMemoryBarriers, args->bufferMemoryBarrierCount, args->pBufferMemoryBarriers, args->imageMemoryBarrierCount, args->pImageMemoryBarriers);
3031}
3032
3033static void
3034vkr_dispatch_vkCmdPipelineBarrier(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdPipelineBarrier *args)
3035{
3036 vn_replace_vkCmdPipelineBarrier_args_handle(args);
3037 vkCmdPipelineBarrier(args->commandBuffer, args->srcStageMask, args->dstStageMask, args->dependencyFlags, args->memoryBarrierCount, args->pMemoryBarriers, args->bufferMemoryBarrierCount, args->pBufferMemoryBarriers, args->imageMemoryBarrierCount, args->pImageMemoryBarriers);
3038}
3039
3040static void
3041vkr_dispatch_vkCmdBeginQuery(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdBeginQuery *args)
3042{
3043 vn_replace_vkCmdBeginQuery_args_handle(args);
3044 vkCmdBeginQuery(args->commandBuffer, args->queryPool, args->query, args->flags);
3045}
3046
3047static void
3048vkr_dispatch_vkCmdEndQuery(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdEndQuery *args)
3049{
3050 vn_replace_vkCmdEndQuery_args_handle(args);
3051 vkCmdEndQuery(args->commandBuffer, args->queryPool, args->query);
3052}
3053
3054static void
3055vkr_dispatch_vkCmdResetQueryPool(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdResetQueryPool *args)
3056{
3057 vn_replace_vkCmdResetQueryPool_args_handle(args);
3058 vkCmdResetQueryPool(args->commandBuffer, args->queryPool, args->firstQuery, args->queryCount);
3059}
3060
3061static void
3062vkr_dispatch_vkCmdWriteTimestamp(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdWriteTimestamp *args)
3063{
3064 vn_replace_vkCmdWriteTimestamp_args_handle(args);
3065 vkCmdWriteTimestamp(args->commandBuffer, args->pipelineStage, args->queryPool, args->query);
3066}
3067
3068static void
3069vkr_dispatch_vkCmdCopyQueryPoolResults(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdCopyQueryPoolResults *args)
3070{
3071 vn_replace_vkCmdCopyQueryPoolResults_args_handle(args);
3072 vkCmdCopyQueryPoolResults(args->commandBuffer, args->queryPool, args->firstQuery, args->queryCount, args->dstBuffer, args->dstOffset, args->stride, args->flags);
3073}
3074
3075static void
3076vkr_dispatch_vkCmdPushConstants(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdPushConstants *args)
3077{
3078 vn_replace_vkCmdPushConstants_args_handle(args);
3079 vkCmdPushConstants(args->commandBuffer, args->layout, args->stageFlags, args->offset, args->size, args->pValues);
3080}
3081
3082static void
3083vkr_dispatch_vkCmdBeginRenderPass(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdBeginRenderPass *args)
3084{
3085 vn_replace_vkCmdBeginRenderPass_args_handle(args);
3086 vkCmdBeginRenderPass(args->commandBuffer, args->pRenderPassBegin, args->contents);
3087}
3088
3089static void
3090vkr_dispatch_vkCmdNextSubpass(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdNextSubpass *args)
3091{
3092 vn_replace_vkCmdNextSubpass_args_handle(args);
3093 vkCmdNextSubpass(args->commandBuffer, args->contents);
3094}
3095
3096static void
3097vkr_dispatch_vkCmdEndRenderPass(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdEndRenderPass *args)
3098{
3099 vn_replace_vkCmdEndRenderPass_args_handle(args);
3100 vkCmdEndRenderPass(args->commandBuffer);
3101}
3102
3103static void
3104vkr_dispatch_vkCmdExecuteCommands(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdExecuteCommands *args)
3105{
3106 vn_replace_vkCmdExecuteCommands_args_handle(args);
3107 vkCmdExecuteCommands(args->commandBuffer, args->commandBufferCount, args->pCommandBuffers);
3108}
3109
3110static void
3111vkr_dispatch_vkCmdSetDeviceMask(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdSetDeviceMask *args)
3112{
3113 vn_replace_vkCmdSetDeviceMask_args_handle(args);
3114 vkCmdSetDeviceMask(args->commandBuffer, args->deviceMask);
3115}
3116
3117static void
3118vkr_dispatch_vkCmdDispatchBase(UNUSED struct vn_dispatch_context *dispatch, struct vn_command_vkCmdDispatchBase *args)
3119{
3120 vn_replace_vkCmdDispatchBase_args_handle(args);
3121 vkCmdDispatchBase(args->commandBuffer, args->baseGroupX, args->baseGroupY, args->baseGroupZ, args->groupCountX, args->groupCountY, args->groupCountZ);
3122}
3123
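/*
 * The vkCmd* handlers above are thin, generated pass-throughs: guest
 * handles in args are replaced with host handles, then the global Vulkan
 * entry point is called directly.  The handlers below additionally
 * validate the dispatchable object and route through per-device function
 * pointers; a sketch of the pattern that follows (SomeDeviceProc is a
 * placeholder):
 *
 *   struct vkr_command_buffer *cmd =
 *      (struct vkr_command_buffer *)args->commandBuffer;
 *   if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
 *      vkr_cs_decoder_set_fatal(&ctx->decoder);
 *      return;
 *   }
 *   cmd->device->SomeDeviceProc(args->commandBuffer, ...);
 */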
3124static void
3125vkr_dispatch_vkCmdBeginRenderPass2(struct vn_dispatch_context *dispatch, struct vn_command_vkCmdBeginRenderPass2 *args)
3126{
3127 struct vkr_context *ctx = dispatch->data;
3128 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->commandBuffer;
3129 if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
3130 vkr_cs_decoder_set_fatal(&ctx->decoder);
3131 return;
3132 }
3133
3134 vn_replace_vkCmdBeginRenderPass2_args_handle(args);
3135 cmd->device->CmdBeginRenderPass2(args->commandBuffer, args->pRenderPassBegin, args->pSubpassBeginInfo);
3136}
3137
3138static void
3139vkr_dispatch_vkCmdNextSubpass2(struct vn_dispatch_context *dispatch, struct vn_command_vkCmdNextSubpass2 *args)
3140{
3141 struct vkr_context *ctx = dispatch->data;
3142 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->commandBuffer;
3143 if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
3144 vkr_cs_decoder_set_fatal(&ctx->decoder);
3145 return;
3146 }
3147
3148 vn_replace_vkCmdNextSubpass2_args_handle(args);
3149 cmd->device->CmdNextSubpass2(args->commandBuffer, args->pSubpassBeginInfo, args->pSubpassEndInfo);
3150}
3151
3152static void
3153vkr_dispatch_vkCmdEndRenderPass2(struct vn_dispatch_context *dispatch, struct vn_command_vkCmdEndRenderPass2 *args)
3154{
3155 struct vkr_context *ctx = dispatch->data;
3156 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->commandBuffer;
3157 if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
3158 vkr_cs_decoder_set_fatal(&ctx->decoder);
3159 return;
3160 }
3161
3162 vn_replace_vkCmdEndRenderPass2_args_handle(args);
3163 cmd->device->CmdEndRenderPass2(args->commandBuffer, args->pSubpassEndInfo);
3164}
3165
3166static void
3167vkr_dispatch_vkCmdDrawIndirectCount(struct vn_dispatch_context *dispatch, struct vn_command_vkCmdDrawIndirectCount *args)
3168{
3169 struct vkr_context *ctx = dispatch->data;
3170 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->commandBuffer;
3171 if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
3172 vkr_cs_decoder_set_fatal(&ctx->decoder);
3173 return;
3174 }
3175
3176 vn_replace_vkCmdDrawIndirectCount_args_handle(args);
3177 cmd->device->CmdDrawIndirectCount(args->commandBuffer, args->buffer, args->offset, args->countBuffer, args->countBufferOffset, args->maxDrawCount, args->stride);
3178}
3179
3180static void
3181vkr_dispatch_vkCmdDrawIndexedIndirectCount(struct vn_dispatch_context *dispatch, struct vn_command_vkCmdDrawIndexedIndirectCount *args)
3182{
3183 struct vkr_context *ctx = dispatch->data;
3184 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->commandBuffer;
3185 if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
3186 vkr_cs_decoder_set_fatal(&ctx->decoder);
3187 return;
3188 }
3189
3190 vn_replace_vkCmdDrawIndexedIndirectCount_args_handle(args);
3191 cmd->device->CmdDrawIndexedIndirectCount(args->commandBuffer, args->buffer, args->offset, args->countBuffer, args->countBufferOffset, args->maxDrawCount, args->stride);
3192}
3193
3194static void
3195vkr_dispatch_vkCmdBindTransformFeedbackBuffersEXT(struct vn_dispatch_context *dispatch, struct vn_command_vkCmdBindTransformFeedbackBuffersEXT *args)
3196{
3197 struct vkr_context *ctx = dispatch->data;
3198 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->commandBuffer;
3199 if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
3200 vkr_cs_decoder_set_fatal(&ctx->decoder);
3201 return;
3202 }
3203
3204 vn_replace_vkCmdBindTransformFeedbackBuffersEXT_args_handle(args);
3205 cmd->device->cmd_bind_transform_feedback_buffers(args->commandBuffer, args->firstBinding, args->bindingCount, args->pBuffers, args->pOffsets, args->pSizes);
3206}
3207
3208static void
3209vkr_dispatch_vkCmdBeginTransformFeedbackEXT(struct vn_dispatch_context *dispatch, struct vn_command_vkCmdBeginTransformFeedbackEXT *args)
3210{
3211 struct vkr_context *ctx = dispatch->data;
3212 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->commandBuffer;
3213 if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
3214 vkr_cs_decoder_set_fatal(&ctx->decoder);
3215 return;
3216 }
3217
3218 vn_replace_vkCmdBeginTransformFeedbackEXT_args_handle(args);
3219 cmd->device->cmd_begin_transform_feedback(args->commandBuffer, args->firstCounterBuffer, args->counterBufferCount, args->pCounterBuffers, args->pCounterBufferOffsets);
3220}
3221
3222static void
3223vkr_dispatch_vkCmdEndTransformFeedbackEXT(struct vn_dispatch_context *dispatch, struct vn_command_vkCmdEndTransformFeedbackEXT *args)
3224{
3225 struct vkr_context *ctx = dispatch->data;
3226 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->commandBuffer;
3227 if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
3228 vkr_cs_decoder_set_fatal(&ctx->decoder);
3229 return;
3230 }
3231
3232 vn_replace_vkCmdEndTransformFeedbackEXT_args_handle(args);
3233 cmd->device->cmd_end_transform_feedback(args->commandBuffer, args->firstCounterBuffer, args->counterBufferCount, args->pCounterBuffers, args->pCounterBufferOffsets);
3234}
3235
3236static void
3237vkr_dispatch_vkCmdBeginQueryIndexedEXT(struct vn_dispatch_context *dispatch, struct vn_command_vkCmdBeginQueryIndexedEXT *args)
3238{
3239 struct vkr_context *ctx = dispatch->data;
3240 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->commandBuffer;
3241 if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
3242 vkr_cs_decoder_set_fatal(&ctx->decoder);
3243 return;
3244 }
3245
3246 vn_replace_vkCmdBeginQueryIndexedEXT_args_handle(args);
3247 cmd->device->cmd_begin_query_indexed(args->commandBuffer, args->queryPool, args->query, args->flags, args->index);
3248}
3249
3250static void
3251vkr_dispatch_vkCmdEndQueryIndexedEXT(struct vn_dispatch_context *dispatch, struct vn_command_vkCmdEndQueryIndexedEXT *args)
3252{
3253 struct vkr_context *ctx = dispatch->data;
3254 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->commandBuffer;
3255 if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
3256 vkr_cs_decoder_set_fatal(&ctx->decoder);
3257 return;
3258 }
3259
3260 vn_replace_vkCmdEndQueryIndexedEXT_args_handle(args);
3261 cmd->device->cmd_end_query_indexed(args->commandBuffer, args->queryPool, args->query, args->index);
3262}
3263
3264static void
3265vkr_dispatch_vkCmdDrawIndirectByteCountEXT(struct vn_dispatch_context *dispatch, struct vn_command_vkCmdDrawIndirectByteCountEXT *args)
3266{
3267 struct vkr_context *ctx = dispatch->data;
3268 struct vkr_command_buffer *cmd = (struct vkr_command_buffer *)args->commandBuffer;
3269 if (!cmd || cmd->base.type != VK_OBJECT_TYPE_COMMAND_BUFFER) {
3270 vkr_cs_decoder_set_fatal(&ctx->decoder);
3271 return;
3272 }
3273
3274 vn_replace_vkCmdDrawIndirectByteCountEXT_args_handle(args);
3275 cmd->device->cmd_draw_indirect_byte_count(args->commandBuffer, args->instanceCount, args->firstInstance, args->counterBuffer, args->counterBufferOffset, args->counterOffset, args->vertexStride);
3276}
3277
3278static void
3279vkr_dispatch_vkGetImageDrmFormatModifierPropertiesEXT(struct vn_dispatch_context *dispatch, struct vn_command_vkGetImageDrmFormatModifierPropertiesEXT *args)
3280{
3281 struct vkr_context *ctx = dispatch->data;
3282 struct vkr_device *dev = (struct vkr_device *)args->device;
3283 if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
3284 vkr_cs_decoder_set_fatal(&ctx->decoder);
3285 return;
3286 }
3287
3288 vn_replace_vkGetImageDrmFormatModifierPropertiesEXT_args_handle(args);
3289 args->ret = dev->get_image_drm_format_modifier_properties(args->device, args->image, args->pProperties);
3290}
3291
3292static void
3293vkr_dispatch_vkGetMemoryResourcePropertiesMESA(struct vn_dispatch_context *dispatch, struct vn_command_vkGetMemoryResourcePropertiesMESA *args)
3294{
3295 struct vkr_context *ctx = dispatch->data;
3296 struct vkr_device *dev = (struct vkr_device *)args->device;
3297 if (!dev || dev->base.type != VK_OBJECT_TYPE_DEVICE) {
3298 vkr_cs_decoder_set_fatal(&ctx->decoder);
3299 return;
3300 }
3301
3302 struct vkr_resource_attachment *att = util_hash_table_get(ctx->resource_table, uintptr_to_pointer(args->resourceId));
3303 if (!att || !att->resource) {
3304 args->ret = VK_ERROR_INVALID_EXTERNAL_HANDLE;
3305 return;
3306 }
3307
3308 VkExternalMemoryHandleTypeFlagBits handle_type;
3309 if (!vkr_get_fd_handle_type_from_virgl_fd_type(dev->physical_device, att->resource->fd_type, &handle_type)) {
3310 args->ret = VK_ERROR_INVALID_EXTERNAL_HANDLE;
3311 return;
3312 }
3313
3314 VkMemoryFdPropertiesKHR memory_fd_properties = {
3315 .sType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
3316 .pNext = NULL,
3317 .memoryTypeBits = 0,
3318 };
3319 vn_replace_vkGetMemoryResourcePropertiesMESA_args_handle(args);
3320 args->ret = dev->get_memory_fd_properties(args->device, handle_type, att->resource->fd, &memory_fd_properties);
3321 args->pMemoryResourceProperties->memoryTypeBits = memory_fd_properties.memoryTypeBits;
3322}
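/*
 * Flow above, in brief: resourceId names a virgl resource attached to
 * this context; its fd type (dmabuf or opaque fd) is translated to a
 * VkExternalMemoryHandleTypeFlagBits, and the device's resolved
 * vkGetMemoryFdPropertiesKHR reports which host memory types can import
 * it.  The guest then restricts its memory-type choice accordingly, e.g.
 * (illustrative only, not part of this file):
 *
 *   allowed = props.memoryTypeBits & candidate_types;
 */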
3323
3324static void
3325vkr_dispatch_debug_log(UNUSED struct vn_dispatch_context *dispatch, const char *msg)
3326{
3327 vrend_printf("vkr: %s\n", msg);
3328}
3329
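/*
 * The dispatch table below wires every venus command to its handler; a
 * command decoded from the guest stream is dispatched roughly as:
 *
 *   while (vkr_cs_decoder_has_command(&ctx->decoder))
 *      vn_dispatch_command(&ctx->dispatch);
 *
 * (see vkr_context_submit_cmd).  Entries left NULL are commands this
 * renderer does not service, e.g. vkMapMemory, since a host mapping
 * cannot cross the guest/host boundary; mappings are provided via blob
 * resources instead.
 */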
3330static void
3331vkr_context_init_dispatch(struct vkr_context *ctx)
3332{
3333 struct vn_dispatch_context *dispatch = &ctx->dispatch;
3334
3335 dispatch->data = ctx;
3336 dispatch->debug_log = vkr_dispatch_debug_log;
3337
3338 dispatch->encoder = (struct vn_cs_encoder *)&ctx->encoder;
3339 dispatch->decoder = (struct vn_cs_decoder *)&ctx->decoder;
3340
3341 dispatch->dispatch_vkSetReplyCommandStreamMESA = vkr_dispatch_vkSetReplyCommandStreamMESA;
3342 dispatch->dispatch_vkSeekReplyCommandStreamMESA = vkr_dispatch_vkSeekReplyCommandStreamMESA;
3343 dispatch->dispatch_vkExecuteCommandStreamsMESA = vkr_dispatch_vkExecuteCommandStreamsMESA;
3344 dispatch->dispatch_vkCreateRingMESA = vkr_dispatch_vkCreateRingMESA;
3345 dispatch->dispatch_vkDestroyRingMESA = vkr_dispatch_vkDestroyRingMESA;
3346 dispatch->dispatch_vkNotifyRingMESA = vkr_dispatch_vkNotifyRingMESA;
3347 dispatch->dispatch_vkWriteRingExtraMESA = vkr_dispatch_vkWriteRingExtraMESA;
3348
3349 dispatch->dispatch_vkEnumerateInstanceVersion = vkr_dispatch_vkEnumerateInstanceVersion;
3350 dispatch->dispatch_vkEnumerateInstanceExtensionProperties = vkr_dispatch_vkEnumerateInstanceExtensionProperties;
3351 /* we don't advertise layers (and never should) */
3352 dispatch->dispatch_vkEnumerateInstanceLayerProperties = NULL;
3353 dispatch->dispatch_vkCreateInstance = vkr_dispatch_vkCreateInstance;
3354 dispatch->dispatch_vkDestroyInstance = vkr_dispatch_vkDestroyInstance;
3355 dispatch->dispatch_vkGetInstanceProcAddr = NULL;
3356
3357 dispatch->dispatch_vkEnumeratePhysicalDevices = vkr_dispatch_vkEnumeratePhysicalDevices;
3358 dispatch->dispatch_vkEnumeratePhysicalDeviceGroups = vkr_dispatch_vkEnumeratePhysicalDeviceGroups;
3359 dispatch->dispatch_vkGetPhysicalDeviceFeatures = vkr_dispatch_vkGetPhysicalDeviceFeatures;
3360 dispatch->dispatch_vkGetPhysicalDeviceProperties = vkr_dispatch_vkGetPhysicalDeviceProperties;
3361 dispatch->dispatch_vkGetPhysicalDeviceQueueFamilyProperties = vkr_dispatch_vkGetPhysicalDeviceQueueFamilyProperties;
3362 dispatch->dispatch_vkGetPhysicalDeviceMemoryProperties = vkr_dispatch_vkGetPhysicalDeviceMemoryProperties;
3363 dispatch->dispatch_vkGetPhysicalDeviceFormatProperties = vkr_dispatch_vkGetPhysicalDeviceFormatProperties;
3364 dispatch->dispatch_vkGetPhysicalDeviceImageFormatProperties = vkr_dispatch_vkGetPhysicalDeviceImageFormatProperties;
3365 dispatch->dispatch_vkGetPhysicalDeviceSparseImageFormatProperties = vkr_dispatch_vkGetPhysicalDeviceSparseImageFormatProperties;
3366 dispatch->dispatch_vkGetPhysicalDeviceFeatures2 = vkr_dispatch_vkGetPhysicalDeviceFeatures2;
3367 dispatch->dispatch_vkGetPhysicalDeviceProperties2 = vkr_dispatch_vkGetPhysicalDeviceProperties2;
3368 dispatch->dispatch_vkGetPhysicalDeviceQueueFamilyProperties2 = vkr_dispatch_vkGetPhysicalDeviceQueueFamilyProperties2;
3369 dispatch->dispatch_vkGetPhysicalDeviceMemoryProperties2 = vkr_dispatch_vkGetPhysicalDeviceMemoryProperties2;
3370 dispatch->dispatch_vkGetPhysicalDeviceFormatProperties2 = vkr_dispatch_vkGetPhysicalDeviceFormatProperties2;
3371 dispatch->dispatch_vkGetPhysicalDeviceImageFormatProperties2 = vkr_dispatch_vkGetPhysicalDeviceImageFormatProperties2;
3372 dispatch->dispatch_vkGetPhysicalDeviceSparseImageFormatProperties2 = vkr_dispatch_vkGetPhysicalDeviceSparseImageFormatProperties2;
3373 dispatch->dispatch_vkGetPhysicalDeviceExternalBufferProperties = NULL;
3374 dispatch->dispatch_vkGetPhysicalDeviceExternalSemaphoreProperties = NULL;
3375 dispatch->dispatch_vkGetPhysicalDeviceExternalFenceProperties = NULL;
3376
3377 dispatch->dispatch_vkEnumerateDeviceExtensionProperties = vkr_dispatch_vkEnumerateDeviceExtensionProperties;
3378 dispatch->dispatch_vkEnumerateDeviceLayerProperties = NULL;
3379 dispatch->dispatch_vkCreateDevice = vkr_dispatch_vkCreateDevice;
3380 dispatch->dispatch_vkDestroyDevice = vkr_dispatch_vkDestroyDevice;
3381 dispatch->dispatch_vkGetDeviceProcAddr = NULL;
3382 dispatch->dispatch_vkGetDeviceGroupPeerMemoryFeatures = vkr_dispatch_vkGetDeviceGroupPeerMemoryFeatures;
3383 dispatch->dispatch_vkDeviceWaitIdle = vkr_dispatch_vkDeviceWaitIdle;
3384
3385 dispatch->dispatch_vkGetDeviceQueue = vkr_dispatch_vkGetDeviceQueue;
3386 dispatch->dispatch_vkGetDeviceQueue2 = vkr_dispatch_vkGetDeviceQueue2;
3387 dispatch->dispatch_vkQueueSubmit = vkr_dispatch_vkQueueSubmit;
3388 dispatch->dispatch_vkQueueBindSparse = vkr_dispatch_vkQueueBindSparse;
3389 dispatch->dispatch_vkQueueWaitIdle = vkr_dispatch_vkQueueWaitIdle;
3390
3391 dispatch->dispatch_vkCreateFence = vkr_dispatch_vkCreateFence;
3392 dispatch->dispatch_vkDestroyFence = vkr_dispatch_vkDestroyFence;
3393 dispatch->dispatch_vkResetFences = vkr_dispatch_vkResetFences;
3394 dispatch->dispatch_vkGetFenceStatus = vkr_dispatch_vkGetFenceStatus;
3395 dispatch->dispatch_vkWaitForFences = vkr_dispatch_vkWaitForFences;
3396
3397 dispatch->dispatch_vkCreateSemaphore = vkr_dispatch_vkCreateSemaphore;
3398 dispatch->dispatch_vkDestroySemaphore = vkr_dispatch_vkDestroySemaphore;
3399 dispatch->dispatch_vkGetSemaphoreCounterValue = vkr_dispatch_vkGetSemaphoreCounterValue;
3400 dispatch->dispatch_vkWaitSemaphores = vkr_dispatch_vkWaitSemaphores;
3401 dispatch->dispatch_vkSignalSemaphore = vkr_dispatch_vkSignalSemaphore;
3402
3403 dispatch->dispatch_vkAllocateMemory = vkr_dispatch_vkAllocateMemory;
3404 dispatch->dispatch_vkFreeMemory = vkr_dispatch_vkFreeMemory;
3405 dispatch->dispatch_vkMapMemory = NULL;
3406 dispatch->dispatch_vkUnmapMemory = NULL;
3407 dispatch->dispatch_vkFlushMappedMemoryRanges = NULL;
3408 dispatch->dispatch_vkInvalidateMappedMemoryRanges = NULL;
3409 dispatch->dispatch_vkGetDeviceMemoryCommitment = vkr_dispatch_vkGetDeviceMemoryCommitment;
3410 dispatch->dispatch_vkGetDeviceMemoryOpaqueCaptureAddress = vkr_dispatch_vkGetDeviceMemoryOpaqueCaptureAddress;
3411
3412 dispatch->dispatch_vkCreateBuffer = vkr_dispatch_vkCreateBuffer;
3413 dispatch->dispatch_vkDestroyBuffer = vkr_dispatch_vkDestroyBuffer;
3414 dispatch->dispatch_vkGetBufferMemoryRequirements = vkr_dispatch_vkGetBufferMemoryRequirements;
3415 dispatch->dispatch_vkGetBufferMemoryRequirements2 = vkr_dispatch_vkGetBufferMemoryRequirements2;
3416 dispatch->dispatch_vkBindBufferMemory = vkr_dispatch_vkBindBufferMemory;
3417 dispatch->dispatch_vkBindBufferMemory2 = vkr_dispatch_vkBindBufferMemory2;
3418 dispatch->dispatch_vkGetBufferOpaqueCaptureAddress = vkr_dispatch_vkGetBufferOpaqueCaptureAddress;
3419 dispatch->dispatch_vkGetBufferDeviceAddress = vkr_dispatch_vkGetBufferDeviceAddress;
3420
3421 dispatch->dispatch_vkCreateBufferView = vkr_dispatch_vkCreateBufferView;
3422 dispatch->dispatch_vkDestroyBufferView = vkr_dispatch_vkDestroyBufferView;
3423
3424 dispatch->dispatch_vkCreateImage = vkr_dispatch_vkCreateImage;
3425 dispatch->dispatch_vkDestroyImage = vkr_dispatch_vkDestroyImage;
3426 dispatch->dispatch_vkGetImageMemoryRequirements = vkr_dispatch_vkGetImageMemoryRequirements;
3427 dispatch->dispatch_vkGetImageMemoryRequirements2 = vkr_dispatch_vkGetImageMemoryRequirements2;
3428 dispatch->dispatch_vkGetImageSparseMemoryRequirements = vkr_dispatch_vkGetImageSparseMemoryRequirements;
3429 dispatch->dispatch_vkGetImageSparseMemoryRequirements2 = vkr_dispatch_vkGetImageSparseMemoryRequirements2;
3430 dispatch->dispatch_vkBindImageMemory = vkr_dispatch_vkBindImageMemory;
3431 dispatch->dispatch_vkBindImageMemory2 = vkr_dispatch_vkBindImageMemory2;
3432 dispatch->dispatch_vkGetImageSubresourceLayout = vkr_dispatch_vkGetImageSubresourceLayout;
3433
3434 dispatch->dispatch_vkCreateImageView = vkr_dispatch_vkCreateImageView;
3435 dispatch->dispatch_vkDestroyImageView = vkr_dispatch_vkDestroyImageView;
3436
3437 dispatch->dispatch_vkCreateSampler = vkr_dispatch_vkCreateSampler;
3438 dispatch->dispatch_vkDestroySampler = vkr_dispatch_vkDestroySampler;
3439
3440 dispatch->dispatch_vkCreateSamplerYcbcrConversion = vkr_dispatch_vkCreateSamplerYcbcrConversion;
3441 dispatch->dispatch_vkDestroySamplerYcbcrConversion = vkr_dispatch_vkDestroySamplerYcbcrConversion;
3442
3443 dispatch->dispatch_vkGetDescriptorSetLayoutSupport = vkr_dispatch_vkGetDescriptorSetLayoutSupport;
3444 dispatch->dispatch_vkCreateDescriptorSetLayout = vkr_dispatch_vkCreateDescriptorSetLayout;
3445 dispatch->dispatch_vkDestroyDescriptorSetLayout = vkr_dispatch_vkDestroyDescriptorSetLayout;
3446
3447 dispatch->dispatch_vkCreateDescriptorPool = vkr_dispatch_vkCreateDescriptorPool;
3448 dispatch->dispatch_vkDestroyDescriptorPool = vkr_dispatch_vkDestroyDescriptorPool;
3449 dispatch->dispatch_vkResetDescriptorPool = vkr_dispatch_vkResetDescriptorPool;
3450
3451 dispatch->dispatch_vkAllocateDescriptorSets = vkr_dispatch_vkAllocateDescriptorSets;
3452 dispatch->dispatch_vkFreeDescriptorSets = vkr_dispatch_vkFreeDescriptorSets;
3453 dispatch->dispatch_vkUpdateDescriptorSets = vkr_dispatch_vkUpdateDescriptorSets;
3454
3455 dispatch->dispatch_vkCreateDescriptorUpdateTemplate = vkr_dispatch_vkCreateDescriptorUpdateTemplate;
3456 dispatch->dispatch_vkDestroyDescriptorUpdateTemplate = vkr_dispatch_vkDestroyDescriptorUpdateTemplate;
3457 dispatch->dispatch_vkUpdateDescriptorSetWithTemplate = NULL;
3458
3459 dispatch->dispatch_vkCreateRenderPass = vkr_dispatch_vkCreateRenderPass;
3460 dispatch->dispatch_vkCreateRenderPass2 = vkr_dispatch_vkCreateRenderPass2;
3461 dispatch->dispatch_vkDestroyRenderPass = vkr_dispatch_vkDestroyRenderPass;
3462 dispatch->dispatch_vkGetRenderAreaGranularity = vkr_dispatch_vkGetRenderAreaGranularity;
3463
3464 dispatch->dispatch_vkCreateFramebuffer = vkr_dispatch_vkCreateFramebuffer;
3465 dispatch->dispatch_vkDestroyFramebuffer = vkr_dispatch_vkDestroyFramebuffer;
3466
3467 dispatch->dispatch_vkCreateEvent = vkr_dispatch_vkCreateEvent;
3468 dispatch->dispatch_vkDestroyEvent = vkr_dispatch_vkDestroyEvent;
3469 dispatch->dispatch_vkGetEventStatus = vkr_dispatch_vkGetEventStatus;
3470 dispatch->dispatch_vkSetEvent = vkr_dispatch_vkSetEvent;
3471 dispatch->dispatch_vkResetEvent = vkr_dispatch_vkResetEvent;
3472
3473 dispatch->dispatch_vkCreateQueryPool = vkr_dispatch_vkCreateQueryPool;
3474 dispatch->dispatch_vkDestroyQueryPool = vkr_dispatch_vkDestroyQueryPool;
3475 dispatch->dispatch_vkGetQueryPoolResults = vkr_dispatch_vkGetQueryPoolResults;
3476 dispatch->dispatch_vkResetQueryPool = vkr_dispatch_vkResetQueryPool;
3477
3478 dispatch->dispatch_vkCreateShaderModule = vkr_dispatch_vkCreateShaderModule;
3479 dispatch->dispatch_vkDestroyShaderModule = vkr_dispatch_vkDestroyShaderModule;
3480
3481 dispatch->dispatch_vkCreatePipelineLayout = vkr_dispatch_vkCreatePipelineLayout;
3482 dispatch->dispatch_vkDestroyPipelineLayout = vkr_dispatch_vkDestroyPipelineLayout;
3483
3484 dispatch->dispatch_vkCreatePipelineCache = vkr_dispatch_vkCreatePipelineCache;
3485 dispatch->dispatch_vkDestroyPipelineCache = vkr_dispatch_vkDestroyPipelineCache;
3486 dispatch->dispatch_vkGetPipelineCacheData = vkr_dispatch_vkGetPipelineCacheData;
3487 dispatch->dispatch_vkMergePipelineCaches = vkr_dispatch_vkMergePipelineCaches;
3488
3489 dispatch->dispatch_vkCreateGraphicsPipelines = vkr_dispatch_vkCreateGraphicsPipelines;
3490 dispatch->dispatch_vkCreateComputePipelines = vkr_dispatch_vkCreateComputePipelines;
3491 dispatch->dispatch_vkDestroyPipeline = vkr_dispatch_vkDestroyPipeline;
3492
3493 dispatch->dispatch_vkCreateCommandPool = vkr_dispatch_vkCreateCommandPool;
3494 dispatch->dispatch_vkDestroyCommandPool = vkr_dispatch_vkDestroyCommandPool;
3495 dispatch->dispatch_vkResetCommandPool = vkr_dispatch_vkResetCommandPool;
3496 dispatch->dispatch_vkTrimCommandPool = vkr_dispatch_vkTrimCommandPool;
3497
3498 dispatch->dispatch_vkAllocateCommandBuffers = vkr_dispatch_vkAllocateCommandBuffers;
3499 dispatch->dispatch_vkFreeCommandBuffers = vkr_dispatch_vkFreeCommandBuffers;
3500 dispatch->dispatch_vkResetCommandBuffer = vkr_dispatch_vkResetCommandBuffer;
3501 dispatch->dispatch_vkBeginCommandBuffer = vkr_dispatch_vkBeginCommandBuffer;
3502 dispatch->dispatch_vkEndCommandBuffer = vkr_dispatch_vkEndCommandBuffer;
3503
3504 dispatch->dispatch_vkCmdBindPipeline = vkr_dispatch_vkCmdBindPipeline;
3505 dispatch->dispatch_vkCmdSetViewport = vkr_dispatch_vkCmdSetViewport;
3506 dispatch->dispatch_vkCmdSetScissor = vkr_dispatch_vkCmdSetScissor;
3507 dispatch->dispatch_vkCmdSetLineWidth = vkr_dispatch_vkCmdSetLineWidth;
3508 dispatch->dispatch_vkCmdSetDepthBias = vkr_dispatch_vkCmdSetDepthBias;
3509 dispatch->dispatch_vkCmdSetBlendConstants = vkr_dispatch_vkCmdSetBlendConstants;
3510 dispatch->dispatch_vkCmdSetDepthBounds = vkr_dispatch_vkCmdSetDepthBounds;
3511 dispatch->dispatch_vkCmdSetStencilCompareMask = vkr_dispatch_vkCmdSetStencilCompareMask;
3512 dispatch->dispatch_vkCmdSetStencilWriteMask = vkr_dispatch_vkCmdSetStencilWriteMask;
3513 dispatch->dispatch_vkCmdSetStencilReference = vkr_dispatch_vkCmdSetStencilReference;
3514 dispatch->dispatch_vkCmdBindDescriptorSets = vkr_dispatch_vkCmdBindDescriptorSets;
3515 dispatch->dispatch_vkCmdBindIndexBuffer = vkr_dispatch_vkCmdBindIndexBuffer;
3516 dispatch->dispatch_vkCmdBindVertexBuffers = vkr_dispatch_vkCmdBindVertexBuffers;
3517 dispatch->dispatch_vkCmdDraw = vkr_dispatch_vkCmdDraw;
3518 dispatch->dispatch_vkCmdDrawIndexed = vkr_dispatch_vkCmdDrawIndexed;
3519 dispatch->dispatch_vkCmdDrawIndirect = vkr_dispatch_vkCmdDrawIndirect;
3520 dispatch->dispatch_vkCmdDrawIndexedIndirect = vkr_dispatch_vkCmdDrawIndexedIndirect;
3521 dispatch->dispatch_vkCmdDispatch = vkr_dispatch_vkCmdDispatch;
3522 dispatch->dispatch_vkCmdDispatchIndirect = vkr_dispatch_vkCmdDispatchIndirect;
3523 dispatch->dispatch_vkCmdCopyBuffer = vkr_dispatch_vkCmdCopyBuffer;
3524 dispatch->dispatch_vkCmdCopyImage = vkr_dispatch_vkCmdCopyImage;
3525 dispatch->dispatch_vkCmdBlitImage = vkr_dispatch_vkCmdBlitImage;
3526 dispatch->dispatch_vkCmdCopyBufferToImage = vkr_dispatch_vkCmdCopyBufferToImage;
3527 dispatch->dispatch_vkCmdCopyImageToBuffer = vkr_dispatch_vkCmdCopyImageToBuffer;
3528 dispatch->dispatch_vkCmdUpdateBuffer = vkr_dispatch_vkCmdUpdateBuffer;
3529 dispatch->dispatch_vkCmdFillBuffer = vkr_dispatch_vkCmdFillBuffer;
3530 dispatch->dispatch_vkCmdClearColorImage = vkr_dispatch_vkCmdClearColorImage;
3531 dispatch->dispatch_vkCmdClearDepthStencilImage = vkr_dispatch_vkCmdClearDepthStencilImage;
3532 dispatch->dispatch_vkCmdClearAttachments = vkr_dispatch_vkCmdClearAttachments;
3533 dispatch->dispatch_vkCmdResolveImage = vkr_dispatch_vkCmdResolveImage;
3534 dispatch->dispatch_vkCmdSetEvent = vkr_dispatch_vkCmdSetEvent;
3535 dispatch->dispatch_vkCmdResetEvent = vkr_dispatch_vkCmdResetEvent;
3536 dispatch->dispatch_vkCmdWaitEvents = vkr_dispatch_vkCmdWaitEvents;
3537 dispatch->dispatch_vkCmdPipelineBarrier = vkr_dispatch_vkCmdPipelineBarrier;
3538 dispatch->dispatch_vkCmdBeginQuery = vkr_dispatch_vkCmdBeginQuery;
3539 dispatch->dispatch_vkCmdEndQuery = vkr_dispatch_vkCmdEndQuery;
3540 dispatch->dispatch_vkCmdResetQueryPool = vkr_dispatch_vkCmdResetQueryPool;
3541 dispatch->dispatch_vkCmdWriteTimestamp = vkr_dispatch_vkCmdWriteTimestamp;
3542 dispatch->dispatch_vkCmdCopyQueryPoolResults = vkr_dispatch_vkCmdCopyQueryPoolResults;
3543 dispatch->dispatch_vkCmdPushConstants = vkr_dispatch_vkCmdPushConstants;
3544 dispatch->dispatch_vkCmdBeginRenderPass = vkr_dispatch_vkCmdBeginRenderPass;
3545 dispatch->dispatch_vkCmdNextSubpass = vkr_dispatch_vkCmdNextSubpass;
3546 dispatch->dispatch_vkCmdEndRenderPass = vkr_dispatch_vkCmdEndRenderPass;
3547 dispatch->dispatch_vkCmdExecuteCommands = vkr_dispatch_vkCmdExecuteCommands;
3548 dispatch->dispatch_vkCmdSetDeviceMask = vkr_dispatch_vkCmdSetDeviceMask;
3549 dispatch->dispatch_vkCmdDispatchBase = vkr_dispatch_vkCmdDispatchBase;
3550 dispatch->dispatch_vkCmdBeginRenderPass2 = vkr_dispatch_vkCmdBeginRenderPass2;
3551 dispatch->dispatch_vkCmdNextSubpass2 = vkr_dispatch_vkCmdNextSubpass2;
3552 dispatch->dispatch_vkCmdEndRenderPass2 = vkr_dispatch_vkCmdEndRenderPass2;
3553 dispatch->dispatch_vkCmdDrawIndirectCount = vkr_dispatch_vkCmdDrawIndirectCount;
3554 dispatch->dispatch_vkCmdDrawIndexedIndirectCount = vkr_dispatch_vkCmdDrawIndexedIndirectCount;
3555
3556 dispatch->dispatch_vkCmdBindTransformFeedbackBuffersEXT = vkr_dispatch_vkCmdBindTransformFeedbackBuffersEXT;
3557 dispatch->dispatch_vkCmdBeginTransformFeedbackEXT = vkr_dispatch_vkCmdBeginTransformFeedbackEXT;
3558 dispatch->dispatch_vkCmdEndTransformFeedbackEXT = vkr_dispatch_vkCmdEndTransformFeedbackEXT;
3559 dispatch->dispatch_vkCmdBeginQueryIndexedEXT = vkr_dispatch_vkCmdBeginQueryIndexedEXT;
3560 dispatch->dispatch_vkCmdEndQueryIndexedEXT = vkr_dispatch_vkCmdEndQueryIndexedEXT;
3561 dispatch->dispatch_vkCmdDrawIndirectByteCountEXT = vkr_dispatch_vkCmdDrawIndirectByteCountEXT;
3562
3563 dispatch->dispatch_vkGetImageDrmFormatModifierPropertiesEXT = vkr_dispatch_vkGetImageDrmFormatModifierPropertiesEXT;
3564
3565 dispatch->dispatch_vkGetMemoryResourcePropertiesMESA = vkr_dispatch_vkGetMemoryResourcePropertiesMESA;
3566}
3567
3568static int
3569vkr_context_submit_fence_locked(struct virgl_context *base,
3570 uint32_t flags,
3571 uint64_t queue_id,
3572 void *fence_cookie)
3573{
3574 struct vkr_context *ctx = (struct vkr_context *)base;
3575 struct vkr_queue *queue;
3576 VkResult result;
3577
3578 queue = util_hash_table_get_u64(ctx->object_table, queue_id);
3579 if (!queue)
3580 return -EINVAL;
3581 struct vkr_device *dev = queue->device;
3582
3583 struct vkr_queue_sync *sync;
3584 if (LIST_IS_EMPTY(&dev->free_syncs)) {
3585 sync = malloc(sizeof(*sync));
3586 if (!sync)
3587 return -ENOMEM;
3588
3589 const struct VkFenceCreateInfo create_info = {
3590 .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
3591 };
3592 result = vkCreateFence(dev->base.handle.device, &create_info, NULL, &sync->fence);
3593 if (result != VK_SUCCESS) {
3594 free(sync);
3595 return -ENOMEM;
3596 }
3597 } else {
3598 sync = LIST_ENTRY(struct vkr_queue_sync, dev->free_syncs.next, head);
3599 list_del(&sync->head);
3600 vkResetFences(dev->base.handle.device, 1, &sync->fence);
3601 }
3602
3603 result = vkQueueSubmit(queue->base.handle.queue, 0, NULL, sync->fence);
3604 if (result != VK_SUCCESS) {
3605 list_add(&sync->head, &dev->free_syncs);
3606 return -1;
3607 }
3608
3609 sync->flags = flags;
3610 sync->fence_cookie = fence_cookie;
3611
3612 if (queue->has_thread) {
3613 mtx_lock(&queue->mutex);
3614 list_addtail(&sync->head, &queue->pending_syncs);
3615 mtx_unlock(&queue->mutex);
3616 cnd_signal(&queue->cond);
3617 } else {
3618 list_addtail(&sync->head, &queue->pending_syncs);
3619 }
3620
3621 if (LIST_IS_EMPTY(&queue->busy_head))
3622 list_addtail(&queue->busy_head, &ctx->busy_queues);
3623
3624 return 0;
3625}
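/*
 * A minimal sketch of the fence recycling above: each device keeps a pool
 * of vkr_queue_sync objects, and a submission with no command buffers is
 * used purely to get a fence signaled once previously submitted work on
 * that queue has completed:
 *
 *   vkResetFences(dev_handle, 1, &sync->fence);
 *   vkQueueSubmit(queue_handle, 0, NULL, sync->fence);
 *
 * With VKR_RENDERER_THREAD_SYNC the sync is handed to the queue's worker
 * thread; otherwise it is polled from vkr_context_retire_fences.
 */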
3626
3627static int
3628vkr_context_submit_fence(struct virgl_context *base,
3629 uint32_t flags,
3630 uint64_t queue_id,
3631 void *fence_cookie)
3632{
3633 struct vkr_context *ctx = (struct vkr_context *)base;
3634 int ret;
3635
3636 mtx_lock(&ctx->mutex);
3637 ret = vkr_context_submit_fence_locked(base, flags, queue_id, fence_cookie);
3638 mtx_unlock(&ctx->mutex);
3639 return ret;
3640}
3641
3642static void
3643vkr_context_retire_fences_locked(UNUSED struct virgl_context *base)
3644{
3645 struct vkr_context *ctx = (struct vkr_context *)base;
3646 struct vkr_queue_sync *sync, *sync_tmp;
3647 struct vkr_queue *queue, *queue_tmp;
3648
3649 /* flush first, and only once, because the per-queue sync threads might
3650 * write to it at any time
3651 */
3652 if (ctx->fence_eventfd >= 0)
3653 flush_eventfd(ctx->fence_eventfd);
3654
3655 LIST_FOR_EACH_ENTRY_SAFE(queue, queue_tmp, &ctx->busy_queues, busy_head) {
3656 struct vkr_device *dev = queue->device;
3657 struct list_head retired_syncs;
3658 bool queue_empty;
3659
3660 vkr_queue_retire_syncs(queue, &retired_syncs, &queue_empty);
3661
3662 LIST_FOR_EACH_ENTRY_SAFE(sync, sync_tmp, &retired_syncs, head) {
3663 ctx->base.fence_retire(&ctx->base,
3664 queue->base.id,
3665 sync->fence_cookie);
3666 list_addtail(&sync->head, &dev->free_syncs);
3667 }
3668
3669 if (queue_empty)
3670 list_delinit(&queue->busy_head);
3671 }
3672}
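/*
 * The eventfd is drained once up front because, under
 * VKR_RENDERER_THREAD_SYNC, each per-queue worker may signal it at any
 * time.  Assuming the usual eventfd counter semantics, the pairing is
 * roughly:
 *
 *   write(ctx->fence_eventfd, &(uint64_t){1}, 8);   // worker: wake caller
 *   flush_eventfd(ctx->fence_eventfd);              // here: drain wakeups
 */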
3673
3674static void
3675vkr_context_retire_fences(struct virgl_context *base)
3676{
3677 struct vkr_context *ctx = (struct vkr_context *)base;
3678 mtx_lock(&ctx->mutex);
3679 vkr_context_retire_fences_locked(base);
3680 mtx_unlock(&ctx->mutex);
3681}
3682
3683static int vkr_context_get_fencing_fd(struct virgl_context *base)
3684{
3685 struct vkr_context *ctx = (struct vkr_context *)base;
3686 return ctx->fence_eventfd;
3687}
3688
3689static int vkr_context_submit_cmd(struct virgl_context *base,
3690 const void *buffer,
3691 size_t size)
3692{
3693 struct vkr_context *ctx = (struct vkr_context *)base;
3694 int ret = 0;
3695
3696 mtx_lock(&ctx->mutex);
3697
3698 vkr_cs_decoder_set_stream(&ctx->decoder, buffer, size);
3699
3700 while (vkr_cs_decoder_has_command(&ctx->decoder)) {
3701 vn_dispatch_command(&ctx->dispatch);
3702 /* TODO treat the client as malicious and disconnect it */
3703 if (vkr_cs_decoder_get_fatal(&ctx->decoder)) {
3704 ret = EINVAL;
3705 break;
3706 }
3707 }
3708
3709 vkr_cs_decoder_reset(&ctx->decoder);
3710
3711 mtx_unlock(&ctx->mutex);
3712
3713 return ret;
3714}
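/*
 * Commands are decoded and executed in order from the guest stream above.
 * Any decode or validation failure marks the decoder fatal, which aborts
 * the remainder of the submission and surfaces EINVAL to the caller; the
 * decoder is then reset so a later submission starts clean.
 */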
3715
3716static int vkr_context_get_blob_locked(struct virgl_context *base,
3717 uint64_t blob_id,
3718 uint32_t flags,
3719 struct virgl_context_blob *blob)
3720{
3721 struct vkr_context *ctx = (struct vkr_context *)base;
3722 struct vkr_device_memory *mem;
3723 enum virgl_resource_fd_type fd_type = VIRGL_RESOURCE_FD_INVALID;
3724
3725 mem = util_hash_table_get_u64(ctx->object_table, blob_id);
3726 if (!mem || mem->base.type != VK_OBJECT_TYPE_DEVICE_MEMORY)
3727 return EINVAL;
3728
3729 /* a memory object can only be exported once; we don't want two resources
3730 * to point to the same storage.
3731 */
3732 if (mem->exported)
3733 return EINVAL;
3734
3735 if (!mem->valid_fd_types)
3736 return EINVAL;
3737
3738 if (flags & VIRGL_RENDERER_BLOB_FLAG_USE_MAPPABLE) {
3739 const bool host_visible =
3740 mem->property_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3741 if (!host_visible)
3742 return EINVAL;
3743 }
3744
3745 if (flags & VIRGL_RENDERER_BLOB_FLAG_USE_CROSS_DEVICE) {
3746 if (!(mem->valid_fd_types & (1 << VIRGL_RESOURCE_FD_DMABUF)))
3747 return EINVAL;
3748
3749 fd_type = VIRGL_RESOURCE_FD_DMABUF;
3750 }
3751
3752 if (fd_type == VIRGL_RESOURCE_FD_INVALID) {
3753 /* prefer dmabuf for easier mapping? prefer opaque for performance? */
3754 if (mem->valid_fd_types & (1 << VIRGL_RESOURCE_FD_DMABUF))
3755 fd_type = VIRGL_RESOURCE_FD_DMABUF;
3756 else if (mem->valid_fd_types & (1 << VIRGL_RESOURCE_FD_OPAQUE))
3757 fd_type = VIRGL_RESOURCE_FD_OPAQUE;
3758 }
3759
3760 int fd = -1;
3761 if (fd_type != VIRGL_RESOURCE_FD_INVALID) {
3762 VkExternalMemoryHandleTypeFlagBits handle_type;
3763 switch (fd_type) {
3764 case VIRGL_RESOURCE_FD_DMABUF:
3765 handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
3766 break;
3767 case VIRGL_RESOURCE_FD_OPAQUE:
3768 handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
3769 break;
3770 default:
3771 return EINVAL;
3772 }
3773
3774 VkResult result = ctx->instance->get_memory_fd(mem->device,
3775 &(VkMemoryGetFdInfoKHR){
3776 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
3777 .memory = mem->base.handle.device_memory,
3778 .handleType = handle_type,
3779 }, &fd);
3780 if (result != VK_SUCCESS)
3781 return EINVAL;
3782 }
3783
3784 blob->type = fd_type;
3785 blob->u.fd = fd;
3786
3787 if (flags & VIRGL_RENDERER_BLOB_FLAG_USE_MAPPABLE) {
3788 const bool host_coherent =
3789 mem->property_flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
3790 const bool host_cached =
3791 mem->property_flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3792
3793 /* XXX guessed */
3794 if (host_coherent) {
3795 blob->map_info = host_cached ?
3796 VIRGL_RENDERER_MAP_CACHE_CACHED : VIRGL_RENDERER_MAP_CACHE_WC;
3797 } else {
3798 blob->map_info = VIRGL_RENDERER_MAP_CACHE_WC;
3799 }
3800 } else {
3801 blob->map_info = VIRGL_RENDERER_MAP_CACHE_NONE;
3802 }
3803
3804 blob->renderer_data = mem;
3805
3806 return 0;
3807}
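/*
 * Blob export above, in brief: blob_id must name a VkDeviceMemory created
 * by this context, and the memory is exported at most once, as a dmabuf
 * or an opaque fd.  map_info is a guess derived from the memory property
 * flags, roughly:
 *
 *   HOST_VISIBLE | HOST_COHERENT | HOST_CACHED   -> MAP_CACHE_CACHED
 *   HOST_VISIBLE | HOST_COHERENT                 -> MAP_CACHE_WC
 *   HOST_VISIBLE only                            -> MAP_CACHE_WC
 *   not mappable (or mapping not requested)      -> MAP_CACHE_NONE
 */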
3808
3809static int vkr_context_get_blob(struct virgl_context *base,
3810 uint64_t blob_id,
3811 uint32_t flags,
3812 struct virgl_context_blob *blob)
3813{
3814 struct vkr_context *ctx = (struct vkr_context *)base;
3815 int ret;
3816
3817 mtx_lock(&ctx->mutex);
3818 ret = vkr_context_get_blob_locked(base, blob_id, flags, blob);
3819 /* XXX unlock in vkr_context_get_blob_done on success */
3820 if (ret)
3821 mtx_unlock(&ctx->mutex);
3822
3823 return ret;
3824}
3825
3826static void vkr_context_get_blob_done(struct virgl_context *base,
3827 uint32_t res_id,
3828 struct virgl_context_blob *blob)
3829{
3830 struct vkr_context *ctx = (struct vkr_context *)base;
3831 struct vkr_device_memory *mem = blob->renderer_data;
3832
3833 mem->exported = true;
3834 mem->exported_res_id = res_id;
3835 list_add(&mem->head, &ctx->newly_exported_memories);
3836
3837 /* XXX locked in vkr_context_get_blob */
3838 mtx_unlock(&ctx->mutex);
3839}
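/*
 * Note the asymmetric locking across the two callbacks above: the context
 * mutex acquired in vkr_context_get_blob is held while the caller turns
 * the exported fd into a resource, and is only released here in
 * vkr_context_get_blob_done once the memory has been recorded:
 *
 *   mtx_lock(&ctx->mutex);     // vkr_context_get_blob (on success)
 *   ...caller creates the resource...
 *   mtx_unlock(&ctx->mutex);   // vkr_context_get_blob_done
 */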
3840
3841static int vkr_context_transfer_3d_locked(struct virgl_context *base,
3842 struct virgl_resource *res,
3843 const struct vrend_transfer_info *info,
3844 int transfer_mode)
3845{
3846 struct vkr_context *ctx = (struct vkr_context *)base;
3847 struct vkr_resource_attachment *att;
3848 const struct iovec *iov;
3849 int iov_count;
3850
3851 if (info->level || info->stride || info->layer_stride)
3852 return EINVAL;
3853
3854 if (info->iovec) {
3855 iov = info->iovec;
3856 iov_count = info->iovec_cnt;
3857 } else {
3858 iov = res->iov;
3859 iov_count = res->iov_count;
3860 }
3861
3862 if (!iov || !iov_count)
3863 return 0;
3864
3865 att = util_hash_table_get(ctx->resource_table,
3866 uintptr_to_pointer(res->res_id));
3867 if (!att)
3868 return EINVAL;
3869
3870 assert(att->resource == res);
3871
3872 /* TODO transfer via dmabuf (and find a solution to coherency issues) */
3873 if (LIST_IS_EMPTY(&att->memories)) {
3874 vrend_printf("unable to transfer without VkDeviceMemory (TODO)\n");
3875 return EINVAL;
3876 }
3877
3878 struct vkr_device_memory *mem =
3879 LIST_ENTRY(struct vkr_device_memory, att->memories.next, head);
3880 const VkMappedMemoryRange range = {
3881 .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
3882 .memory = mem->base.handle.device_memory,
3883 .offset = info->box->x,
3884 .size = info->box->width,
3885 };
3886
3887 void *ptr;
3888 VkResult result = vkMapMemory(mem->device, range.memory,
3889 range.offset, range.size, 0, &ptr);
3890 if (result != VK_SUCCESS)
3891 return EINVAL;
3892
3893 if (transfer_mode == VIRGL_TRANSFER_TO_HOST) {
3894 vrend_read_from_iovec(iov, iov_count, range.offset, ptr, range.size);
3895 vkFlushMappedMemoryRanges(mem->device, 1, &range);
3896 } else {
3897 vkInvalidateMappedMemoryRanges(mem->device, 1, &range);
3898 vrend_write_to_iovec(iov, iov_count, range.offset, ptr, range.size);
3899 }
3900
3901 vkUnmapMemory(mem->device, range.memory);
3902
3903 return 0;
3904}
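/*
 * Transfers above treat the resource as a linear byte range backed by the
 * first attached VkDeviceMemory: box->x is the byte offset and box->width
 * the byte size.  The direction determines the cache maintenance,
 * roughly:
 *
 *   VIRGL_TRANSFER_TO_HOST:  copy iov -> mapping, then flush
 *   otherwise (to guest):    invalidate, then copy mapping -> iov
 */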
3905
3906static int vkr_context_transfer_3d(struct virgl_context *base,
3907 struct virgl_resource *res,
3908 const struct vrend_transfer_info *info,
3909 int transfer_mode)
3910{
3911 struct vkr_context *ctx = (struct vkr_context *)base;
3912 int ret;
3913
3914 mtx_lock(&ctx->mutex);
3915 ret = vkr_context_transfer_3d_locked(base, res, info, transfer_mode);
3916 mtx_unlock(&ctx->mutex);
3917
3918 return ret;
3919}
3920
3921static void vkr_context_attach_resource_locked(struct virgl_context *base,
3922 struct virgl_resource *res)
3923{
3924 struct vkr_context *ctx = (struct vkr_context *)base;
3925 struct vkr_resource_attachment *att;
3926
3927 att = util_hash_table_get(ctx->resource_table,
3928 uintptr_to_pointer(res->res_id));
3929 if (att) {
3930 assert(att->resource == res);
3931 return;
3932 }
3933
3934 att = calloc(1, sizeof(*att));
3935 if (!att)
3936 return;
3937
3938 /* TODO When in multi-process mode, we cannot share a virgl_resource as-is
3939 * to another process. The resource must have a valid fd, and only the fd
3940 * and the iov can be sent to the other process.
3941 *
3942 * For vrend-to-vkr sharing, we can get the fd from pipe_resource.
3943 */
3944
3945 att->resource = res;
3946 list_inithead(&att->memories);
3947
3948 /* associate a memory with the resource, if any */
3949 struct vkr_device_memory *mem;
3950 LIST_FOR_EACH_ENTRY(mem, &ctx->newly_exported_memories, head) {
3951 if (mem->exported_res_id == res->res_id) {
3952 list_del(&mem->head);
3953 list_addtail(&mem->head, &att->memories);
3954 break;
3955 }
3956 }
3957
3958 util_hash_table_set(ctx->resource_table,
3959 uintptr_to_pointer(res->res_id),
3960 att);
3961}
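/*
 * Resource/memory association handshake, in order: vkr_context_get_blob
 * exports the memory, vkr_context_get_blob_done records it on
 * newly_exported_memories with the new res_id, and the attach above moves
 * it onto the attachment's memory list so transfers can find it.
 */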
3962
3963static void vkr_context_attach_resource(struct virgl_context *base,
3964 struct virgl_resource *res)
3965{
3966 struct vkr_context *ctx = (struct vkr_context *)base;
3967 mtx_lock(&ctx->mutex);
3968 vkr_context_attach_resource_locked(base, res);
3969 mtx_unlock(&ctx->mutex);
3970}
3971
3972static void vkr_context_detach_resource(struct virgl_context *base,
3973 struct virgl_resource *res)
3974{
3975 struct vkr_context *ctx = (struct vkr_context *)base;
3976
3977 mtx_lock(&ctx->mutex);
3978 util_hash_table_remove(ctx->resource_table,
3979 uintptr_to_pointer(res->res_id));
3980 mtx_unlock(&ctx->mutex);
3981}
3982
3983static void vkr_context_destroy(struct virgl_context *base)
3984{
3985 struct vkr_context *ctx = (struct vkr_context *)base;
3986
3987 struct vkr_ring *ring, *ring_tmp;
3988 LIST_FOR_EACH_ENTRY_SAFE(ring, ring_tmp, &ctx->rings, head) {
3989 vkr_ring_stop(ring);
3990 vkr_ring_destroy(ring);
3991 }
3992
3993 /* TODO properly destroy all Vulkan objects */
3994 util_hash_table_destroy(ctx->resource_table);
3995 util_hash_table_destroy_u64(ctx->object_table);
3996
3997 if (ctx->fence_eventfd >= 0)
3998 close(ctx->fence_eventfd);
3999
4000 vkr_cs_decoder_fini(&ctx->decoder);
4001
4002 mtx_destroy(&ctx->mutex);
4003 free(ctx->debug_name);
4004 free(ctx);
4005}
4006
4007static void
4008vkr_context_init_base(struct vkr_context *ctx)
4009{
4010 ctx->base.destroy = vkr_context_destroy;
4011 ctx->base.attach_resource = vkr_context_attach_resource;
4012 ctx->base.detach_resource = vkr_context_detach_resource;
4013 ctx->base.transfer_3d = vkr_context_transfer_3d;
4014 ctx->base.get_blob = vkr_context_get_blob;
4015 ctx->base.get_blob_done = vkr_context_get_blob_done;
4016 ctx->base.submit_cmd = vkr_context_submit_cmd;
4017
4018 ctx->base.get_fencing_fd = vkr_context_get_fencing_fd;
4019 ctx->base.retire_fences = vkr_context_retire_fences;
4020 ctx->base.submit_fence = vkr_context_submit_fence;
4021}
4022
4023static void
4024destroy_func_object(void *val)
4025{
4026 struct vkr_object *obj = val;
4027 free(obj);
4028}
4029
4030static void
4031destroy_func_resource(void *val)
4032{
4033 struct vkr_resource_attachment *att = val;
4034 struct vkr_device_memory *mem, *tmp;
4035
4036 LIST_FOR_EACH_ENTRY_SAFE(mem, tmp, &att->memories, head)
4037 list_delinit(&mem->head);
4038
4039 free(att);
4040}
4041
4042struct virgl_context *
4043vkr_context_create(size_t debug_len, const char *debug_name)
4044{
4045 struct vkr_context *ctx;
4046
4047 /* TODO inject a proxy context when multi-process */
4048
4049 ctx = calloc(1, sizeof(*ctx));
4050 if (!ctx)
4051 return NULL;
4052
4053 ctx->debug_name = malloc(debug_len + 1);
4054 if (!ctx->debug_name) {
4055 free(ctx);
4056 return NULL;
4057 }
4058
4059 memcpy(ctx->debug_name, debug_name, debug_len);
4060 ctx->debug_name[debug_len] = '\0';
4061
4062 if (mtx_init(&ctx->mutex, mtx_plain) != thrd_success) {
4063 free(ctx->debug_name);
4064 free(ctx);
4065 return NULL;
4066 }
4067
4068 list_inithead(&ctx->rings);
4069
4070 ctx->object_table =
4071 util_hash_table_create_u64(destroy_func_object);
4072 ctx->resource_table =
4073 util_hash_table_create(hash_func_u32,
4074 compare_func,
4075 destroy_func_resource);
4076 if (!ctx->object_table || !ctx->resource_table)
4077 goto fail;
4078
4079 list_inithead(&ctx->newly_exported_memories);
4080
4081 vkr_cs_decoder_init(&ctx->decoder, ctx->object_table);
4082 vkr_cs_encoder_init(&ctx->encoder, &ctx->decoder.fatal_error);
4083
4084 vkr_context_init_base(ctx);
4085 vkr_context_init_dispatch(ctx);
4086
4087 if (vkr_renderer_flags & VKR_RENDERER_THREAD_SYNC) {
4088 ctx->fence_eventfd = create_eventfd(0);
4089 if (ctx->fence_eventfd < 0)
4090 goto fail;
4091 } else {
4092 ctx->fence_eventfd = -1;
4093 }
4094
4095 list_inithead(&ctx->busy_queues);
4096
4097 return &ctx->base;
4098
4099fail:
4100 if (ctx->object_table)
4101 util_hash_table_destroy_u64(ctx->object_table);
4102 if (ctx->resource_table)
4103 util_hash_table_destroy(ctx->resource_table);
4104 mtx_destroy(&ctx->mutex);
4105 free(ctx->debug_name);
4106 free(ctx);
4107 return NULL;
4108}
4109
4110size_t
4111vkr_get_capset(void *capset)
4112{
4113 struct virgl_renderer_capset_venus *c = capset;
4114 if (c) {
4115 memset(c, 0, sizeof(*c));
4116 c->wire_format_version = vn_info_wire_format_version();
4117 c->vk_xml_version = vn_info_vk_xml_version();
4118 c->vk_ext_command_serialization_spec_version = vn_info_extension_spec_version("VK_EXT_command_serialization");
4119 c->vk_mesa_venus_protocol_spec_version = vn_info_extension_spec_version("VK_MESA_venus_protocol");
4120 }
4121
4122 return sizeof(*c);
4123}
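/*
 * A minimal caller-side sketch of the usual two-call pattern (local names
 * are illustrative):
 *
 *   size_t size = vkr_get_capset(NULL);    // query the size
 *   struct virgl_renderer_capset_venus caps;
 *   assert(size == sizeof(caps));
 *   vkr_get_capset(&caps);                 // fill it in
 */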
4124
4125int
4126vkr_renderer_init(uint32_t flags)
4127{
4128 /* TODO VKR_RENDERER_MULTI_PROCESS hint */
4129
4130 vkr_renderer_flags = flags;
4131
4132 return 0;
4133}
4134
4135void
4136vkr_renderer_fini(void)
4137{
4138 vkr_renderer_flags = 0;
4139}
4140
4141void
4142vkr_renderer_reset(void)
4143{
4144}