/* This file is generated by venus-protocol. See vn_protocol_driver.h. */

/*
 * Copyright 2020 Google LLC
 * SPDX-License-Identifier: MIT
 */

#ifndef VN_PROTOCOL_DRIVER_QUEUE_H
#define VN_PROTOCOL_DRIVER_QUEUE_H

#include "vn_instance.h"
#include "vn_protocol_driver_structs.h"
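
/*
 * Queue command serialization helpers.  Each serializable struct below has a
 * vn_sizeof_ and a vn_encode_ helper (split into _pnext, _self, and full
 * variants when the struct carries a pNext chain), and each queue command has
 * vn_sizeof_, vn_encode_, _reply sizing and decoding, vn_submit_, vn_call_,
 * and vn_async_ helpers built on top of them.
 */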

/* struct VkDeviceGroupSubmitInfo chain */

static inline size_t
vn_sizeof_VkDeviceGroupSubmitInfo_pnext(const void *val)
{
    /* no known/supported struct */
    return vn_sizeof_simple_pointer(NULL);
}

static inline size_t
vn_sizeof_VkDeviceGroupSubmitInfo_self(const VkDeviceGroupSubmitInfo *val)
{
    size_t size = 0;
    /* skip val->{sType,pNext} */
    size += vn_sizeof_uint32_t(&val->waitSemaphoreCount);
    if (val->pWaitSemaphoreDeviceIndices) {
        size += vn_sizeof_array_size(val->waitSemaphoreCount);
        size += vn_sizeof_uint32_t_array(val->pWaitSemaphoreDeviceIndices, val->waitSemaphoreCount);
    } else {
        size += vn_sizeof_array_size(0);
    }
    size += vn_sizeof_uint32_t(&val->commandBufferCount);
    if (val->pCommandBufferDeviceMasks) {
        size += vn_sizeof_array_size(val->commandBufferCount);
        size += vn_sizeof_uint32_t_array(val->pCommandBufferDeviceMasks, val->commandBufferCount);
    } else {
        size += vn_sizeof_array_size(0);
    }
    size += vn_sizeof_uint32_t(&val->signalSemaphoreCount);
    if (val->pSignalSemaphoreDeviceIndices) {
        size += vn_sizeof_array_size(val->signalSemaphoreCount);
        size += vn_sizeof_uint32_t_array(val->pSignalSemaphoreDeviceIndices, val->signalSemaphoreCount);
    } else {
        size += vn_sizeof_array_size(0);
    }
    return size;
}

static inline size_t
vn_sizeof_VkDeviceGroupSubmitInfo(const VkDeviceGroupSubmitInfo *val)
{
    size_t size = 0;

    size += vn_sizeof_VkStructureType(&val->sType);
    size += vn_sizeof_VkDeviceGroupSubmitInfo_pnext(val->pNext);
    size += vn_sizeof_VkDeviceGroupSubmitInfo_self(val);

    return size;
}

static inline void
vn_encode_VkDeviceGroupSubmitInfo_pnext(struct vn_cs_encoder *enc, const void *val)
{
    /* no known/supported struct */
    vn_encode_simple_pointer(enc, NULL);
}

static inline void
vn_encode_VkDeviceGroupSubmitInfo_self(struct vn_cs_encoder *enc, const VkDeviceGroupSubmitInfo *val)
{
    /* skip val->{sType,pNext} */
    vn_encode_uint32_t(enc, &val->waitSemaphoreCount);
    if (val->pWaitSemaphoreDeviceIndices) {
        vn_encode_array_size(enc, val->waitSemaphoreCount);
        vn_encode_uint32_t_array(enc, val->pWaitSemaphoreDeviceIndices, val->waitSemaphoreCount);
    } else {
        vn_encode_array_size(enc, 0);
    }
    vn_encode_uint32_t(enc, &val->commandBufferCount);
    if (val->pCommandBufferDeviceMasks) {
        vn_encode_array_size(enc, val->commandBufferCount);
        vn_encode_uint32_t_array(enc, val->pCommandBufferDeviceMasks, val->commandBufferCount);
    } else {
        vn_encode_array_size(enc, 0);
    }
    vn_encode_uint32_t(enc, &val->signalSemaphoreCount);
    if (val->pSignalSemaphoreDeviceIndices) {
        vn_encode_array_size(enc, val->signalSemaphoreCount);
        vn_encode_uint32_t_array(enc, val->pSignalSemaphoreDeviceIndices, val->signalSemaphoreCount);
    } else {
        vn_encode_array_size(enc, 0);
    }
}

static inline void
vn_encode_VkDeviceGroupSubmitInfo(struct vn_cs_encoder *enc, const VkDeviceGroupSubmitInfo *val)
{
    assert(val->sType == VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO);
    vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO });
    vn_encode_VkDeviceGroupSubmitInfo_pnext(enc, val->pNext);
    vn_encode_VkDeviceGroupSubmitInfo_self(enc, val);
}
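
/*
 * Chainable structs such as VkDeviceGroupSubmitInfo above are handled in three
 * parts: the _pnext helper serializes the (here unsupported) extension chain,
 * the _self helper serializes the members while skipping sType and pNext, and
 * the full helper asserts the expected sType, writes it, and delegates to the
 * other two.  The vn_sizeof_ and vn_encode_ sides mirror each other so the
 * size computed up front matches what the encoder writes.
 */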

/* struct VkProtectedSubmitInfo chain */

static inline size_t
vn_sizeof_VkProtectedSubmitInfo_pnext(const void *val)
{
    /* no known/supported struct */
    return vn_sizeof_simple_pointer(NULL);
}

static inline size_t
vn_sizeof_VkProtectedSubmitInfo_self(const VkProtectedSubmitInfo *val)
{
    size_t size = 0;
    /* skip val->{sType,pNext} */
    size += vn_sizeof_VkBool32(&val->protectedSubmit);
    return size;
}

static inline size_t
vn_sizeof_VkProtectedSubmitInfo(const VkProtectedSubmitInfo *val)
{
    size_t size = 0;

    size += vn_sizeof_VkStructureType(&val->sType);
    size += vn_sizeof_VkProtectedSubmitInfo_pnext(val->pNext);
    size += vn_sizeof_VkProtectedSubmitInfo_self(val);

    return size;
}

static inline void
vn_encode_VkProtectedSubmitInfo_pnext(struct vn_cs_encoder *enc, const void *val)
{
    /* no known/supported struct */
    vn_encode_simple_pointer(enc, NULL);
}

static inline void
vn_encode_VkProtectedSubmitInfo_self(struct vn_cs_encoder *enc, const VkProtectedSubmitInfo *val)
{
    /* skip val->{sType,pNext} */
    vn_encode_VkBool32(enc, &val->protectedSubmit);
}

static inline void
vn_encode_VkProtectedSubmitInfo(struct vn_cs_encoder *enc, const VkProtectedSubmitInfo *val)
{
    assert(val->sType == VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO);
    vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO });
    vn_encode_VkProtectedSubmitInfo_pnext(enc, val->pNext);
    vn_encode_VkProtectedSubmitInfo_self(enc, val);
}

/* struct VkTimelineSemaphoreSubmitInfo chain */

static inline size_t
vn_sizeof_VkTimelineSemaphoreSubmitInfo_pnext(const void *val)
{
    /* no known/supported struct */
    return vn_sizeof_simple_pointer(NULL);
}

static inline size_t
vn_sizeof_VkTimelineSemaphoreSubmitInfo_self(const VkTimelineSemaphoreSubmitInfo *val)
{
    size_t size = 0;
    /* skip val->{sType,pNext} */
    size += vn_sizeof_uint32_t(&val->waitSemaphoreValueCount);
    if (val->pWaitSemaphoreValues) {
        size += vn_sizeof_array_size(val->waitSemaphoreValueCount);
        size += vn_sizeof_uint64_t_array(val->pWaitSemaphoreValues, val->waitSemaphoreValueCount);
    } else {
        size += vn_sizeof_array_size(0);
    }
    size += vn_sizeof_uint32_t(&val->signalSemaphoreValueCount);
    if (val->pSignalSemaphoreValues) {
        size += vn_sizeof_array_size(val->signalSemaphoreValueCount);
        size += vn_sizeof_uint64_t_array(val->pSignalSemaphoreValues, val->signalSemaphoreValueCount);
    } else {
        size += vn_sizeof_array_size(0);
    }
    return size;
}

static inline size_t
vn_sizeof_VkTimelineSemaphoreSubmitInfo(const VkTimelineSemaphoreSubmitInfo *val)
{
    size_t size = 0;

    size += vn_sizeof_VkStructureType(&val->sType);
    size += vn_sizeof_VkTimelineSemaphoreSubmitInfo_pnext(val->pNext);
    size += vn_sizeof_VkTimelineSemaphoreSubmitInfo_self(val);

    return size;
}

static inline void
vn_encode_VkTimelineSemaphoreSubmitInfo_pnext(struct vn_cs_encoder *enc, const void *val)
{
    /* no known/supported struct */
    vn_encode_simple_pointer(enc, NULL);
}

static inline void
vn_encode_VkTimelineSemaphoreSubmitInfo_self(struct vn_cs_encoder *enc, const VkTimelineSemaphoreSubmitInfo *val)
{
    /* skip val->{sType,pNext} */
    vn_encode_uint32_t(enc, &val->waitSemaphoreValueCount);
    if (val->pWaitSemaphoreValues) {
        vn_encode_array_size(enc, val->waitSemaphoreValueCount);
        vn_encode_uint64_t_array(enc, val->pWaitSemaphoreValues, val->waitSemaphoreValueCount);
    } else {
        vn_encode_array_size(enc, 0);
    }
    vn_encode_uint32_t(enc, &val->signalSemaphoreValueCount);
    if (val->pSignalSemaphoreValues) {
        vn_encode_array_size(enc, val->signalSemaphoreValueCount);
        vn_encode_uint64_t_array(enc, val->pSignalSemaphoreValues, val->signalSemaphoreValueCount);
    } else {
        vn_encode_array_size(enc, 0);
    }
}

static inline void
vn_encode_VkTimelineSemaphoreSubmitInfo(struct vn_cs_encoder *enc, const VkTimelineSemaphoreSubmitInfo *val)
{
    assert(val->sType == VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO);
    vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO });
    vn_encode_VkTimelineSemaphoreSubmitInfo_pnext(enc, val->pNext);
    vn_encode_VkTimelineSemaphoreSubmitInfo_self(enc, val);
}

/* struct VkSubmitInfo chain */

static inline size_t
vn_sizeof_VkSubmitInfo_pnext(const void *val)
{
    const VkBaseInStructure *pnext = val;
    size_t size = 0;

    while (pnext) {
        switch ((int32_t)pnext->sType) {
        case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO:
            size += vn_sizeof_simple_pointer(pnext);
            size += vn_sizeof_VkStructureType(&pnext->sType);
            size += vn_sizeof_VkSubmitInfo_pnext(pnext->pNext);
            size += vn_sizeof_VkDeviceGroupSubmitInfo_self((const VkDeviceGroupSubmitInfo *)pnext);
            return size;
        case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO:
            size += vn_sizeof_simple_pointer(pnext);
            size += vn_sizeof_VkStructureType(&pnext->sType);
            size += vn_sizeof_VkSubmitInfo_pnext(pnext->pNext);
            size += vn_sizeof_VkProtectedSubmitInfo_self((const VkProtectedSubmitInfo *)pnext);
            return size;
        case VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO:
            size += vn_sizeof_simple_pointer(pnext);
            size += vn_sizeof_VkStructureType(&pnext->sType);
            size += vn_sizeof_VkSubmitInfo_pnext(pnext->pNext);
            size += vn_sizeof_VkTimelineSemaphoreSubmitInfo_self((const VkTimelineSemaphoreSubmitInfo *)pnext);
            return size;
        default:
            /* ignore unknown/unsupported struct */
            break;
        }
        pnext = pnext->pNext;
    }

    return vn_sizeof_simple_pointer(NULL);
}
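
/*
 * VkSubmitInfo accepts extension structs on its pNext chain.  The helper above
 * walks the application-provided chain; for the first recognized sType it
 * accounts for a non-NULL pointer marker, the sType, the rest of the chain
 * (via recursion), and the struct body.  Unrecognized structs are skipped, and
 * an exhausted chain is sized as a NULL pointer.  vn_encode_VkSubmitInfo_pnext
 * below emits exactly the same layout.
 */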

static inline size_t
vn_sizeof_VkSubmitInfo_self(const VkSubmitInfo *val)
{
    size_t size = 0;
    /* skip val->{sType,pNext} */
    size += vn_sizeof_uint32_t(&val->waitSemaphoreCount);
    if (val->pWaitSemaphores) {
        size += vn_sizeof_array_size(val->waitSemaphoreCount);
        for (uint32_t i = 0; i < val->waitSemaphoreCount; i++)
            size += vn_sizeof_VkSemaphore(&val->pWaitSemaphores[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    if (val->pWaitDstStageMask) {
        size += vn_sizeof_array_size(val->waitSemaphoreCount);
        for (uint32_t i = 0; i < val->waitSemaphoreCount; i++)
            size += vn_sizeof_VkFlags(&val->pWaitDstStageMask[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    size += vn_sizeof_uint32_t(&val->commandBufferCount);
    if (val->pCommandBuffers) {
        size += vn_sizeof_array_size(val->commandBufferCount);
        for (uint32_t i = 0; i < val->commandBufferCount; i++)
            size += vn_sizeof_VkCommandBuffer(&val->pCommandBuffers[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    size += vn_sizeof_uint32_t(&val->signalSemaphoreCount);
    if (val->pSignalSemaphores) {
        size += vn_sizeof_array_size(val->signalSemaphoreCount);
        for (uint32_t i = 0; i < val->signalSemaphoreCount; i++)
            size += vn_sizeof_VkSemaphore(&val->pSignalSemaphores[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    return size;
}

static inline size_t
vn_sizeof_VkSubmitInfo(const VkSubmitInfo *val)
{
    size_t size = 0;

    size += vn_sizeof_VkStructureType(&val->sType);
    size += vn_sizeof_VkSubmitInfo_pnext(val->pNext);
    size += vn_sizeof_VkSubmitInfo_self(val);

    return size;
}

static inline void
vn_encode_VkSubmitInfo_pnext(struct vn_cs_encoder *enc, const void *val)
{
    const VkBaseInStructure *pnext = val;

    while (pnext) {
        switch ((int32_t)pnext->sType) {
        case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO:
            vn_encode_simple_pointer(enc, pnext);
            vn_encode_VkStructureType(enc, &pnext->sType);
            vn_encode_VkSubmitInfo_pnext(enc, pnext->pNext);
            vn_encode_VkDeviceGroupSubmitInfo_self(enc, (const VkDeviceGroupSubmitInfo *)pnext);
            return;
        case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO:
            vn_encode_simple_pointer(enc, pnext);
            vn_encode_VkStructureType(enc, &pnext->sType);
            vn_encode_VkSubmitInfo_pnext(enc, pnext->pNext);
            vn_encode_VkProtectedSubmitInfo_self(enc, (const VkProtectedSubmitInfo *)pnext);
            return;
        case VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO:
            vn_encode_simple_pointer(enc, pnext);
            vn_encode_VkStructureType(enc, &pnext->sType);
            vn_encode_VkSubmitInfo_pnext(enc, pnext->pNext);
            vn_encode_VkTimelineSemaphoreSubmitInfo_self(enc, (const VkTimelineSemaphoreSubmitInfo *)pnext);
            return;
        default:
            /* ignore unknown/unsupported struct */
            break;
        }
        pnext = pnext->pNext;
    }

    vn_encode_simple_pointer(enc, NULL);
}

static inline void
vn_encode_VkSubmitInfo_self(struct vn_cs_encoder *enc, const VkSubmitInfo *val)
{
    /* skip val->{sType,pNext} */
    vn_encode_uint32_t(enc, &val->waitSemaphoreCount);
    if (val->pWaitSemaphores) {
        vn_encode_array_size(enc, val->waitSemaphoreCount);
        for (uint32_t i = 0; i < val->waitSemaphoreCount; i++)
            vn_encode_VkSemaphore(enc, &val->pWaitSemaphores[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
    if (val->pWaitDstStageMask) {
        vn_encode_array_size(enc, val->waitSemaphoreCount);
        for (uint32_t i = 0; i < val->waitSemaphoreCount; i++)
            vn_encode_VkFlags(enc, &val->pWaitDstStageMask[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
    vn_encode_uint32_t(enc, &val->commandBufferCount);
    if (val->pCommandBuffers) {
        vn_encode_array_size(enc, val->commandBufferCount);
        for (uint32_t i = 0; i < val->commandBufferCount; i++)
            vn_encode_VkCommandBuffer(enc, &val->pCommandBuffers[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
    vn_encode_uint32_t(enc, &val->signalSemaphoreCount);
    if (val->pSignalSemaphores) {
        vn_encode_array_size(enc, val->signalSemaphoreCount);
        for (uint32_t i = 0; i < val->signalSemaphoreCount; i++)
            vn_encode_VkSemaphore(enc, &val->pSignalSemaphores[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
}

static inline void
vn_encode_VkSubmitInfo(struct vn_cs_encoder *enc, const VkSubmitInfo *val)
{
    assert(val->sType == VK_STRUCTURE_TYPE_SUBMIT_INFO);
    vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_SUBMIT_INFO });
    vn_encode_VkSubmitInfo_pnext(enc, val->pNext);
    vn_encode_VkSubmitInfo_self(enc, val);
}
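
/*
 * Illustrative use of the VkSubmitInfo helpers above (a sketch, not part of
 * the generated protocol; it assumes "enc" is an already-initialized
 * struct vn_cs_encoder with room for vn_sizeof_VkSubmitInfo(&info) bytes and
 * "cmd_buf" is a VkCommandBuffer handle owned by the caller):
 *
 *    const VkSubmitInfo info = {
 *        .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
 *        .commandBufferCount = 1,
 *        .pCommandBuffers = &cmd_buf,
 *    };
 *    const size_t size = vn_sizeof_VkSubmitInfo(&info);
 *    vn_encode_VkSubmitInfo(enc, &info);
 */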

/* struct VkSparseMemoryBind */

static inline size_t
vn_sizeof_VkSparseMemoryBind(const VkSparseMemoryBind *val)
{
    size_t size = 0;
    size += vn_sizeof_VkDeviceSize(&val->resourceOffset);
    size += vn_sizeof_VkDeviceSize(&val->size);
    size += vn_sizeof_VkDeviceMemory(&val->memory);
    size += vn_sizeof_VkDeviceSize(&val->memoryOffset);
    size += vn_sizeof_VkFlags(&val->flags);
    return size;
}

static inline void
vn_encode_VkSparseMemoryBind(struct vn_cs_encoder *enc, const VkSparseMemoryBind *val)
{
    vn_encode_VkDeviceSize(enc, &val->resourceOffset);
    vn_encode_VkDeviceSize(enc, &val->size);
    vn_encode_VkDeviceMemory(enc, &val->memory);
    vn_encode_VkDeviceSize(enc, &val->memoryOffset);
    vn_encode_VkFlags(enc, &val->flags);
}
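
/*
 * Structs without an sType or pNext chain, such as VkSparseMemoryBind, need
 * only a single sizeof/encode pair that serializes the members directly.
 */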

/* struct VkSparseBufferMemoryBindInfo */

static inline size_t
vn_sizeof_VkSparseBufferMemoryBindInfo(const VkSparseBufferMemoryBindInfo *val)
{
    size_t size = 0;
    size += vn_sizeof_VkBuffer(&val->buffer);
    size += vn_sizeof_uint32_t(&val->bindCount);
    if (val->pBinds) {
        size += vn_sizeof_array_size(val->bindCount);
        for (uint32_t i = 0; i < val->bindCount; i++)
            size += vn_sizeof_VkSparseMemoryBind(&val->pBinds[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    return size;
}

static inline void
vn_encode_VkSparseBufferMemoryBindInfo(struct vn_cs_encoder *enc, const VkSparseBufferMemoryBindInfo *val)
{
    vn_encode_VkBuffer(enc, &val->buffer);
    vn_encode_uint32_t(enc, &val->bindCount);
    if (val->pBinds) {
        vn_encode_array_size(enc, val->bindCount);
        for (uint32_t i = 0; i < val->bindCount; i++)
            vn_encode_VkSparseMemoryBind(enc, &val->pBinds[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
}

/* struct VkSparseImageOpaqueMemoryBindInfo */

static inline size_t
vn_sizeof_VkSparseImageOpaqueMemoryBindInfo(const VkSparseImageOpaqueMemoryBindInfo *val)
{
    size_t size = 0;
    size += vn_sizeof_VkImage(&val->image);
    size += vn_sizeof_uint32_t(&val->bindCount);
    if (val->pBinds) {
        size += vn_sizeof_array_size(val->bindCount);
        for (uint32_t i = 0; i < val->bindCount; i++)
            size += vn_sizeof_VkSparseMemoryBind(&val->pBinds[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    return size;
}

static inline void
vn_encode_VkSparseImageOpaqueMemoryBindInfo(struct vn_cs_encoder *enc, const VkSparseImageOpaqueMemoryBindInfo *val)
{
    vn_encode_VkImage(enc, &val->image);
    vn_encode_uint32_t(enc, &val->bindCount);
    if (val->pBinds) {
        vn_encode_array_size(enc, val->bindCount);
        for (uint32_t i = 0; i < val->bindCount; i++)
            vn_encode_VkSparseMemoryBind(enc, &val->pBinds[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
}

/* struct VkSparseImageMemoryBind */

static inline size_t
vn_sizeof_VkSparseImageMemoryBind(const VkSparseImageMemoryBind *val)
{
    size_t size = 0;
    size += vn_sizeof_VkImageSubresource(&val->subresource);
    size += vn_sizeof_VkOffset3D(&val->offset);
    size += vn_sizeof_VkExtent3D(&val->extent);
    size += vn_sizeof_VkDeviceMemory(&val->memory);
    size += vn_sizeof_VkDeviceSize(&val->memoryOffset);
    size += vn_sizeof_VkFlags(&val->flags);
    return size;
}

static inline void
vn_encode_VkSparseImageMemoryBind(struct vn_cs_encoder *enc, const VkSparseImageMemoryBind *val)
{
    vn_encode_VkImageSubresource(enc, &val->subresource);
    vn_encode_VkOffset3D(enc, &val->offset);
    vn_encode_VkExtent3D(enc, &val->extent);
    vn_encode_VkDeviceMemory(enc, &val->memory);
    vn_encode_VkDeviceSize(enc, &val->memoryOffset);
    vn_encode_VkFlags(enc, &val->flags);
}

/* struct VkSparseImageMemoryBindInfo */

static inline size_t
vn_sizeof_VkSparseImageMemoryBindInfo(const VkSparseImageMemoryBindInfo *val)
{
    size_t size = 0;
    size += vn_sizeof_VkImage(&val->image);
    size += vn_sizeof_uint32_t(&val->bindCount);
    if (val->pBinds) {
        size += vn_sizeof_array_size(val->bindCount);
        for (uint32_t i = 0; i < val->bindCount; i++)
            size += vn_sizeof_VkSparseImageMemoryBind(&val->pBinds[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    return size;
}

static inline void
vn_encode_VkSparseImageMemoryBindInfo(struct vn_cs_encoder *enc, const VkSparseImageMemoryBindInfo *val)
{
    vn_encode_VkImage(enc, &val->image);
    vn_encode_uint32_t(enc, &val->bindCount);
    if (val->pBinds) {
        vn_encode_array_size(enc, val->bindCount);
        for (uint32_t i = 0; i < val->bindCount; i++)
            vn_encode_VkSparseImageMemoryBind(enc, &val->pBinds[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
}

/* struct VkDeviceGroupBindSparseInfo chain */

static inline size_t
vn_sizeof_VkDeviceGroupBindSparseInfo_pnext(const void *val)
{
    /* no known/supported struct */
    return vn_sizeof_simple_pointer(NULL);
}

static inline size_t
vn_sizeof_VkDeviceGroupBindSparseInfo_self(const VkDeviceGroupBindSparseInfo *val)
{
    size_t size = 0;
    /* skip val->{sType,pNext} */
    size += vn_sizeof_uint32_t(&val->resourceDeviceIndex);
    size += vn_sizeof_uint32_t(&val->memoryDeviceIndex);
    return size;
}

static inline size_t
vn_sizeof_VkDeviceGroupBindSparseInfo(const VkDeviceGroupBindSparseInfo *val)
{
    size_t size = 0;

    size += vn_sizeof_VkStructureType(&val->sType);
    size += vn_sizeof_VkDeviceGroupBindSparseInfo_pnext(val->pNext);
    size += vn_sizeof_VkDeviceGroupBindSparseInfo_self(val);

    return size;
}

static inline void
vn_encode_VkDeviceGroupBindSparseInfo_pnext(struct vn_cs_encoder *enc, const void *val)
{
    /* no known/supported struct */
    vn_encode_simple_pointer(enc, NULL);
}

static inline void
vn_encode_VkDeviceGroupBindSparseInfo_self(struct vn_cs_encoder *enc, const VkDeviceGroupBindSparseInfo *val)
{
    /* skip val->{sType,pNext} */
    vn_encode_uint32_t(enc, &val->resourceDeviceIndex);
    vn_encode_uint32_t(enc, &val->memoryDeviceIndex);
}

static inline void
vn_encode_VkDeviceGroupBindSparseInfo(struct vn_cs_encoder *enc, const VkDeviceGroupBindSparseInfo *val)
{
    assert(val->sType == VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO);
    vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO });
    vn_encode_VkDeviceGroupBindSparseInfo_pnext(enc, val->pNext);
    vn_encode_VkDeviceGroupBindSparseInfo_self(enc, val);
}

/* struct VkBindSparseInfo chain */

static inline size_t
vn_sizeof_VkBindSparseInfo_pnext(const void *val)
{
    const VkBaseInStructure *pnext = val;
    size_t size = 0;

    while (pnext) {
        switch ((int32_t)pnext->sType) {
        case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO:
            size += vn_sizeof_simple_pointer(pnext);
            size += vn_sizeof_VkStructureType(&pnext->sType);
            size += vn_sizeof_VkBindSparseInfo_pnext(pnext->pNext);
            size += vn_sizeof_VkDeviceGroupBindSparseInfo_self((const VkDeviceGroupBindSparseInfo *)pnext);
            return size;
        case VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO:
            size += vn_sizeof_simple_pointer(pnext);
            size += vn_sizeof_VkStructureType(&pnext->sType);
            size += vn_sizeof_VkBindSparseInfo_pnext(pnext->pNext);
            size += vn_sizeof_VkTimelineSemaphoreSubmitInfo_self((const VkTimelineSemaphoreSubmitInfo *)pnext);
            return size;
        default:
            /* ignore unknown/unsupported struct */
            break;
        }
        pnext = pnext->pNext;
    }

    return vn_sizeof_simple_pointer(NULL);
}

static inline size_t
vn_sizeof_VkBindSparseInfo_self(const VkBindSparseInfo *val)
{
    size_t size = 0;
    /* skip val->{sType,pNext} */
    size += vn_sizeof_uint32_t(&val->waitSemaphoreCount);
    if (val->pWaitSemaphores) {
        size += vn_sizeof_array_size(val->waitSemaphoreCount);
        for (uint32_t i = 0; i < val->waitSemaphoreCount; i++)
            size += vn_sizeof_VkSemaphore(&val->pWaitSemaphores[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    size += vn_sizeof_uint32_t(&val->bufferBindCount);
    if (val->pBufferBinds) {
        size += vn_sizeof_array_size(val->bufferBindCount);
        for (uint32_t i = 0; i < val->bufferBindCount; i++)
            size += vn_sizeof_VkSparseBufferMemoryBindInfo(&val->pBufferBinds[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    size += vn_sizeof_uint32_t(&val->imageOpaqueBindCount);
    if (val->pImageOpaqueBinds) {
        size += vn_sizeof_array_size(val->imageOpaqueBindCount);
        for (uint32_t i = 0; i < val->imageOpaqueBindCount; i++)
            size += vn_sizeof_VkSparseImageOpaqueMemoryBindInfo(&val->pImageOpaqueBinds[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    size += vn_sizeof_uint32_t(&val->imageBindCount);
    if (val->pImageBinds) {
        size += vn_sizeof_array_size(val->imageBindCount);
        for (uint32_t i = 0; i < val->imageBindCount; i++)
            size += vn_sizeof_VkSparseImageMemoryBindInfo(&val->pImageBinds[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    size += vn_sizeof_uint32_t(&val->signalSemaphoreCount);
    if (val->pSignalSemaphores) {
        size += vn_sizeof_array_size(val->signalSemaphoreCount);
        for (uint32_t i = 0; i < val->signalSemaphoreCount; i++)
            size += vn_sizeof_VkSemaphore(&val->pSignalSemaphores[i]);
    } else {
        size += vn_sizeof_array_size(0);
    }
    return size;
}

static inline size_t
vn_sizeof_VkBindSparseInfo(const VkBindSparseInfo *val)
{
    size_t size = 0;

    size += vn_sizeof_VkStructureType(&val->sType);
    size += vn_sizeof_VkBindSparseInfo_pnext(val->pNext);
    size += vn_sizeof_VkBindSparseInfo_self(val);

    return size;
}

static inline void
vn_encode_VkBindSparseInfo_pnext(struct vn_cs_encoder *enc, const void *val)
{
    const VkBaseInStructure *pnext = val;

    while (pnext) {
        switch ((int32_t)pnext->sType) {
        case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO:
            vn_encode_simple_pointer(enc, pnext);
            vn_encode_VkStructureType(enc, &pnext->sType);
            vn_encode_VkBindSparseInfo_pnext(enc, pnext->pNext);
            vn_encode_VkDeviceGroupBindSparseInfo_self(enc, (const VkDeviceGroupBindSparseInfo *)pnext);
            return;
        case VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO:
            vn_encode_simple_pointer(enc, pnext);
            vn_encode_VkStructureType(enc, &pnext->sType);
            vn_encode_VkBindSparseInfo_pnext(enc, pnext->pNext);
            vn_encode_VkTimelineSemaphoreSubmitInfo_self(enc, (const VkTimelineSemaphoreSubmitInfo *)pnext);
            return;
        default:
            /* ignore unknown/unsupported struct */
            break;
        }
        pnext = pnext->pNext;
    }

    vn_encode_simple_pointer(enc, NULL);
}

static inline void
vn_encode_VkBindSparseInfo_self(struct vn_cs_encoder *enc, const VkBindSparseInfo *val)
{
    /* skip val->{sType,pNext} */
    vn_encode_uint32_t(enc, &val->waitSemaphoreCount);
    if (val->pWaitSemaphores) {
        vn_encode_array_size(enc, val->waitSemaphoreCount);
        for (uint32_t i = 0; i < val->waitSemaphoreCount; i++)
            vn_encode_VkSemaphore(enc, &val->pWaitSemaphores[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
    vn_encode_uint32_t(enc, &val->bufferBindCount);
    if (val->pBufferBinds) {
        vn_encode_array_size(enc, val->bufferBindCount);
        for (uint32_t i = 0; i < val->bufferBindCount; i++)
            vn_encode_VkSparseBufferMemoryBindInfo(enc, &val->pBufferBinds[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
    vn_encode_uint32_t(enc, &val->imageOpaqueBindCount);
    if (val->pImageOpaqueBinds) {
        vn_encode_array_size(enc, val->imageOpaqueBindCount);
        for (uint32_t i = 0; i < val->imageOpaqueBindCount; i++)
            vn_encode_VkSparseImageOpaqueMemoryBindInfo(enc, &val->pImageOpaqueBinds[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
    vn_encode_uint32_t(enc, &val->imageBindCount);
    if (val->pImageBinds) {
        vn_encode_array_size(enc, val->imageBindCount);
        for (uint32_t i = 0; i < val->imageBindCount; i++)
            vn_encode_VkSparseImageMemoryBindInfo(enc, &val->pImageBinds[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
    vn_encode_uint32_t(enc, &val->signalSemaphoreCount);
    if (val->pSignalSemaphores) {
        vn_encode_array_size(enc, val->signalSemaphoreCount);
        for (uint32_t i = 0; i < val->signalSemaphoreCount; i++)
            vn_encode_VkSemaphore(enc, &val->pSignalSemaphores[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
}

static inline void
vn_encode_VkBindSparseInfo(struct vn_cs_encoder *enc, const VkBindSparseInfo *val)
{
    assert(val->sType == VK_STRUCTURE_TYPE_BIND_SPARSE_INFO);
    vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_BIND_SPARSE_INFO });
    vn_encode_VkBindSparseInfo_pnext(enc, val->pNext);
    vn_encode_VkBindSparseInfo_self(enc, val);
}

static inline size_t vn_sizeof_vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkQueueSubmit_EXT;
    const VkFlags cmd_flags = 0;
    size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type) + vn_sizeof_VkFlags(&cmd_flags);

    cmd_size += vn_sizeof_VkQueue(&queue);
    cmd_size += vn_sizeof_uint32_t(&submitCount);
    if (pSubmits) {
        cmd_size += vn_sizeof_array_size(submitCount);
        for (uint32_t i = 0; i < submitCount; i++)
            cmd_size += vn_sizeof_VkSubmitInfo(&pSubmits[i]);
    } else {
        cmd_size += vn_sizeof_array_size(0);
    }
    cmd_size += vn_sizeof_VkFence(&fence);

    return cmd_size;
}

static inline void vn_encode_vkQueueSubmit(struct vn_cs_encoder *enc, VkCommandFlagsEXT cmd_flags, VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkQueueSubmit_EXT;

    vn_encode_VkCommandTypeEXT(enc, &cmd_type);
    vn_encode_VkFlags(enc, &cmd_flags);

    vn_encode_VkQueue(enc, &queue);
    vn_encode_uint32_t(enc, &submitCount);
    if (pSubmits) {
        vn_encode_array_size(enc, submitCount);
        for (uint32_t i = 0; i < submitCount; i++)
            vn_encode_VkSubmitInfo(enc, &pSubmits[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
    vn_encode_VkFence(enc, &fence);
}

static inline size_t vn_sizeof_vkQueueSubmit_reply(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkQueueSubmit_EXT;
    size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type);

    VkResult ret;
    cmd_size += vn_sizeof_VkResult(&ret);
    /* skip queue */
    /* skip submitCount */
    /* skip pSubmits */
    /* skip fence */

    return cmd_size;
}

static inline VkResult vn_decode_vkQueueSubmit_reply(struct vn_cs_decoder *dec, VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
{
    VkCommandTypeEXT command_type;
    vn_decode_VkCommandTypeEXT(dec, &command_type);
    assert(command_type == VK_COMMAND_TYPE_vkQueueSubmit_EXT);

    VkResult ret;
    vn_decode_VkResult(dec, &ret);
    /* skip queue */
    /* skip submitCount */
    /* skip pSubmits */
    /* skip fence */

    return ret;
}

static inline size_t vn_sizeof_vkQueueWaitIdle(VkQueue queue)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkQueueWaitIdle_EXT;
    const VkFlags cmd_flags = 0;
    size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type) + vn_sizeof_VkFlags(&cmd_flags);

    cmd_size += vn_sizeof_VkQueue(&queue);

    return cmd_size;
}

static inline void vn_encode_vkQueueWaitIdle(struct vn_cs_encoder *enc, VkCommandFlagsEXT cmd_flags, VkQueue queue)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkQueueWaitIdle_EXT;

    vn_encode_VkCommandTypeEXT(enc, &cmd_type);
    vn_encode_VkFlags(enc, &cmd_flags);

    vn_encode_VkQueue(enc, &queue);
}

static inline size_t vn_sizeof_vkQueueWaitIdle_reply(VkQueue queue)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkQueueWaitIdle_EXT;
    size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type);

    VkResult ret;
    cmd_size += vn_sizeof_VkResult(&ret);
    /* skip queue */

    return cmd_size;
}

static inline VkResult vn_decode_vkQueueWaitIdle_reply(struct vn_cs_decoder *dec, VkQueue queue)
{
    VkCommandTypeEXT command_type;
    vn_decode_VkCommandTypeEXT(dec, &command_type);
    assert(command_type == VK_COMMAND_TYPE_vkQueueWaitIdle_EXT);

    VkResult ret;
    vn_decode_VkResult(dec, &ret);
    /* skip queue */

    return ret;
}

static inline size_t vn_sizeof_vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkQueueBindSparse_EXT;
    const VkFlags cmd_flags = 0;
    size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type) + vn_sizeof_VkFlags(&cmd_flags);

    cmd_size += vn_sizeof_VkQueue(&queue);
    cmd_size += vn_sizeof_uint32_t(&bindInfoCount);
    if (pBindInfo) {
        cmd_size += vn_sizeof_array_size(bindInfoCount);
        for (uint32_t i = 0; i < bindInfoCount; i++)
            cmd_size += vn_sizeof_VkBindSparseInfo(&pBindInfo[i]);
    } else {
        cmd_size += vn_sizeof_array_size(0);
    }
    cmd_size += vn_sizeof_VkFence(&fence);

    return cmd_size;
}

static inline void vn_encode_vkQueueBindSparse(struct vn_cs_encoder *enc, VkCommandFlagsEXT cmd_flags, VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkQueueBindSparse_EXT;

    vn_encode_VkCommandTypeEXT(enc, &cmd_type);
    vn_encode_VkFlags(enc, &cmd_flags);

    vn_encode_VkQueue(enc, &queue);
    vn_encode_uint32_t(enc, &bindInfoCount);
    if (pBindInfo) {
        vn_encode_array_size(enc, bindInfoCount);
        for (uint32_t i = 0; i < bindInfoCount; i++)
            vn_encode_VkBindSparseInfo(enc, &pBindInfo[i]);
    } else {
        vn_encode_array_size(enc, 0);
    }
    vn_encode_VkFence(enc, &fence);
}

static inline size_t vn_sizeof_vkQueueBindSparse_reply(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkQueueBindSparse_EXT;
    size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type);

    VkResult ret;
    cmd_size += vn_sizeof_VkResult(&ret);
    /* skip queue */
    /* skip bindInfoCount */
    /* skip pBindInfo */
    /* skip fence */

    return cmd_size;
}

static inline VkResult vn_decode_vkQueueBindSparse_reply(struct vn_cs_decoder *dec, VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence)
{
    VkCommandTypeEXT command_type;
    vn_decode_VkCommandTypeEXT(dec, &command_type);
    assert(command_type == VK_COMMAND_TYPE_vkQueueBindSparse_EXT);

    VkResult ret;
    vn_decode_VkResult(dec, &ret);
    /* skip queue */
    /* skip bindInfoCount */
    /* skip pBindInfo */
    /* skip fence */

    return ret;
}

static inline void vn_submit_vkQueueSubmit(struct vn_instance *vn_instance, VkCommandFlagsEXT cmd_flags, VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence, struct vn_instance_submit_command *submit)
{
    uint8_t local_cmd_data[VN_SUBMIT_LOCAL_CMD_SIZE];
    void *cmd_data = local_cmd_data;
    size_t cmd_size = vn_sizeof_vkQueueSubmit(queue, submitCount, pSubmits, fence);
    if (cmd_size > sizeof(local_cmd_data)) {
        cmd_data = malloc(cmd_size);
        if (!cmd_data)
            cmd_size = 0;
    }
    const size_t reply_size = cmd_flags & VK_COMMAND_GENERATE_REPLY_BIT_EXT ? vn_sizeof_vkQueueSubmit_reply(queue, submitCount, pSubmits, fence) : 0;

    struct vn_cs_encoder *enc = vn_instance_submit_command_init(vn_instance, submit, cmd_data, cmd_size, reply_size);
    if (cmd_size) {
        vn_encode_vkQueueSubmit(enc, cmd_flags, queue, submitCount, pSubmits, fence);
        vn_instance_submit_command(vn_instance, submit);
        if (cmd_data != local_cmd_data)
            free(cmd_data);
    }
}
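
/*
 * vn_submit_vkQueueSubmit above shows the submission pattern shared by the
 * helpers that follow: the command is sized first and encoded into a small
 * on-stack buffer (VN_SUBMIT_LOCAL_CMD_SIZE) or a heap allocation when it does
 * not fit; a reply is sized only when VK_COMMAND_GENERATE_REPLY_BIT_EXT is
 * set; and the encoded command is handed to vn_instance_submit_command before
 * any heap buffer is freed.  A failed allocation forces cmd_size to 0, so
 * nothing is encoded or submitted in that case.
 */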

static inline void vn_submit_vkQueueWaitIdle(struct vn_instance *vn_instance, VkCommandFlagsEXT cmd_flags, VkQueue queue, struct vn_instance_submit_command *submit)
{
    uint8_t local_cmd_data[VN_SUBMIT_LOCAL_CMD_SIZE];
    void *cmd_data = local_cmd_data;
    size_t cmd_size = vn_sizeof_vkQueueWaitIdle(queue);
    if (cmd_size > sizeof(local_cmd_data)) {
        cmd_data = malloc(cmd_size);
        if (!cmd_data)
            cmd_size = 0;
    }
    const size_t reply_size = cmd_flags & VK_COMMAND_GENERATE_REPLY_BIT_EXT ? vn_sizeof_vkQueueWaitIdle_reply(queue) : 0;

    struct vn_cs_encoder *enc = vn_instance_submit_command_init(vn_instance, submit, cmd_data, cmd_size, reply_size);
    if (cmd_size) {
        vn_encode_vkQueueWaitIdle(enc, cmd_flags, queue);
        vn_instance_submit_command(vn_instance, submit);
        if (cmd_data != local_cmd_data)
            free(cmd_data);
    }
}

static inline void vn_submit_vkQueueBindSparse(struct vn_instance *vn_instance, VkCommandFlagsEXT cmd_flags, VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence, struct vn_instance_submit_command *submit)
{
    uint8_t local_cmd_data[VN_SUBMIT_LOCAL_CMD_SIZE];
    void *cmd_data = local_cmd_data;
    size_t cmd_size = vn_sizeof_vkQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
    if (cmd_size > sizeof(local_cmd_data)) {
        cmd_data = malloc(cmd_size);
        if (!cmd_data)
            cmd_size = 0;
    }
    const size_t reply_size = cmd_flags & VK_COMMAND_GENERATE_REPLY_BIT_EXT ? vn_sizeof_vkQueueBindSparse_reply(queue, bindInfoCount, pBindInfo, fence) : 0;

    struct vn_cs_encoder *enc = vn_instance_submit_command_init(vn_instance, submit, cmd_data, cmd_size, reply_size);
    if (cmd_size) {
        vn_encode_vkQueueBindSparse(enc, cmd_flags, queue, bindInfoCount, pBindInfo, fence);
        vn_instance_submit_command(vn_instance, submit);
        if (cmd_data != local_cmd_data)
            free(cmd_data);
    }
}

static inline VkResult vn_call_vkQueueSubmit(struct vn_instance *vn_instance, VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
{
    VN_TRACE_FUNC();

    struct vn_instance_submit_command submit;
    vn_submit_vkQueueSubmit(vn_instance, VK_COMMAND_GENERATE_REPLY_BIT_EXT, queue, submitCount, pSubmits, fence, &submit);
    struct vn_cs_decoder *dec = vn_instance_get_command_reply(vn_instance, &submit);
    if (dec) {
        const VkResult ret = vn_decode_vkQueueSubmit_reply(dec, queue, submitCount, pSubmits, fence);
        vn_instance_free_command_reply(vn_instance, &submit);
        return ret;
    } else {
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
}
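
/*
 * vn_call_vkQueueSubmit is the synchronous variant: it requests a reply with
 * VK_COMMAND_GENERATE_REPLY_BIT_EXT, decodes the VkResult from the reply
 * stream, and returns VK_ERROR_OUT_OF_HOST_MEMORY when no reply could be
 * obtained.  The vn_async_ variants below pass cmd_flags of 0 and return
 * without waiting for a result.
 */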

static inline void vn_async_vkQueueSubmit(struct vn_instance *vn_instance, VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
{
    struct vn_instance_submit_command submit;
    vn_submit_vkQueueSubmit(vn_instance, 0, queue, submitCount, pSubmits, fence, &submit);
}

static inline VkResult vn_call_vkQueueWaitIdle(struct vn_instance *vn_instance, VkQueue queue)
{
    VN_TRACE_FUNC();

    struct vn_instance_submit_command submit;
    vn_submit_vkQueueWaitIdle(vn_instance, VK_COMMAND_GENERATE_REPLY_BIT_EXT, queue, &submit);
    struct vn_cs_decoder *dec = vn_instance_get_command_reply(vn_instance, &submit);
    if (dec) {
        const VkResult ret = vn_decode_vkQueueWaitIdle_reply(dec, queue);
        vn_instance_free_command_reply(vn_instance, &submit);
        return ret;
    } else {
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
}

static inline void vn_async_vkQueueWaitIdle(struct vn_instance *vn_instance, VkQueue queue)
{
    struct vn_instance_submit_command submit;
    vn_submit_vkQueueWaitIdle(vn_instance, 0, queue, &submit);
}

static inline VkResult vn_call_vkQueueBindSparse(struct vn_instance *vn_instance, VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence)
{
    VN_TRACE_FUNC();

    struct vn_instance_submit_command submit;
    vn_submit_vkQueueBindSparse(vn_instance, VK_COMMAND_GENERATE_REPLY_BIT_EXT, queue, bindInfoCount, pBindInfo, fence, &submit);
    struct vn_cs_decoder *dec = vn_instance_get_command_reply(vn_instance, &submit);
    if (dec) {
        const VkResult ret = vn_decode_vkQueueBindSparse_reply(dec, queue, bindInfoCount, pBindInfo, fence);
        vn_instance_free_command_reply(vn_instance, &submit);
        return ret;
    } else {
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
}

static inline void vn_async_vkQueueBindSparse(struct vn_instance *vn_instance, VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence)
{
    struct vn_instance_submit_command submit;
    vn_submit_vkQueueBindSparse(vn_instance, 0, queue, bindInfoCount, pBindInfo, fence, &submit);
}

#endif /* VN_PROTOCOL_DRIVER_QUEUE_H */