1 /*
2 * Copyright 2021 Google LLC
3 * SPDX-License-Identifier: MIT
4 */
5
6 #ifndef VKR_CS_H
7 #define VKR_CS_H
8
9 #include "vkr_common.h"
10
11 #define VKR_CS_DECODER_TEMP_POOL_MAX_SIZE (64u * 1024 * 1024)
12
13 struct iovec;
14
struct vkr_cs_encoder {
   /* caller-owned flag; set to true on encode errors
    * (see vkr_cs_encoder_set_fatal)
    */
   bool *fatal_error;

   struct {
      /* output stream backing storage as an iovec array */
      const struct iovec *iov;
      int iov_count;
      size_t offset;
      size_t size;

      /* NOTE(review): presumably a cached iov index/offset for the current
       * stream position — confirm against vkr_cs_encoder_seek_stream
       */
      int cached_index;
      size_t cached_offset;
   } stream;

   size_t remaining_size;
   int next_iov;
   /* current contiguous write window: cur advances toward end */
   uint8_t *cur;
   const uint8_t *end;
};
33
/* snapshot of a decoder position, taken by vkr_cs_decoder_push_state */
struct vkr_cs_decoder_saved_state {
   const uint8_t *cur;
   const uint8_t *end;

   /* temp pool state to restore: buffer count and reset watermark */
   uint32_t pool_buffer_count;
   uint8_t *pool_reset_to;
};
41
/* bump allocator backing vkr_cs_decoder_alloc_temp; total size is capped
 * at VKR_CS_DECODER_TEMP_POOL_MAX_SIZE
 */
struct vkr_cs_decoder_temp_pool {
   /* array of allocated chunks */
   uint8_t **buffers;
   uint32_t buffer_count;
   uint32_t buffer_max;
   size_t total_size;

   /* watermark restored by vkr_cs_decoder_reset_temp_pool */
   uint8_t *reset_to;

   /* bump window into the current chunk: cur advances toward end */
   uint8_t *cur;
   const uint8_t *end;
};
53
struct vkr_cs_decoder {
   /* object id -> vkr_object lookup table, owned by the caller */
   const struct util_hash_table_u64 *object_table;

   /* sticky error flag; see vkr_cs_decoder_set_fatal/get_fatal */
   bool fatal_error;
   struct vkr_cs_decoder_temp_pool temp_pool;

   /* one-deep save stack used by push_state/pop_state */
   struct vkr_cs_decoder_saved_state saved_states[1];
   uint32_t saved_state_count;

   /* current decode window: cur advances toward end */
   const uint8_t *cur;
   const uint8_t *end;
};
66
67 static inline void
vkr_cs_encoder_init(struct vkr_cs_encoder * enc,bool * fatal_error)68 vkr_cs_encoder_init(struct vkr_cs_encoder *enc, bool *fatal_error)
69 {
70 memset(enc, 0, sizeof(*enc));
71 enc->fatal_error = fatal_error;
72 }
73
74 static inline void
vkr_cs_encoder_set_fatal(const struct vkr_cs_encoder * enc)75 vkr_cs_encoder_set_fatal(const struct vkr_cs_encoder *enc)
76 {
77 *enc->fatal_error = true;
78 }
79
/* Attach an iovec array as the output stream and position the encoder at
 * [offset, offset + size) within it.
 */
void
vkr_cs_encoder_set_stream(struct vkr_cs_encoder *enc,
                          const struct iovec *iov,
                          int iov_count,
                          size_t offset,
                          size_t size);

/* Seek to stream position pos (presumably relative to the stream set
 * above — confirm in the implementation).
 */
void
vkr_cs_encoder_seek_stream(struct vkr_cs_encoder *enc, size_t pos);

/* Slow path of vkr_cs_encoder_write: handles writes that do not fit the
 * current contiguous window.
 */
void
vkr_cs_encoder_write_internal(struct vkr_cs_encoder *enc,
                              size_t size,
                              const void *val,
                              size_t val_size);
95
96 static inline void
vkr_cs_encoder_write(struct vkr_cs_encoder * enc,size_t size,const void * val,size_t val_size)97 vkr_cs_encoder_write(struct vkr_cs_encoder *enc,
98 size_t size,
99 const void *val,
100 size_t val_size)
101 {
102 assert(val_size <= size);
103
104 if (unlikely(size > (size_t)(enc->end - enc->cur))) {
105 vkr_cs_encoder_write_internal(enc, size, val, val_size);
106 return;
107 }
108
109 /* we should not rely on the compiler to optimize away memcpy... */
110 memcpy(enc->cur, val, val_size);
111 enc->cur += size;
112 }
113
/* Initialize dec; object lookups will go through the caller-owned
 * object_table.
 */
void
vkr_cs_decoder_init(struct vkr_cs_decoder *dec,
                    const struct util_hash_table_u64 *object_table);

/* Release decoder-owned resources (presumably the temp pool buffers —
 * see the implementation).
 */
void
vkr_cs_decoder_fini(struct vkr_cs_decoder *dec);

/* Reset decoder state for reuse. */
void
vkr_cs_decoder_reset(struct vkr_cs_decoder *dec);
123
124 static inline void
vkr_cs_decoder_set_fatal(const struct vkr_cs_decoder * dec)125 vkr_cs_decoder_set_fatal(const struct vkr_cs_decoder *dec)
126 {
127 ((struct vkr_cs_decoder *)dec)->fatal_error = true;
128 }
129
130 static inline bool
vkr_cs_decoder_get_fatal(const struct vkr_cs_decoder * dec)131 vkr_cs_decoder_get_fatal(const struct vkr_cs_decoder *dec)
132 {
133 return dec->fatal_error;
134 }
135
136 static inline void
vkr_cs_decoder_set_stream(struct vkr_cs_decoder * dec,const void * data,size_t size)137 vkr_cs_decoder_set_stream(struct vkr_cs_decoder *dec, const void *data, size_t size)
138 {
139 dec->cur = data;
140 dec->end = dec->cur + size;
141 }
142
143 static inline bool
vkr_cs_decoder_has_command(const struct vkr_cs_decoder * dec)144 vkr_cs_decoder_has_command(const struct vkr_cs_decoder *dec)
145 {
146 return dec->cur < dec->end;
147 }
148
/* Save the current decoder position and temp pool watermark (single save
 * slot; see saved_states).  Presumably returns false when the slot is
 * already in use — confirm in the implementation.
 */
bool
vkr_cs_decoder_push_state(struct vkr_cs_decoder *dec);

/* Restore the most recently pushed decoder state. */
void
vkr_cs_decoder_pop_state(struct vkr_cs_decoder *dec);
154
155 static inline bool
vkr_cs_decoder_peek_internal(const struct vkr_cs_decoder * dec,size_t size,void * val,size_t val_size)156 vkr_cs_decoder_peek_internal(const struct vkr_cs_decoder *dec,
157 size_t size,
158 void *val,
159 size_t val_size)
160 {
161 assert(val_size <= size);
162
163 if (unlikely(size > (size_t)(dec->end - dec->cur))) {
164 vkr_cs_decoder_set_fatal(dec);
165 memset(val, 0, val_size);
166 return false;
167 }
168
169 /* we should not rely on the compiler to optimize away memcpy... */
170 memcpy(val, dec->cur, val_size);
171 return true;
172 }
173
174 static inline void
vkr_cs_decoder_read(struct vkr_cs_decoder * dec,size_t size,void * val,size_t val_size)175 vkr_cs_decoder_read(struct vkr_cs_decoder *dec, size_t size, void *val, size_t val_size)
176 {
177 if (vkr_cs_decoder_peek_internal(dec, size, val, val_size))
178 dec->cur += size;
179 }
180
/* Like vkr_cs_decoder_read but does not consume the stream. */
static inline void
vkr_cs_decoder_peek(const struct vkr_cs_decoder *dec,
                    size_t size,
                    void *val,
                    size_t val_size)
{
   /* on failure val is zeroed and the decoder is flagged fatal */
   (void)vkr_cs_decoder_peek_internal(dec, size, val, val_size);
}
189
190 static inline struct vkr_object *
vkr_cs_decoder_lookup_object(const struct vkr_cs_decoder * dec,vkr_object_id id,VkObjectType type)191 vkr_cs_decoder_lookup_object(const struct vkr_cs_decoder *dec,
192 vkr_object_id id,
193 VkObjectType type)
194 {
195 struct vkr_object *obj;
196
197 if (!id)
198 return NULL;
199
200 obj = util_hash_table_get_u64((struct util_hash_table_u64 *)dec->object_table, id);
201 if (!obj || obj->type != type)
202 vkr_cs_decoder_set_fatal(dec);
203
204 return obj;
205 }
206
207 static inline void
vkr_cs_decoder_reset_temp_pool(struct vkr_cs_decoder * dec)208 vkr_cs_decoder_reset_temp_pool(struct vkr_cs_decoder *dec)
209 {
210 struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
211 pool->cur = pool->reset_to;
212 }
213
/* Grow the temp pool so at least size more contiguous bytes are
 * available; returns false on failure.
 */
bool
vkr_cs_decoder_alloc_temp_internal(struct vkr_cs_decoder *dec, size_t size);
216
217 static inline void *
vkr_cs_decoder_alloc_temp(struct vkr_cs_decoder * dec,size_t size)218 vkr_cs_decoder_alloc_temp(struct vkr_cs_decoder *dec, size_t size)
219 {
220 struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
221
222 if (unlikely(size > (size_t)(pool->end - pool->cur))) {
223 if (!vkr_cs_decoder_alloc_temp_internal(dec, size)) {
224 vkr_cs_decoder_set_fatal(dec);
225 return NULL;
226 }
227 }
228
229 /* align to 64-bit after we know size is at most
230 * VKR_CS_DECODER_TEMP_POOL_MAX_SIZE and cannot overflow
231 */
232 size = (size + 7) & ~7;
233 assert(size <= (size_t)(pool->end - pool->cur));
234
235 void *ptr = pool->cur;
236 pool->cur += size;
237 return ptr;
238 }
239
240 static inline bool
vkr_cs_handle_indirect_id(VkObjectType type)241 vkr_cs_handle_indirect_id(VkObjectType type)
242 {
243 /* Dispatchable handles may or may not have enough bits to store
244 * vkr_object_id. Non-dispatchable handles always have enough bits to
245 * store vkr_object_id.
246 *
247 * This should compile to a constant after inlining.
248 */
249 switch (type) {
250 case VK_OBJECT_TYPE_INSTANCE:
251 case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
252 case VK_OBJECT_TYPE_DEVICE:
253 case VK_OBJECT_TYPE_QUEUE:
254 case VK_OBJECT_TYPE_COMMAND_BUFFER:
255 return sizeof(VkInstance) < sizeof(vkr_object_id);
256 default:
257 return false;
258 }
259 }
260
261 static inline vkr_object_id
vkr_cs_handle_load_id(const void ** handle,VkObjectType type)262 vkr_cs_handle_load_id(const void **handle, VkObjectType type)
263 {
264 const vkr_object_id *p = vkr_cs_handle_indirect_id(type)
265 ? *(const vkr_object_id **)handle
266 : (const vkr_object_id *)handle;
267 return *p;
268 }
269
270 static inline void
vkr_cs_handle_store_id(void ** handle,vkr_object_id id,VkObjectType type)271 vkr_cs_handle_store_id(void **handle, vkr_object_id id, VkObjectType type)
272 {
273 vkr_object_id *p = vkr_cs_handle_indirect_id(type) ? *(vkr_object_id **)handle
274 : (vkr_object_id *)handle;
275 *p = id;
276 }
277
278 #endif /* VKR_CS_H */
279