/*
 * Copyright 2021 Google LLC
 * SPDX-License-Identifier: MIT
 */

#include "vkr_cs.h"

#include "vrend_iov.h"

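/**
 * Set the iovec array that backs the encoder stream and seek to the start of
 * the [offset, offset + size) window within it.
 */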
void
vkr_cs_encoder_set_stream(struct vkr_cs_encoder *enc,
                          const struct iovec *iov,
                          int iov_count,
                          size_t offset,
                          size_t size)
{
   enc->stream.iov = iov;
   enc->stream.iov_count = iov_count;
   enc->stream.offset = offset;
   enc->stream.size = size;
   /* clear cache */
   enc->stream.cached_index = 0;
   enc->stream.cached_offset = 0;

   vkr_cs_encoder_seek_stream(enc, 0);
}

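/*
 * Map a linear stream offset to the iovec containing it, returning the iovec
 * index and the byte offset within that iovec.  The cached index/offset pair
 * is used when possible and rebuilt otherwise.  Returns false when the offset
 * lies beyond the last iovec.
 */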
static bool
vkr_cs_encoder_translate_stream_offset(struct vkr_cs_encoder *enc,
                                       size_t offset,
                                       int *iov_index,
                                       size_t *iov_offset)
{
   int idx = 0;

   /* use or clear cache */
   if (offset >= enc->stream.cached_offset) {
      offset -= enc->stream.cached_offset;
      idx = enc->stream.cached_index;
   } else {
      enc->stream.cached_index = 0;
      enc->stream.cached_offset = 0;
   }

   while (true) {
      if (idx >= enc->stream.iov_count)
         return false;

      const struct iovec *iov = &enc->stream.iov[idx];
      if (offset < iov->iov_len)
         break;

      idx++;
      offset -= iov->iov_len;

      /* update cache */
      enc->stream.cached_index++;
      enc->stream.cached_offset += iov->iov_len;
   }

   *iov_index = idx;
   *iov_offset = offset;

   return true;
}

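/*
 * Recompute enc->end for the iovec the encoder currently points into: writes
 * may proceed to the end of that iovec or until remaining_size is exhausted,
 * whichever comes first.  remaining_size is reduced by the bytes made
 * available.
 */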
static void
vkr_cs_encoder_update_end(struct vkr_cs_encoder *enc)
{
   const struct iovec *iov = &enc->stream.iov[enc->next_iov - 1];
   const size_t iov_offset = enc->cur - (uint8_t *)iov->iov_base;
   const size_t iov_remain = iov->iov_len - iov_offset;

   if (enc->remaining_size >= iov_remain) {
      enc->end = enc->cur + iov_remain;
      enc->remaining_size -= iov_remain;
   } else {
      enc->end = enc->cur + enc->remaining_size;
      enc->remaining_size = 0;
   }
}

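/**
 * Seek to byte position pos within the stream window.  The encoder is marked
 * fatal when pos is out of range or cannot be mapped to an iovec.
 */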
void
vkr_cs_encoder_seek_stream(struct vkr_cs_encoder *enc, size_t pos)
{
   const size_t offset = enc->stream.offset + pos;
   int iov_index;
   size_t iov_offset;
   if (pos > enc->stream.size ||
       !vkr_cs_encoder_translate_stream_offset(enc, offset, &iov_index, &iov_offset)) {
      vkr_cs_encoder_set_fatal(enc);
      return;
   }

   enc->remaining_size = enc->stream.size - pos;
   enc->next_iov = iov_index + 1;

   const struct iovec *iov = &enc->stream.iov[iov_index];
   enc->cur = iov->iov_base;
   enc->cur += iov_offset;

   vkr_cs_encoder_update_end(enc);
}

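/*
 * Advance the encoder to the next iovec, if any, and update the write window.
 */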
static bool
vkr_cs_encoder_next_iov(struct vkr_cs_encoder *enc)
{
   if (enc->next_iov >= enc->stream.iov_count)
      return false;

   const struct iovec *iov = &enc->stream.iov[enc->next_iov++];
   enc->cur = iov->iov_base;
   vkr_cs_encoder_update_end(enc);

   return true;
}

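/*
 * Return a pointer to at most size contiguous bytes of the stream and advance
 * the encoder past them.  The number of bytes actually available at the
 * returned pointer is stored in *ptr_size.  NULL is returned only when size is
 * non-zero and the stream is exhausted.
 */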
static uint8_t *
vkr_cs_encoder_get_ptr(struct vkr_cs_encoder *enc, size_t size, size_t *ptr_size)
{
   while (true) {
      uint8_t *ptr = enc->cur;
      const size_t avail = enc->end - enc->cur;

      if (avail) {
         *ptr_size = MIN2(size, avail);
         enc->cur += *ptr_size;
         return ptr;
      }

      if (!vkr_cs_encoder_next_iov(enc)) {
         *ptr_size = 0;
         return size ? NULL : ptr;
      }
   }
}

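/**
 * Write val_size bytes from val into the stream, followed by size - val_size
 * bytes of padding, crossing iovec boundaries as needed.  Padding bytes are
 * skipped rather than zeroed.  The encoder is marked fatal when the stream
 * runs out of space.
 *
 * Illustrative call (hypothetical values, not taken from a real caller):
 *
 *    uint32_t val = 1;
 *    vkr_cs_encoder_write_internal(enc, 8, &val, sizeof(val));
 *
 * writes the 4-byte value and then skips 4 bytes of padding.
 */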
void
vkr_cs_encoder_write_internal(struct vkr_cs_encoder *enc,
                              size_t size,
                              const void *val,
                              size_t val_size)
{
   size_t pad_size = size - val_size;

   do {
      size_t ptr_size;
      uint8_t *ptr = vkr_cs_encoder_get_ptr(enc, val_size, &ptr_size);
      if (unlikely(!ptr)) {
         vkr_cs_encoder_set_fatal(enc);
         return;
      }

      memcpy(ptr, val, ptr_size);
      val = (const uint8_t *)val + ptr_size;
      val_size -= ptr_size;
   } while (val_size);

   while (pad_size) {
      size_t ptr_size;
      const void *ptr = vkr_cs_encoder_get_ptr(enc, pad_size, &ptr_size);
      if (unlikely(!ptr)) {
         vkr_cs_encoder_set_fatal(enc);
         return;
      }
      pad_size -= ptr_size;
   }
}

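/**
 * Zero-initialize the decoder and attach the caller's object table.  The
 * decoder does not take ownership of the table.
 */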
void
vkr_cs_decoder_init(struct vkr_cs_decoder *dec,
                    const struct util_hash_table_u64 *object_table)
{
   memset(dec, 0, sizeof(*dec));
   dec->object_table = object_table;
}

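/**
 * Free all temp-pool buffers and the buffer array itself.
 */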
void
vkr_cs_decoder_fini(struct vkr_cs_decoder *dec)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   for (uint32_t i = 0; i < pool->buffer_count; i++)
      free(pool->buffers[i]);
   if (pool->buffers)
      free(pool->buffers);
}

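/*
 * Assert the invariants of the temp pool and the decoder read window.
 */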
static void
vkr_cs_decoder_sanity_check(const struct vkr_cs_decoder *dec)
{
   const struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   assert(pool->buffer_count <= pool->buffer_max);
   if (pool->buffer_count) {
      assert(pool->buffers[pool->buffer_count - 1] <= pool->reset_to);
      assert(pool->reset_to <= pool->cur);
      assert(pool->cur <= pool->end);
   }

   assert(dec->cur <= dec->end);
}

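/*
 * Reclaim temp-pool storage: free all but the most recently allocated buffer
 * and rewind the pool to the start of that buffer.
 */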
static void
vkr_cs_decoder_gc_temp_pool(struct vkr_cs_decoder *dec)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   if (!pool->buffer_count)
      return;

   /* free all but the last buffer */
   if (pool->buffer_count > 1) {
      for (uint32_t i = 0; i < pool->buffer_count - 1; i++)
         free(pool->buffers[i]);

      pool->buffers[0] = pool->buffers[pool->buffer_count - 1];
      pool->buffer_count = 1;
   }

   pool->reset_to = pool->buffers[0];
   pool->cur = pool->buffers[0];

   pool->total_size = pool->end - pool->cur;

   vkr_cs_decoder_sanity_check(dec);
}

/**
 * Reset a decoder for reuse.
 */
void
vkr_cs_decoder_reset(struct vkr_cs_decoder *dec)
{
   /* dec->fatal_error is sticky */

   vkr_cs_decoder_gc_temp_pool(dec);

   dec->saved_state_count = 0;
   dec->cur = NULL;
   dec->end = NULL;
}

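/**
 * Save the current decoder position and temp-pool state so that a matching
 * vkr_cs_decoder_pop_state() can restore them.  pool->reset_to is advanced to
 * pool->cur to protect temp data allocated before the push.  Returns false
 * when the saved-state stack is full.
 */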
bool
vkr_cs_decoder_push_state(struct vkr_cs_decoder *dec)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   struct vkr_cs_decoder_saved_state *saved;

   if (dec->saved_state_count >= ARRAY_SIZE(dec->saved_states))
      return false;

   saved = &dec->saved_states[dec->saved_state_count++];
   saved->cur = dec->cur;
   saved->end = dec->end;

   saved->pool_buffer_count = pool->buffer_count;
   saved->pool_reset_to = pool->reset_to;
   /* avoid temp data corruption */
   pool->reset_to = pool->cur;

   vkr_cs_decoder_sanity_check(dec);

   return true;
}

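/**
 * Restore the decoder position saved by the matching
 * vkr_cs_decoder_push_state().  The temp-pool reset point is restored only
 * when no new pool buffer was allocated since the push.
 */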
void
vkr_cs_decoder_pop_state(struct vkr_cs_decoder *dec)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   const struct vkr_cs_decoder_saved_state *saved;

   assert(dec->saved_state_count);
   saved = &dec->saved_states[--dec->saved_state_count];
   dec->cur = saved->cur;
   dec->end = saved->end;

   /* restore only if pool->reset_to points to the same buffer */
   if (pool->buffer_count == saved->pool_buffer_count)
      pool->reset_to = saved->pool_reset_to;

   vkr_cs_decoder_sanity_check(dec);
}

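/*
 * Double cur_size, starting from min_size when it is zero.  Returns 0 on
 * overflow.
 */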
static uint32_t
next_array_size(uint32_t cur_size, uint32_t min_size)
{
   const uint32_t next_size = cur_size ? cur_size * 2 : min_size;
   return next_size > cur_size ? next_size : 0;
}

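/*
 * Double cur_size (starting from min_size when it is zero) until it can hold
 * need bytes.  Returns 0 on overflow.
 */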
static size_t
next_buffer_size(size_t cur_size, size_t min_size, size_t need)
{
   size_t next_size = cur_size ? cur_size * 2 : min_size;
   while (next_size < need) {
      next_size *= 2;
      if (!next_size)
         return 0;
   }
   return next_size;
}

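/*
 * Grow the array of temp-pool buffer pointers.
 */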
static bool
vkr_cs_decoder_grow_temp_pool(struct vkr_cs_decoder *dec)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   const uint32_t buf_max = next_array_size(pool->buffer_max, 4);
   if (!buf_max)
      return false;

   uint8_t **bufs = realloc(pool->buffers, sizeof(*pool->buffers) * buf_max);
   if (!bufs)
      return false;

   pool->buffers = bufs;
   pool->buffer_max = buf_max;

   return true;
}

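/**
 * Allocate a new temp-pool buffer large enough for size bytes, at least 4096
 * bytes and doubling from the previous buffer size, subject to the
 * VKR_CS_DECODER_TEMP_POOL_MAX_SIZE cap.  On success pool->cur,
 * pool->reset_to, and pool->end all point into the new buffer.
 */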
bool
vkr_cs_decoder_alloc_temp_internal(struct vkr_cs_decoder *dec, size_t size)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;

   if (pool->buffer_count >= pool->buffer_max) {
      if (!vkr_cs_decoder_grow_temp_pool(dec))
         return false;
      assert(pool->buffer_count < pool->buffer_max);
   }

   const size_t cur_buf_size =
      pool->buffer_count ? pool->end - pool->buffers[pool->buffer_count - 1] : 0;
   const size_t buf_size = next_buffer_size(cur_buf_size, 4096, size);
   if (!buf_size)
      return false;

   if (buf_size > VKR_CS_DECODER_TEMP_POOL_MAX_SIZE - pool->total_size)
      return false;

   uint8_t *buf = malloc(buf_size);
   if (!buf)
      return false;

   pool->total_size += buf_size;
   pool->buffers[pool->buffer_count++] = buf;
   pool->reset_to = buf;
   pool->cur = buf;
   pool->end = buf + buf_size;

   vkr_cs_decoder_sanity_check(dec);

   return true;
}
353