/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 */

#ifndef VN_CS_H
#define VN_CS_H

#include "vn_common.h"

#define VN_CS_ENCODER_BUFFER_INITIALIZER(storage)                            \
   (struct vn_cs_encoder_buffer) { .base = storage, }

/* note that buffers points to an unnamed local variable */
#define VN_CS_ENCODER_INITIALIZER_LOCAL(storage, size)                       \
   (struct vn_cs_encoder)                                                    \
   {                                                                         \
      .buffers = &VN_CS_ENCODER_BUFFER_INITIALIZER(storage),                 \
      .buffer_count = 1, .buffer_max = 1, .current_buffer_size = size,       \
      .cur = storage, .end = (const void *)(storage) + (size),               \
   }

#define VN_CS_ENCODER_INITIALIZER(buf, size)                                 \
   (struct vn_cs_encoder)                                                    \
   {                                                                         \
      .buffers = (buf), .buffer_count = 1, .buffer_max = 1,                  \
      .current_buffer_size = size, .cur = (buf)->base,                       \
      .end = (buf)->base + (size),                                           \
   }

#define VN_CS_DECODER_INITIALIZER(storage, size)                             \
   (struct vn_cs_decoder)                                                    \
   {                                                                         \
      .cur = storage, .end = (const void *)(storage) + (size),               \
   }

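/* Illustrative sketch (not part of this header): a typical way to build a
 * fixed-size encoder over caller-owned storage, and a decoder over received
 * data.  The variable names below are hypothetical.
 *
 *    uint32_t local_data[16];
 *    struct vn_cs_encoder local_enc =
 *       VN_CS_ENCODER_INITIALIZER_LOCAL(local_data, sizeof(local_data));
 *
 *    struct vn_cs_decoder reply_dec =
 *       VN_CS_DECODER_INITIALIZER(reply_ptr, reply_size);
 */
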
struct vn_cs_encoder_buffer {
   struct vn_renderer_shmem *shmem;
   size_t offset;
   void *base;
   size_t committed_size;
};

struct vn_cs_encoder {
   struct vn_instance *instance; /* TODO shmem cache */
   size_t min_buffer_size;
   bool indirect;

   bool fatal_error;

   struct vn_cs_encoder_buffer *buffers;
   uint32_t buffer_count;
   uint32_t buffer_max;
   size_t total_committed_size;

   /* the current buffer is buffers[buffer_count - 1].shmem */
   size_t current_buffer_size;
   uint32_t current_buffer_roundtrip;

   /* cur is the write pointer.  When cur passes end, the slow path is
    * triggered.
    */
   void *cur;
   const void *end;
};

struct vn_cs_decoder {
   const void *cur;
   const void *end;
};

void
vn_cs_encoder_init_indirect(struct vn_cs_encoder *enc,
                            struct vn_instance *instance,
                            size_t min_size);

void
vn_cs_encoder_fini(struct vn_cs_encoder *enc);

void
vn_cs_encoder_reset(struct vn_cs_encoder *enc);

static inline void
vn_cs_encoder_set_fatal(const struct vn_cs_encoder *enc)
{
   /* This is fatal and should be treated as VK_ERROR_DEVICE_LOST or even
    * abort().  Note that vn_cs_encoder_reset does not clear this.
    */
   ((struct vn_cs_encoder *)enc)->fatal_error = true;
}

static inline bool
vn_cs_encoder_get_fatal(const struct vn_cs_encoder *enc)
{
   return enc->fatal_error;
}

static inline bool
vn_cs_encoder_is_empty(const struct vn_cs_encoder *enc)
{
   return !enc->buffer_count || enc->cur == enc->buffers[0].base;
}

/* Return the number of bytes encoded so far: everything already committed
 * plus any bytes still pending in the current buffer.
 */
static inline size_t
vn_cs_encoder_get_len(const struct vn_cs_encoder *enc)
{
   if (unlikely(!enc->buffer_count))
      return 0;

   size_t len = enc->total_committed_size;
   const struct vn_cs_encoder_buffer *cur_buf =
      &enc->buffers[enc->buffer_count - 1];
   if (!cur_buf->committed_size)
      len += enc->cur - cur_buf->base;
   return len;
}

bool
vn_cs_encoder_reserve_internal(struct vn_cs_encoder *enc, size_t size);

/**
 * Reserve space for commands.
 */
static inline bool
vn_cs_encoder_reserve(struct vn_cs_encoder *enc, size_t size)
{
   if (unlikely(size > enc->end - enc->cur)) {
      if (!vn_cs_encoder_reserve_internal(enc, size)) {
         vn_cs_encoder_set_fatal(enc);
         return false;
      }
      assert(size <= enc->end - enc->cur);
   }

   return true;
}

static inline void
vn_cs_encoder_write(struct vn_cs_encoder *enc,
                    size_t size,
                    const void *val,
                    size_t val_size)
{
   assert(val_size <= size);
   assert(size <= enc->end - enc->cur);

   /* we should not rely on the compiler to optimize away memcpy... */
   memcpy(enc->cur, val, val_size);
   enc->cur += size;
}

void
vn_cs_encoder_commit(struct vn_cs_encoder *enc);

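/* Illustrative sketch (not part of this header) of the expected encode flow:
 * reserve space, write one or more values, then commit.  cmd below is a
 * hypothetical local.
 *
 *    if (vn_cs_encoder_reserve(enc, sizeof(cmd)))
 *       vn_cs_encoder_write(enc, sizeof(cmd), &cmd, sizeof(cmd));
 *    vn_cs_encoder_commit(enc);
 *
 * A failed reserve marks the encoder fatal; the caller can check
 * vn_cs_encoder_get_fatal() before submission.
 */
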
static inline void
vn_cs_decoder_init(struct vn_cs_decoder *dec, const void *data, size_t size)
{
   *dec = VN_CS_DECODER_INITIALIZER(data, size);
}

static inline void
vn_cs_decoder_set_fatal(const struct vn_cs_decoder *dec)
{
   /* unlike the encoder, a decoding error is treated as unrecoverable */
   abort();
}

static inline bool
vn_cs_decoder_peek_internal(const struct vn_cs_decoder *dec,
                            size_t size,
                            void *val,
                            size_t val_size)
{
   assert(val_size <= size);

   if (unlikely(size > dec->end - dec->cur)) {
      vn_cs_decoder_set_fatal(dec);
      memset(val, 0, val_size);
      return false;
   }

   /* we should not rely on the compiler to optimize away memcpy... */
   memcpy(val, dec->cur, val_size);
   return true;
}

static inline void
vn_cs_decoder_read(struct vn_cs_decoder *dec,
                   size_t size,
                   void *val,
                   size_t val_size)
{
   if (vn_cs_decoder_peek_internal(dec, size, val, val_size))
      dec->cur += size;
}

static inline void
vn_cs_decoder_peek(const struct vn_cs_decoder *dec,
                   size_t size,
                   void *val,
                   size_t val_size)
{
   vn_cs_decoder_peek_internal(dec, size, val, val_size);
}

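/* Illustrative sketch (not part of this header): reading a value back out of
 * a decoder, e.g. when parsing a reply stream.  The local variable is
 * hypothetical.
 *
 *    VkResult result;
 *    vn_cs_decoder_read(dec, sizeof(result), &result, sizeof(result));
 *
 * An out-of-bounds read calls vn_cs_decoder_set_fatal, which aborts.
 */
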
static inline vn_object_id
vn_cs_handle_load_id(const void **handle, VkObjectType type)
{
   return *handle ? vn_object_get_id(*handle, type) : 0;
}

static inline void
vn_cs_handle_store_id(void **handle, vn_object_id id, VkObjectType type)
{
   vn_object_set_id(*handle, id, type);
}

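/* Illustrative sketch (not part of this header): looking up the id of a
 * dispatchable Vulkan handle during encoding.  dev is hypothetical.
 *
 *    VkDevice dev = ...;
 *    const vn_object_id id =
 *       vn_cs_handle_load_id((const void **)&dev, VK_OBJECT_TYPE_DEVICE);
 */
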
#endif /* VN_CS_H */