• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright (c) 2017 The vulkano developers
2 // Licensed under the Apache License, Version 2.0
3 // <LICENSE-APACHE or
4 // https://www.apache.org/licenses/LICENSE-2.0> or the MIT
5 // license <LICENSE-MIT or https://opensource.org/licenses/MIT>,
6 // at your option. All files in the project carrying such
7 // notice may not be copied, modified, or distributed except
8 // according to those terms.
9 
10 use crate::buffer::BufferAccess;
11 use crate::command_buffer::DynamicState;
12 use crate::descriptor_set::DescriptorSetWithOffsets;
13 use crate::pipeline::input_assembly::IndexType;
14 use crate::pipeline::ComputePipelineAbstract;
15 use crate::pipeline::GraphicsPipelineAbstract;
16 use crate::pipeline::PipelineBindPoint;
17 use crate::DeviceSize;
18 use crate::VulkanObject;
19 use smallvec::SmallVec;
20 use std::ops::Range;
21 
/// Keep track of the state of a command buffer builder, so that you don't need to bind objects
/// that were already bound.
///
/// > **Important**: Executing a secondary command buffer invalidates the state of a command buffer
/// > builder. When you do so, you need to call `invalidate()`.
pub struct StateCacher {
    // The dynamic state to synchronize with `CmdSetState`.
    dynamic_state: DynamicState,
    // The compute pipeline currently bound. 0 if nothing bound.
    compute_pipeline: ash::vk::Pipeline,
    // The graphics pipeline currently bound. 0 if nothing bound.
    graphics_pipeline: ash::vk::Pipeline,
    // The descriptor sets for the compute pipeline, stored as
    // (set handle, dynamic offsets) pairs indexed by set number.
    compute_descriptor_sets: SmallVec<[(ash::vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
    // The descriptor sets for the graphics pipeline. Same layout as
    // `compute_descriptor_sets`.
    graphics_descriptor_sets: SmallVec<[(ash::vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
    // If the user starts comparing descriptor sets, but drops the helper struct in the middle of
    // the processing then we will end up in a weird state. This bool is true when we start
    // comparing sets, and is set to false when we end up comparing. If it was true when we start
    // comparing, we know that something bad happened and we flush the cache.
    poisoned_descriptor_sets: bool,
    // The vertex buffers currently bound, stored as (buffer handle, offset)
    // pairs indexed by binding number.
    vertex_buffers: SmallVec<[(ash::vk::Buffer, DeviceSize); 12]>,
    // Same as `poisoned_descriptor_sets` but for vertex buffers.
    poisoned_vertex_buffers: bool,
    // The index buffer, offset, and index type currently bound. `None` if nothing bound.
    index_buffer: Option<(ash::vk::Buffer, DeviceSize, IndexType)>,
}
50 
/// Outcome of an operation.
///
/// Returned by the `bind_*` methods of `StateCacher` to tell the caller whether the
/// corresponding bind command actually needs to be recorded.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum StateCacherOutcome {
    /// The caller needs to perform the state change in the actual command buffer builder.
    NeedChange,
    /// The state change is not necessary.
    AlreadyOk,
}
59 
60 impl StateCacher {
61     /// Builds a new `StateCacher`.
62     #[inline]
new() -> StateCacher63     pub fn new() -> StateCacher {
64         StateCacher {
65             dynamic_state: DynamicState::none(),
66             compute_pipeline: ash::vk::Pipeline::null(),
67             graphics_pipeline: ash::vk::Pipeline::null(),
68             compute_descriptor_sets: SmallVec::new(),
69             graphics_descriptor_sets: SmallVec::new(),
70             poisoned_descriptor_sets: false,
71             vertex_buffers: SmallVec::new(),
72             poisoned_vertex_buffers: false,
73             index_buffer: None,
74         }
75     }
76 
77     /// Resets the cache to its default state. You **must** call this after executing a secondary
78     /// command buffer.
79     #[inline]
invalidate(&mut self)80     pub fn invalidate(&mut self) {
81         self.dynamic_state = DynamicState::none();
82         self.compute_pipeline = ash::vk::Pipeline::null();
83         self.graphics_pipeline = ash::vk::Pipeline::null();
84         self.compute_descriptor_sets = SmallVec::new();
85         self.graphics_descriptor_sets = SmallVec::new();
86         self.vertex_buffers = SmallVec::new();
87         self.index_buffer = None;
88     }
89 
90     /// Compares the current state with `incoming`, and returns a new state that contains the
91     /// states that differ and that need to be actually set in the command buffer builder.
92     ///
93     /// This function also updates the state cacher. The state cacher assumes that the state
94     /// changes are going to be performed after this function returns.
dynamic_state(&mut self, incoming: &DynamicState) -> DynamicState95     pub fn dynamic_state(&mut self, incoming: &DynamicState) -> DynamicState {
96         let mut changed = DynamicState::none();
97 
98         macro_rules! cmp {
99             ($field:ident) => {
100                 if self.dynamic_state.$field != incoming.$field {
101                     changed.$field = incoming.$field.clone();
102                     if incoming.$field.is_some() {
103                         self.dynamic_state.$field = incoming.$field.clone();
104                     }
105                 }
106             };
107         }
108 
109         cmp!(line_width);
110         cmp!(viewports);
111         cmp!(scissors);
112         cmp!(compare_mask);
113         cmp!(reference);
114         cmp!(write_mask);
115 
116         changed
117     }
118 
119     /// Starts the process of comparing a list of descriptor sets to the descriptor sets currently
120     /// in cache.
121     ///
122     /// After calling this function, call `add` for each set one by one. Then call `compare` in
123     /// order to get the index of the first set to bind, or `None` if the sets were identical to
124     /// what is in cache.
125     ///
126     /// This process also updates the state cacher. The state cacher assumes that the state
127     /// changes are going to be performed after the `compare` function returns.
128     #[inline]
bind_descriptor_sets( &mut self, pipeline_bind_point: PipelineBindPoint, ) -> StateCacherDescriptorSets129     pub fn bind_descriptor_sets(
130         &mut self,
131         pipeline_bind_point: PipelineBindPoint,
132     ) -> StateCacherDescriptorSets {
133         if self.poisoned_descriptor_sets {
134             self.compute_descriptor_sets = SmallVec::new();
135             self.graphics_descriptor_sets = SmallVec::new();
136         }
137 
138         self.poisoned_descriptor_sets = true;
139 
140         StateCacherDescriptorSets {
141             poisoned: &mut self.poisoned_descriptor_sets,
142             state: match pipeline_bind_point {
143                 PipelineBindPoint::Compute => &mut self.compute_descriptor_sets,
144                 PipelineBindPoint::Graphics => &mut self.graphics_descriptor_sets,
145             },
146             offset: 0,
147             found_diff: None,
148         }
149     }
150 
151     /// Checks whether we need to bind a graphics pipeline. Returns `StateCacherOutcome::AlreadyOk`
152     /// if the pipeline was already bound earlier, and `StateCacherOutcome::NeedChange` if you need
153     /// to actually bind the pipeline.
154     ///
155     /// This function also updates the state cacher. The state cacher assumes that the state
156     /// changes are going to be performed after this function returns.
bind_graphics_pipeline<P>(&mut self, pipeline: &P) -> StateCacherOutcome where P: GraphicsPipelineAbstract,157     pub fn bind_graphics_pipeline<P>(&mut self, pipeline: &P) -> StateCacherOutcome
158     where
159         P: GraphicsPipelineAbstract,
160     {
161         let inner = GraphicsPipelineAbstract::inner(pipeline).internal_object();
162         if inner == self.graphics_pipeline {
163             StateCacherOutcome::AlreadyOk
164         } else {
165             self.graphics_pipeline = inner;
166             StateCacherOutcome::NeedChange
167         }
168     }
169 
170     /// Checks whether we need to bind a compute pipeline. Returns `StateCacherOutcome::AlreadyOk`
171     /// if the pipeline was already bound earlier, and `StateCacherOutcome::NeedChange` if you need
172     /// to actually bind the pipeline.
173     ///
174     /// This function also updates the state cacher. The state cacher assumes that the state
175     /// changes are going to be performed after this function returns.
bind_compute_pipeline<P>(&mut self, pipeline: &P) -> StateCacherOutcome where P: ComputePipelineAbstract,176     pub fn bind_compute_pipeline<P>(&mut self, pipeline: &P) -> StateCacherOutcome
177     where
178         P: ComputePipelineAbstract,
179     {
180         let inner = pipeline.inner().internal_object();
181         if inner == self.compute_pipeline {
182             StateCacherOutcome::AlreadyOk
183         } else {
184             self.compute_pipeline = inner;
185             StateCacherOutcome::NeedChange
186         }
187     }
188 
189     /// Starts the process of comparing a list of vertex buffers to the vertex buffers currently
190     /// in cache.
191     ///
192     /// After calling this function, call `add` for each set one by one. Then call `compare` in
193     /// order to get the range of the vertex buffers to bind, or `None` if the sets were identical
194     /// to what is in cache.
195     ///
196     /// This process also updates the state cacher. The state cacher assumes that the state
197     /// changes are going to be performed after the `compare` function returns.
198     #[inline]
bind_vertex_buffers(&mut self) -> StateCacherVertexBuffers199     pub fn bind_vertex_buffers(&mut self) -> StateCacherVertexBuffers {
200         if self.poisoned_vertex_buffers {
201             self.vertex_buffers = SmallVec::new();
202         }
203 
204         self.poisoned_vertex_buffers = true;
205 
206         StateCacherVertexBuffers {
207             poisoned: &mut self.poisoned_vertex_buffers,
208             state: &mut self.vertex_buffers,
209             offset: 0,
210             first_diff: None,
211             last_diff: 0,
212         }
213     }
214 
215     /// Checks whether we need to bind an index buffer. Returns `StateCacherOutcome::AlreadyOk`
216     /// if the index buffer was already bound earlier, and `StateCacherOutcome::NeedChange` if you
217     /// need to actually bind the buffer.
218     ///
219     /// This function also updates the state cacher. The state cacher assumes that the state
220     /// changes are going to be performed after this function returns.
bind_index_buffer<B>(&mut self, index_buffer: &B, ty: IndexType) -> StateCacherOutcome where B: ?Sized + BufferAccess,221     pub fn bind_index_buffer<B>(&mut self, index_buffer: &B, ty: IndexType) -> StateCacherOutcome
222     where
223         B: ?Sized + BufferAccess,
224     {
225         let value = {
226             let inner = index_buffer.inner();
227             (inner.buffer.internal_object(), inner.offset, ty)
228         };
229 
230         if self.index_buffer == Some(value) {
231             StateCacherOutcome::AlreadyOk
232         } else {
233             self.index_buffer = Some(value);
234             StateCacherOutcome::NeedChange
235         }
236     }
237 }
238 
/// Helper struct for comparing descriptor sets.
///
/// > **Note**: For reliability reasons, if you drop/leak this struct before calling `compare` then
/// > the cache of the currently bound descriptor sets will be reset.
pub struct StateCacherDescriptorSets<'s> {
    // Reference to the parent's `poisoned_descriptor_sets`; cleared by `compare`.
    poisoned: &'s mut bool,
    // Reference to the descriptor sets list to compare to (either the compute or the graphics
    // list, depending on the bind point passed to `bind_descriptor_sets`).
    state: &'s mut SmallVec<[(ash::vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
    // Next offset within the list to compare to.
    offset: usize,
    // Contains the return value of `compare`: the index of the first differing set, if any.
    found_diff: Option<u32>,
}
253 
254 impl<'s> StateCacherDescriptorSets<'s> {
255     /// Adds a descriptor set to the list to compare.
256     #[inline]
add(&mut self, descriptor_set: &DescriptorSetWithOffsets)257     pub fn add(&mut self, descriptor_set: &DescriptorSetWithOffsets) {
258         let (descriptor_set, dynamic_offsets) = descriptor_set.as_ref();
259         let raw = descriptor_set.inner().internal_object();
260         let dynamic_offsets = dynamic_offsets.iter().copied().collect();
261 
262         if let Some(state) = self.state.get_mut(self.offset) {
263             if (&state.0, &state.1) == (&raw, &dynamic_offsets) {
264                 self.offset += 1;
265                 return;
266             }
267 
268             *state = (raw, dynamic_offsets);
269         } else {
270             self.state.push((raw, dynamic_offsets));
271         }
272 
273         if self.found_diff.is_none() {
274             self.found_diff = Some(self.offset as u32);
275         }
276         self.offset += 1;
277     }
278 
279     /// Compares your list to the list in cache, and returns the offset of the first set to bind.
280     /// Returns `None` if the two lists were identical.
281     ///
282     /// After this function returns, the cache will be updated to match your list.
283     #[inline]
compare(self) -> Option<u32>284     pub fn compare(self) -> Option<u32> {
285         *self.poisoned = false;
286         // Removing from the cache any set that wasn't added with `add`.
287         self.state.truncate(self.offset);
288         self.found_diff
289     }
290 }
291 
/// Helper struct for comparing vertex buffers.
///
/// > **Note**: For reliability reasons, if you drop/leak this struct before calling `compare` then
/// > the cache of the currently bound vertex buffers will be reset.
pub struct StateCacherVertexBuffers<'s> {
    // Reference to the parent's `poisoned_vertex_buffers`; cleared by `compare`.
    poisoned: &'s mut bool,
    // Reference to the vertex buffers list to compare to.
    state: &'s mut SmallVec<[(ash::vk::Buffer, DeviceSize); 12]>,
    // Next offset within the list to compare to.
    offset: usize,
    // Contains the offset of the first vertex buffer that differs. `None` means no difference
    // seen so far, and makes `compare` return `None`.
    first_diff: Option<u32>,
    // Offset of the last vertex buffer that differs. Only meaningful while `first_diff` is
    // `Some`, since it is written whenever any difference is recorded.
    last_diff: u32,
}
308 
309 impl<'s> StateCacherVertexBuffers<'s> {
310     /// Adds a vertex buffer to the list to compare.
311     #[inline]
add<B>(&mut self, buffer: &B) where B: ?Sized + BufferAccess,312     pub fn add<B>(&mut self, buffer: &B)
313     where
314         B: ?Sized + BufferAccess,
315     {
316         let raw = {
317             let inner = buffer.inner();
318             let raw = inner.buffer.internal_object();
319             let offset = inner.offset;
320             (raw, offset)
321         };
322 
323         if self.offset < self.state.len() {
324             if self.state[self.offset] == raw {
325                 self.offset += 1;
326                 return;
327             }
328 
329             self.state[self.offset] = raw;
330         } else {
331             self.state.push(raw);
332         }
333 
334         self.last_diff = self.offset as u32;
335         if self.first_diff.is_none() {
336             self.first_diff = Some(self.offset as u32);
337         }
338         self.offset += 1;
339     }
340 
341     /// Compares your list to the list in cache, and returns the range of the vertex buffers to
342     /// bind. Returns `None` if the two lists were identical.
343     ///
344     /// After this function returns, the cache will be updated to match your list.
345     ///
346     /// > **Note**: Keep in mind that `range.end` is *after* the last element. For example the
347     /// > range `1 .. 2` only contains one element.
348     #[inline]
compare(self) -> Option<Range<u32>>349     pub fn compare(self) -> Option<Range<u32>> {
350         *self.poisoned = false;
351 
352         // Removing from the cache any set that wasn't added with `add`.
353         self.state.truncate(self.offset);
354 
355         self.first_diff.map(|first| {
356             debug_assert!(first <= self.last_diff);
357             first..(self.last_diff + 1)
358         })
359     }
360 }
361 
#[cfg(test)]
mod tests {
    use crate::buffer::BufferUsage;
    use crate::buffer::CpuAccessibleBuffer;
    use crate::command_buffer::state_cacher::StateCacher;

    #[test]
    fn vb_caching_single() {
        let (device, queue) = gfx_dev_and_queue!();

        const EMPTY: [i32; 0] = [];
        let vb = CpuAccessibleBuffer::from_data(device, BufferUsage::vertex_buffer(), false, EMPTY)
            .unwrap();

        let mut cacher = StateCacher::new();

        // First binding: nothing cached yet, so slot 0 needs to be bound.
        {
            let mut cmp = cacher.bind_vertex_buffers();
            cmp.add(&vb);
            assert_eq!(cmp.compare(), Some(0..1));
        }

        // Re-binding the same buffer is a no-op every time.
        for _ in 0..3 {
            let mut cmp = cacher.bind_vertex_buffers();
            cmp.add(&vb);
            assert_eq!(cmp.compare(), None);
        }
    }

    #[test]
    fn vb_caching_invalidated() {
        let (device, queue) = gfx_dev_and_queue!();

        const EMPTY: [i32; 0] = [];
        let vb = CpuAccessibleBuffer::from_data(device, BufferUsage::vertex_buffer(), false, EMPTY)
            .unwrap();

        let mut cacher = StateCacher::new();

        // First binding requires a change.
        {
            let mut cmp = cacher.bind_vertex_buffers();
            cmp.add(&vb);
            assert_eq!(cmp.compare(), Some(0..1));
        }

        // Second binding of the same buffer is cached.
        {
            let mut cmp = cacher.bind_vertex_buffers();
            cmp.add(&vb);
            assert_eq!(cmp.compare(), None);
        }

        cacher.invalidate();

        // After invalidation the cache is empty again, so the buffer must be rebound.
        {
            let mut cmp = cacher.bind_vertex_buffers();
            cmp.add(&vb);
            assert_eq!(cmp.compare(), Some(0..1));
        }
    }

    #[test]
    fn vb_caching_multi() {
        let (device, queue) = gfx_dev_and_queue!();

        const EMPTY: [i32; 0] = [];
        let vb1 = CpuAccessibleBuffer::from_data(
            device.clone(),
            BufferUsage::vertex_buffer(),
            false,
            EMPTY,
        )
        .unwrap();
        let vb2 = CpuAccessibleBuffer::from_data(
            device.clone(),
            BufferUsage::vertex_buffer(),
            false,
            EMPTY,
        )
        .unwrap();
        let vb3 = CpuAccessibleBuffer::from_data(device, BufferUsage::vertex_buffer(), false, EMPTY)
            .unwrap();

        let mut cacher = StateCacher::new();

        // [vb1, vb2]: both slots are new.
        {
            let mut cmp = cacher.bind_vertex_buffers();
            cmp.add(&vb1);
            cmp.add(&vb2);
            assert_eq!(cmp.compare(), Some(0..2));
        }

        // [vb1, vb2, vb3]: only the appended slot 2 differs.
        {
            let mut cmp = cacher.bind_vertex_buffers();
            cmp.add(&vb1);
            cmp.add(&vb2);
            cmp.add(&vb3);
            assert_eq!(cmp.compare(), Some(2..3));
        }

        // [vb1]: prefix matches, trailing slots are truncated without a rebind.
        {
            let mut cmp = cacher.bind_vertex_buffers();
            cmp.add(&vb1);
            assert_eq!(cmp.compare(), None);
        }

        // [vb1, vb3]: slot 0 matches, slot 1 changed.
        {
            let mut cmp = cacher.bind_vertex_buffers();
            cmp.add(&vb1);
            cmp.add(&vb3);
            assert_eq!(cmp.compare(), Some(1..2));
        }

        // [vb2, vb3]: slot 0 changed, slot 1 still holds vb3.
        {
            let mut cmp = cacher.bind_vertex_buffers();
            cmp.add(&vb2);
            cmp.add(&vb3);
            assert_eq!(cmp.compare(), Some(0..1));
        }
    }
}
485