// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or https://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.

use crate::buffer::BufferAccess;
use crate::buffer::BufferInner;
use crate::buffer::TypedBufferAccess;
use crate::check_errors;
use crate::command_buffer::pool::UnsafeCommandPoolAlloc;
use crate::command_buffer::CommandBufferInheritance;
use crate::command_buffer::CommandBufferLevel;
use crate::command_buffer::CommandBufferUsage;
use crate::command_buffer::SecondaryCommandBuffer;
use crate::command_buffer::SubpassContents;
use crate::descriptor_set::sys::UnsafeDescriptorSet;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::format::ClearValue;
use crate::format::FormatTy;
use crate::image::ImageAccess;
use crate::image::ImageAspect;
use crate::image::ImageAspects;
use crate::image::ImageLayout;
use crate::image::SampleCount;
use crate::pipeline::depth_stencil::StencilFaces;
use crate::pipeline::input_assembly::IndexType;
use crate::pipeline::layout::PipelineLayout;
use crate::pipeline::shader::ShaderStages;
use crate::pipeline::viewport::Scissor;
use crate::pipeline::viewport::Viewport;
use crate::pipeline::ComputePipelineAbstract;
use crate::pipeline::GraphicsPipelineAbstract;
use crate::pipeline::PipelineBindPoint;
use crate::query::QueriesRange;
use crate::query::Query;
use crate::query::QueryControlFlags;
use crate::query::QueryResultElement;
use crate::query::QueryResultFlags;
use crate::render_pass::FramebufferAbstract;
use crate::sampler::Filter;
use crate::sync::AccessFlags;
use crate::sync::Event;
use crate::sync::PipelineStage;
use crate::sync::PipelineStages;
use crate::DeviceSize;
use crate::OomError;
use crate::VulkanObject;
use ash::vk::Handle;
use smallvec::SmallVec;
use std::ffi::CStr;
use std::fmt;
use std::mem;
use std::ops::Range;
use std::sync::Arc;

/// Command buffer being built.
///
/// You can add commands to an `UnsafeCommandBufferBuilder` by calling its unsafe builder
/// methods below; each method corresponds directly to a raw Vulkan command.
///
/// When you are finished adding commands, you can call `build` to turn this builder into an
/// `UnsafeCommandBuffer`.
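///
/// # Example
///
/// A minimal sketch of the intended flow, assuming `pool_alloc` was allocated from a command
/// pool elsewhere (not shown here) and that the level's framebuffer type parameter can be
/// inferred:
///
/// ```ignore
/// let mut builder = unsafe {
///     UnsafeCommandBufferBuilder::new(
///         &pool_alloc,
///         CommandBufferLevel::Primary,
///         CommandBufferUsage::OneTimeSubmit,
///     )?
/// };
/// unsafe {
///     // Record commands through the unsafe methods, e.g.:
///     builder.dispatch([64, 1, 1]);
/// }
/// let command_buffer = builder.build()?;
/// ```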
pub struct UnsafeCommandBufferBuilder {
    command_buffer: ash::vk::CommandBuffer,
    device: Arc<Device>,
    usage: CommandBufferUsage,
}

impl fmt::Debug for UnsafeCommandBufferBuilder {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "<Vulkan command buffer builder #{}>",
            self.command_buffer.as_raw()
        )
    }
}

impl UnsafeCommandBufferBuilder {
    /// Creates a new builder, for recording commands.
    ///
    /// # Safety
    ///
    /// - `pool_alloc` must outlive the returned builder and its created command buffer.
    /// - `level` must match how `pool_alloc` was created.
    /// - All submitted commands must be valid and follow the requirements of the Vulkan specification.
    /// - Any resources used by submitted commands must outlive the returned builder and its
    ///   created command buffer. They must be protected against data races through manual
    ///   synchronization.
    ///
    /// > **Note**: Some checks are still made with `debug_assert!`. Do not expect to be able to
    /// > submit invalid commands.
    pub unsafe fn new<F>(
        pool_alloc: &UnsafeCommandPoolAlloc,
        level: CommandBufferLevel<F>,
        usage: CommandBufferUsage,
    ) -> Result<UnsafeCommandBufferBuilder, OomError>
    where
        F: FramebufferAbstract,
    {
        let secondary = match level {
            CommandBufferLevel::Primary => false,
            CommandBufferLevel::Secondary(..) => true,
        };

        let device = pool_alloc.device().clone();
        let fns = device.fns();

        let vk_flags = {
            let a = ash::vk::CommandBufferUsageFlags::from(usage);
            let b = match level {
                CommandBufferLevel::Secondary(ref inheritance)
                    if inheritance.render_pass.is_some() =>
                {
                    ash::vk::CommandBufferUsageFlags::RENDER_PASS_CONTINUE
                }
                _ => ash::vk::CommandBufferUsageFlags::empty(),
            };

            a | b
        };

        let (rp, sp, fb) = match level {
            CommandBufferLevel::Secondary(CommandBufferInheritance {
                render_pass: Some(ref render_pass),
                ..
            }) => {
                let rp = render_pass.subpass.render_pass().inner().internal_object();
                let sp = render_pass.subpass.index();
                let fb = match render_pass.framebuffer {
                    Some(ref fb) => {
                        // TODO: debug assert that the framebuffer is compatible with
                        //       the render pass?
                        FramebufferAbstract::inner(fb).internal_object()
                    }
                    None => ash::vk::Framebuffer::null(),
                };
                (rp, sp, fb)
            }
            _ => (ash::vk::RenderPass::null(), 0, ash::vk::Framebuffer::null()),
        };

        let (oqe, qf, ps) = match level {
            CommandBufferLevel::Secondary(CommandBufferInheritance {
                occlusion_query,
                query_statistics_flags,
                ..
            }) => {
                let ps: ash::vk::QueryPipelineStatisticFlags = query_statistics_flags.into();
                let (oqe, qf) = match occlusion_query {
                    Some(flags) => {
                        let qf = if flags.precise {
                            ash::vk::QueryControlFlags::PRECISE
                        } else {
                            ash::vk::QueryControlFlags::empty()
                        };
                        (ash::vk::TRUE, qf)
                    }
                    None => (0, ash::vk::QueryControlFlags::empty()),
                };

                (oqe, qf, ps)
            }
            _ => (
                0,
                ash::vk::QueryControlFlags::empty(),
                ash::vk::QueryPipelineStatisticFlags::empty(),
            ),
        };

        let inheritance = ash::vk::CommandBufferInheritanceInfo {
            render_pass: rp,
            subpass: sp,
            framebuffer: fb,
            occlusion_query_enable: oqe,
            query_flags: qf,
            pipeline_statistics: ps,
            ..Default::default()
        };

        let infos = ash::vk::CommandBufferBeginInfo {
            flags: vk_flags,
            p_inheritance_info: &inheritance,
            ..Default::default()
        };

        check_errors(
            fns.v1_0
                .begin_command_buffer(pool_alloc.internal_object(), &infos),
        )?;

        Ok(UnsafeCommandBufferBuilder {
            command_buffer: pool_alloc.internal_object(),
            device: device.clone(),
            usage,
        })
    }

    /// Turns the builder into an actual command buffer.
    #[inline]
    pub fn build(self) -> Result<UnsafeCommandBuffer, OomError> {
        unsafe {
            let fns = self.device.fns();
            check_errors(fns.v1_0.end_command_buffer(self.command_buffer))?;

            Ok(UnsafeCommandBuffer {
                command_buffer: self.command_buffer,
                device: self.device.clone(),
                usage: self.usage,
            })
        }
    }

    /// Calls `vkCmdBeginQuery` on the builder.
    #[inline]
    pub unsafe fn begin_query(&mut self, query: Query, flags: QueryControlFlags) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        let flags = if flags.precise {
            ash::vk::QueryControlFlags::PRECISE
        } else {
            ash::vk::QueryControlFlags::empty()
        };
        fns.v1_0
            .cmd_begin_query(cmd, query.pool().internal_object(), query.index(), flags);
    }

    /// Calls `vkCmdBeginRenderPass` on the builder.
    #[inline]
    pub unsafe fn begin_render_pass<F, I>(
        &mut self,
        framebuffer: &F,
        subpass_contents: SubpassContents,
        clear_values: I,
    ) where
        F: ?Sized + FramebufferAbstract,
        I: IntoIterator<Item = ClearValue>,
    {
        let fns = self.device().fns();
        let cmd = self.internal_object();

        // TODO: allow passing a different render pass
        let raw_render_pass = framebuffer.render_pass().inner().internal_object();
        let raw_framebuffer = framebuffer.inner().internal_object();

        let raw_clear_values: SmallVec<[_; 12]> = clear_values
            .into_iter()
            .map(|clear_value| match clear_value {
                ClearValue::None => ash::vk::ClearValue {
                    color: ash::vk::ClearColorValue { float32: [0.0; 4] },
                },
                ClearValue::Float(val) => ash::vk::ClearValue {
                    color: ash::vk::ClearColorValue { float32: val },
                },
                ClearValue::Int(val) => ash::vk::ClearValue {
                    color: ash::vk::ClearColorValue { int32: val },
                },
                ClearValue::Uint(val) => ash::vk::ClearValue {
                    color: ash::vk::ClearColorValue { uint32: val },
                },
                ClearValue::Depth(val) => ash::vk::ClearValue {
                    depth_stencil: ash::vk::ClearDepthStencilValue {
                        depth: val,
                        stencil: 0,
                    },
                },
                ClearValue::Stencil(val) => ash::vk::ClearValue {
                    depth_stencil: ash::vk::ClearDepthStencilValue {
                        depth: 0.0,
                        stencil: val,
                    },
                },
                ClearValue::DepthStencil((depth, stencil)) => ash::vk::ClearValue {
                    depth_stencil: ash::vk::ClearDepthStencilValue { depth, stencil },
                },
            })
            .collect();

        // TODO: allow customizing
        let rect = [
            0..framebuffer.dimensions()[0],
            0..framebuffer.dimensions()[1],
        ];

        let begin = ash::vk::RenderPassBeginInfo {
            render_pass: raw_render_pass,
            framebuffer: raw_framebuffer,
            render_area: ash::vk::Rect2D {
                offset: ash::vk::Offset2D {
                    x: rect[0].start as i32,
                    y: rect[1].start as i32,
                },
                extent: ash::vk::Extent2D {
                    width: rect[0].end - rect[0].start,
                    height: rect[1].end - rect[1].start,
                },
            },
            clear_value_count: raw_clear_values.len() as u32,
            p_clear_values: raw_clear_values.as_ptr(),
            ..Default::default()
        };

        fns.v1_0
            .cmd_begin_render_pass(cmd, &begin, subpass_contents.into());
    }

    /// Calls `vkCmdBindDescriptorSets` on the builder.
    ///
    /// Does nothing if the list of descriptor sets is empty, as it would be a no-op and isn't a
    /// valid usage of the command anyway.
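    ///
    /// `dynamic_offsets` must yield one offset, in bytes, for each dynamic buffer descriptor in
    /// `sets`, in the order the dynamic descriptors appear (standard `vkCmdBindDescriptorSets`
    /// behaviour); the offsets are forwarded to Vulkan unchecked here.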
    #[inline]
    pub unsafe fn bind_descriptor_sets<'s, S, I>(
        &mut self,
        pipeline_bind_point: PipelineBindPoint,
        pipeline_layout: &PipelineLayout,
        first_binding: u32,
        sets: S,
        dynamic_offsets: I,
    ) where
        S: IntoIterator<Item = &'s UnsafeDescriptorSet>,
        I: IntoIterator<Item = u32>,
    {
        let fns = self.device().fns();
        let cmd = self.internal_object();

        let sets: SmallVec<[_; 12]> = sets.into_iter().map(|s| s.internal_object()).collect();
        if sets.is_empty() {
            return;
        }
        let dynamic_offsets: SmallVec<[u32; 32]> = dynamic_offsets.into_iter().collect();

        let num_bindings = sets.len() as u32;
        debug_assert!(
            first_binding + num_bindings <= pipeline_layout.descriptor_set_layouts().len() as u32
        );

        fns.v1_0.cmd_bind_descriptor_sets(
            cmd,
            pipeline_bind_point.into(),
            pipeline_layout.internal_object(),
            first_binding,
            num_bindings,
            sets.as_ptr(),
            dynamic_offsets.len() as u32,
            dynamic_offsets.as_ptr(),
        );
    }

    /// Calls `vkCmdBindIndexBuffer` on the builder.
    #[inline]
    pub unsafe fn bind_index_buffer<B>(&mut self, buffer: &B, index_ty: IndexType)
    where
        B: ?Sized + BufferAccess,
    {
        let fns = self.device().fns();
        let cmd = self.internal_object();

        let inner = buffer.inner();
        debug_assert!(inner.offset < inner.buffer.size());
        debug_assert!(inner.buffer.usage().index_buffer);

        fns.v1_0.cmd_bind_index_buffer(
            cmd,
            inner.buffer.internal_object(),
            inner.offset,
            index_ty.into(),
        );
    }

    /// Calls `vkCmdBindPipeline` on the builder with a compute pipeline.
    #[inline]
    pub unsafe fn bind_pipeline_compute<Cp>(&mut self, pipeline: &Cp)
    where
        Cp: ?Sized + ComputePipelineAbstract,
    {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_bind_pipeline(
            cmd,
            ash::vk::PipelineBindPoint::COMPUTE,
            pipeline.inner().internal_object(),
        );
    }

    /// Calls `vkCmdBindPipeline` on the builder with a graphics pipeline.
    #[inline]
    pub unsafe fn bind_pipeline_graphics<Gp>(&mut self, pipeline: &Gp)
    where
        Gp: ?Sized + GraphicsPipelineAbstract,
    {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        let inner = GraphicsPipelineAbstract::inner(pipeline).internal_object();
        fns.v1_0
            .cmd_bind_pipeline(cmd, ash::vk::PipelineBindPoint::GRAPHICS, inner);
    }

    /// Calls `vkCmdBindVertexBuffers` on the builder.
    ///
    /// Does nothing if the list of buffers is empty, as it would be a no-op and isn't a valid
    /// usage of the command anyway.
    #[inline]
    pub unsafe fn bind_vertex_buffers(
        &mut self,
        first_binding: u32,
        params: UnsafeCommandBufferBuilderBindVertexBuffer,
    ) {
        debug_assert_eq!(params.raw_buffers.len(), params.offsets.len());

        if params.raw_buffers.is_empty() {
            return;
        }

        let fns = self.device().fns();
        let cmd = self.internal_object();

        let num_bindings = params.raw_buffers.len() as u32;

        debug_assert!({
            let max_bindings = self
                .device()
                .physical_device()
                .properties()
                .max_vertex_input_bindings;
            first_binding + num_bindings <= max_bindings
        });

        fns.v1_0.cmd_bind_vertex_buffers(
            cmd,
            first_binding,
            num_bindings,
            params.raw_buffers.as_ptr(),
            params.offsets.as_ptr(),
        );
    }

    /// Calls `vkCmdCopyImage` on the builder.
    ///
    /// Does nothing if the list of regions is empty, as it would be a no-op and isn't a valid
    /// usage of the command anyway.
    #[inline]
    pub unsafe fn copy_image<S, D, R>(
        &mut self,
        source: &S,
        source_layout: ImageLayout,
        destination: &D,
        destination_layout: ImageLayout,
        regions: R,
    ) where
        S: ?Sized + ImageAccess,
        D: ?Sized + ImageAccess,
        R: IntoIterator<Item = UnsafeCommandBufferBuilderImageCopy>,
    {
        // TODO: The correct check here is that the uncompressed element size of the source is
        // equal to the compressed element size of the destination.
        debug_assert!(
            source.format().ty() == FormatTy::Compressed
                || destination.format().ty() == FormatTy::Compressed
                || source.format().size() == destination.format().size()
        );

        // Depth/Stencil formats are required to match exactly.
        debug_assert!(
            !matches!(
                source.format().ty(),
                FormatTy::Depth | FormatTy::Stencil | FormatTy::DepthStencil
            ) || source.format() == destination.format()
        );

        debug_assert_eq!(source.samples(), destination.samples());
        let source = source.inner();
        debug_assert!(source.image.usage().transfer_source);
        debug_assert!(
            source_layout == ImageLayout::General
                || source_layout == ImageLayout::TransferSrcOptimal
        );

        let destination = destination.inner();
        debug_assert!(destination.image.usage().transfer_destination);
        debug_assert!(
            destination_layout == ImageLayout::General
                || destination_layout == ImageLayout::TransferDstOptimal
        );

        let regions: SmallVec<[_; 8]> = regions
            .into_iter()
            .filter_map(|copy| {
                // TODO: not everything is checked here
                debug_assert!(
                    copy.source_base_array_layer + copy.layer_count <= source.num_layers as u32
                );
                debug_assert!(
                    copy.destination_base_array_layer + copy.layer_count
                        <= destination.num_layers as u32
                );
                debug_assert!(copy.source_mip_level < source.num_mipmap_levels as u32);
                debug_assert!(copy.destination_mip_level < destination.num_mipmap_levels as u32);

                if copy.layer_count == 0 {
                    return None;
                }

                Some(ash::vk::ImageCopy {
                    src_subresource: ash::vk::ImageSubresourceLayers {
                        aspect_mask: copy.aspects.into(),
                        mip_level: copy.source_mip_level,
                        base_array_layer: copy.source_base_array_layer + source.first_layer as u32,
                        layer_count: copy.layer_count,
                    },
                    src_offset: ash::vk::Offset3D {
                        x: copy.source_offset[0],
                        y: copy.source_offset[1],
                        z: copy.source_offset[2],
                    },
                    dst_subresource: ash::vk::ImageSubresourceLayers {
                        aspect_mask: copy.aspects.into(),
                        mip_level: copy.destination_mip_level,
                        base_array_layer: copy.destination_base_array_layer
                            + destination.first_layer as u32,
                        layer_count: copy.layer_count,
                    },
                    dst_offset: ash::vk::Offset3D {
                        x: copy.destination_offset[0],
                        y: copy.destination_offset[1],
                        z: copy.destination_offset[2],
                    },
                    extent: ash::vk::Extent3D {
                        width: copy.extent[0],
                        height: copy.extent[1],
                        depth: copy.extent[2],
                    },
                })
            })
            .collect();

        if regions.is_empty() {
            return;
        }

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_copy_image(
            cmd,
            source.image.internal_object(),
            source_layout.into(),
            destination.image.internal_object(),
            destination_layout.into(),
            regions.len() as u32,
            regions.as_ptr(),
        );
    }

    /// Calls `vkCmdBlitImage` on the builder.
    ///
    /// Does nothing if the list of regions is empty, as it would be a no-op and isn't a valid
    /// usage of the command anyway.
    #[inline]
    pub unsafe fn blit_image<S, D, R>(
        &mut self,
        source: &S,
        source_layout: ImageLayout,
        destination: &D,
        destination_layout: ImageLayout,
        regions: R,
        filter: Filter,
    ) where
        S: ?Sized + ImageAccess,
        D: ?Sized + ImageAccess,
        R: IntoIterator<Item = UnsafeCommandBufferBuilderImageBlit>,
    {
        debug_assert!(
            filter == Filter::Nearest
                || !matches!(
                    source.format().ty(),
                    FormatTy::Depth | FormatTy::Stencil | FormatTy::DepthStencil
                )
        );
        debug_assert!(
            (source.format().ty() == FormatTy::Uint)
                == (destination.format().ty() == FormatTy::Uint)
        );
        debug_assert!(
            (source.format().ty() == FormatTy::Sint)
                == (destination.format().ty() == FormatTy::Sint)
        );
        debug_assert!(
            source.format() == destination.format()
                || !matches!(
                    source.format().ty(),
                    FormatTy::Depth | FormatTy::Stencil | FormatTy::DepthStencil
                )
        );

        debug_assert_eq!(source.samples(), SampleCount::Sample1);
        let source = source.inner();
        debug_assert!(source.image.format_features().blit_src);
        debug_assert!(source.image.usage().transfer_source);
        debug_assert!(
            source_layout == ImageLayout::General
                || source_layout == ImageLayout::TransferSrcOptimal
        );

        debug_assert_eq!(destination.samples(), SampleCount::Sample1);
        let destination = destination.inner();
        debug_assert!(destination.image.format_features().blit_dst);
        debug_assert!(destination.image.usage().transfer_destination);
        debug_assert!(
            destination_layout == ImageLayout::General
                || destination_layout == ImageLayout::TransferDstOptimal
        );

        let regions: SmallVec<[_; 8]> = regions
            .into_iter()
            .filter_map(|blit| {
                // TODO: not everything is checked here
                debug_assert!(
                    blit.source_base_array_layer + blit.layer_count <= source.num_layers as u32
                );
                debug_assert!(
                    blit.destination_base_array_layer + blit.layer_count
                        <= destination.num_layers as u32
                );
                debug_assert!(blit.source_mip_level < source.num_mipmap_levels as u32);
                debug_assert!(blit.destination_mip_level < destination.num_mipmap_levels as u32);

                if blit.layer_count == 0 {
                    return None;
                }

                Some(ash::vk::ImageBlit {
                    src_subresource: ash::vk::ImageSubresourceLayers {
                        aspect_mask: blit.aspects.into(),
                        mip_level: blit.source_mip_level,
                        base_array_layer: blit.source_base_array_layer + source.first_layer as u32,
                        layer_count: blit.layer_count,
                    },
                    src_offsets: [
                        ash::vk::Offset3D {
                            x: blit.source_top_left[0],
                            y: blit.source_top_left[1],
                            z: blit.source_top_left[2],
                        },
                        ash::vk::Offset3D {
                            x: blit.source_bottom_right[0],
                            y: blit.source_bottom_right[1],
                            z: blit.source_bottom_right[2],
                        },
                    ],
                    dst_subresource: ash::vk::ImageSubresourceLayers {
                        aspect_mask: blit.aspects.into(),
                        mip_level: blit.destination_mip_level,
                        base_array_layer: blit.destination_base_array_layer
                            + destination.first_layer as u32,
                        layer_count: blit.layer_count,
                    },
                    dst_offsets: [
                        ash::vk::Offset3D {
                            x: blit.destination_top_left[0],
                            y: blit.destination_top_left[1],
                            z: blit.destination_top_left[2],
                        },
                        ash::vk::Offset3D {
                            x: blit.destination_bottom_right[0],
                            y: blit.destination_bottom_right[1],
                            z: blit.destination_bottom_right[2],
                        },
                    ],
                })
            })
            .collect();

        if regions.is_empty() {
            return;
        }

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_blit_image(
            cmd,
            source.image.internal_object(),
            source_layout.into(),
            destination.image.internal_object(),
            destination_layout.into(),
            regions.len() as u32,
            regions.as_ptr(),
            filter.into(),
        );
    }

    // TODO: missing structs
    /*/// Calls `vkCmdClearAttachments` on the builder.
    ///
    /// Does nothing if the list of attachments or the list of rects is empty, as it would be a
    /// no-op and isn't a valid usage of the command anyway.
    #[inline]
    pub unsafe fn clear_attachments<A, R>(&mut self, attachments: A, rects: R)
        where A: IntoIterator<Item = >,
              R: IntoIterator<Item = >
    {
        let attachments: SmallVec<[_; 16]> = attachments.map().collect();
        let rects: SmallVec<[_; 4]> = rects.map().collect();

        if attachments.is_empty() || rects.is_empty() {
            return;
        }

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.CmdClearAttachments(cmd, attachments.len() as u32, attachments.as_ptr(),
                               rects.len() as u32, rects.as_ptr());
    }*/

    /// Calls `vkCmdClearColorImage` on the builder.
    ///
    /// Does nothing if the list of regions is empty, as it would be a no-op and isn't a valid
    /// usage of the command anyway.
    // TODO: ClearValue could be more precise
    pub unsafe fn clear_color_image<I, R>(
        &mut self,
        image: &I,
        layout: ImageLayout,
        color: ClearValue,
        regions: R,
    ) where
        I: ?Sized + ImageAccess,
        R: IntoIterator<Item = UnsafeCommandBufferBuilderColorImageClear>,
    {
        debug_assert!(
            image.format().ty() == FormatTy::Float
                || image.format().ty() == FormatTy::Uint
                || image.format().ty() == FormatTy::Sint
        );

        let image = image.inner();
        debug_assert!(image.image.usage().transfer_destination);
        debug_assert!(layout == ImageLayout::General || layout == ImageLayout::TransferDstOptimal);

        let color = match color {
            ClearValue::Float(val) => ash::vk::ClearColorValue { float32: val },
            ClearValue::Int(val) => ash::vk::ClearColorValue { int32: val },
            ClearValue::Uint(val) => ash::vk::ClearColorValue { uint32: val },
            _ => ash::vk::ClearColorValue { float32: [0.0; 4] },
        };

        let regions: SmallVec<[_; 8]> = regions
            .into_iter()
            .filter_map(|region| {
                debug_assert!(
                    region.layer_count + region.base_array_layer <= image.num_layers as u32
                );
                debug_assert!(
                    region.level_count + region.base_mip_level <= image.num_mipmap_levels as u32
                );

                if region.layer_count == 0 || region.level_count == 0 {
                    return None;
                }

                Some(ash::vk::ImageSubresourceRange {
                    aspect_mask: ash::vk::ImageAspectFlags::COLOR,
                    base_mip_level: region.base_mip_level + image.first_mipmap_level as u32,
                    level_count: region.level_count,
                    base_array_layer: region.base_array_layer + image.first_layer as u32,
                    layer_count: region.layer_count,
                })
            })
            .collect();

        if regions.is_empty() {
            return;
        }

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_clear_color_image(
            cmd,
            image.image.internal_object(),
            layout.into(),
            &color,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }

    /// Calls `vkCmdCopyBuffer` on the builder.
    ///
    /// Does nothing if the list of regions is empty, as it would be a no-op and isn't a valid
    /// usage of the command anyway.
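    ///
    /// Each element of `regions` is a `(source_offset, destination_offset, size)` tuple, in
    /// bytes, relative to the start of `source` and `destination` respectively. For example,
    /// `(0, 64, 128)` copies the first 128 bytes of `source` to byte offset 64 of `destination`.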
    #[inline]
    pub unsafe fn copy_buffer<S, D, R>(&mut self, source: &S, destination: &D, regions: R)
    where
        S: ?Sized + BufferAccess,
        D: ?Sized + BufferAccess,
        R: IntoIterator<Item = (DeviceSize, DeviceSize, DeviceSize)>,
    {
        // TODO: debug assert that there's no overlap in the destinations?

        let source = source.inner();
        debug_assert!(source.offset < source.buffer.size());
        debug_assert!(source.buffer.usage().transfer_source);

        let destination = destination.inner();
        debug_assert!(destination.offset < destination.buffer.size());
        debug_assert!(destination.buffer.usage().transfer_destination);

        let regions: SmallVec<[_; 8]> = regions
            .into_iter()
            .map(|(sr, de, sz)| ash::vk::BufferCopy {
                src_offset: sr + source.offset,
                dst_offset: de + destination.offset,
                size: sz,
            })
            .collect();

        if regions.is_empty() {
            return;
        }

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_copy_buffer(
            cmd,
            source.buffer.internal_object(),
            destination.buffer.internal_object(),
            regions.len() as u32,
            regions.as_ptr(),
        );
    }

    /// Calls `vkCmdCopyBufferToImage` on the builder.
    ///
    /// Does nothing if the list of regions is empty, as it would be a no-op and isn't a valid
    /// usage of the command anyway.
    #[inline]
    pub unsafe fn copy_buffer_to_image<S, D, R>(
        &mut self,
        source: &S,
        destination: &D,
        destination_layout: ImageLayout,
        regions: R,
    ) where
        S: ?Sized + BufferAccess,
        D: ?Sized + ImageAccess,
        R: IntoIterator<Item = UnsafeCommandBufferBuilderBufferImageCopy>,
    {
        let source = source.inner();
        debug_assert!(source.offset < source.buffer.size());
        debug_assert!(source.buffer.usage().transfer_source);

        debug_assert_eq!(destination.samples(), SampleCount::Sample1);
        let destination = destination.inner();
        debug_assert!(destination.image.usage().transfer_destination);
        debug_assert!(
            destination_layout == ImageLayout::General
                || destination_layout == ImageLayout::TransferDstOptimal
        );

        let regions: SmallVec<[_; 8]> = regions
            .into_iter()
            .map(|copy| {
                debug_assert!(copy.image_layer_count <= destination.num_layers as u32);
                debug_assert!(copy.image_mip_level < destination.num_mipmap_levels as u32);

                ash::vk::BufferImageCopy {
                    buffer_offset: source.offset + copy.buffer_offset,
                    buffer_row_length: copy.buffer_row_length,
                    buffer_image_height: copy.buffer_image_height,
                    image_subresource: ash::vk::ImageSubresourceLayers {
                        aspect_mask: copy.image_aspect.into(),
                        mip_level: copy.image_mip_level + destination.first_mipmap_level as u32,
                        base_array_layer: copy.image_base_array_layer
                            + destination.first_layer as u32,
                        layer_count: copy.image_layer_count,
                    },
                    image_offset: ash::vk::Offset3D {
                        x: copy.image_offset[0],
                        y: copy.image_offset[1],
                        z: copy.image_offset[2],
                    },
                    image_extent: ash::vk::Extent3D {
                        width: copy.image_extent[0],
                        height: copy.image_extent[1],
                        depth: copy.image_extent[2],
                    },
                }
            })
            .collect();

        if regions.is_empty() {
            return;
        }

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_copy_buffer_to_image(
            cmd,
            source.buffer.internal_object(),
            destination.image.internal_object(),
            destination_layout.into(),
            regions.len() as u32,
            regions.as_ptr(),
        );
    }

    /// Calls `vkCmdCopyImageToBuffer` on the builder.
    ///
    /// Does nothing if the list of regions is empty, as it would be a no-op and isn't a valid
    /// usage of the command anyway.
    #[inline]
    pub unsafe fn copy_image_to_buffer<S, D, R>(
        &mut self,
        source: &S,
        source_layout: ImageLayout,
        destination: &D,
        regions: R,
    ) where
        S: ?Sized + ImageAccess,
        D: ?Sized + BufferAccess,
        R: IntoIterator<Item = UnsafeCommandBufferBuilderBufferImageCopy>,
    {
        debug_assert_eq!(source.samples(), SampleCount::Sample1);
        let source = source.inner();
        debug_assert!(source.image.usage().transfer_source);
        debug_assert!(
            source_layout == ImageLayout::General
                || source_layout == ImageLayout::TransferSrcOptimal
        );

        let destination = destination.inner();
        debug_assert!(destination.offset < destination.buffer.size());
        debug_assert!(destination.buffer.usage().transfer_destination);

        let regions: SmallVec<[_; 8]> = regions
            .into_iter()
            .map(|copy| {
                debug_assert!(copy.image_layer_count <= source.num_layers as u32);
                debug_assert!(copy.image_mip_level < source.num_mipmap_levels as u32);

                ash::vk::BufferImageCopy {
                    buffer_offset: destination.offset + copy.buffer_offset,
                    buffer_row_length: copy.buffer_row_length,
                    buffer_image_height: copy.buffer_image_height,
                    image_subresource: ash::vk::ImageSubresourceLayers {
                        aspect_mask: copy.image_aspect.into(),
                        mip_level: copy.image_mip_level + source.first_mipmap_level as u32,
                        base_array_layer: copy.image_base_array_layer + source.first_layer as u32,
                        layer_count: copy.image_layer_count,
                    },
                    image_offset: ash::vk::Offset3D {
                        x: copy.image_offset[0],
                        y: copy.image_offset[1],
                        z: copy.image_offset[2],
                    },
                    image_extent: ash::vk::Extent3D {
                        width: copy.image_extent[0],
                        height: copy.image_extent[1],
                        depth: copy.image_extent[2],
                    },
                }
            })
            .collect();

        if regions.is_empty() {
            return;
        }

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_copy_image_to_buffer(
            cmd,
            source.image.internal_object(),
            source_layout.into(),
            destination.buffer.internal_object(),
            regions.len() as u32,
            regions.as_ptr(),
        );
    }

    /// Calls `vkCmdCopyQueryPoolResults` on the builder.
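    ///
    /// `stride` is the number of bytes between the results of consecutive queries in
    /// `destination`; both it and the destination offset must be multiples of the result
    /// element size (checked with debug assertions below). The element type `T` contributes its
    /// own flag (e.g. requesting 64-bit results) via `T::FLAG`.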
    #[inline]
    pub unsafe fn copy_query_pool_results<D, T>(
        &mut self,
        queries: QueriesRange,
        destination: D,
        stride: DeviceSize,
        flags: QueryResultFlags,
    ) where
        D: BufferAccess + TypedBufferAccess<Content = [T]>,
        T: QueryResultElement,
    {
        let destination = destination.inner();
        let range = queries.range();
        debug_assert!(destination.offset < destination.buffer.size());
        debug_assert!(destination.buffer.usage().transfer_destination);
        debug_assert!(destination.offset % std::mem::size_of::<T>() as DeviceSize == 0);
        debug_assert!(stride % std::mem::size_of::<T>() as DeviceSize == 0);

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_copy_query_pool_results(
            cmd,
            queries.pool().internal_object(),
            range.start,
            range.end - range.start,
            destination.buffer.internal_object(),
            destination.offset,
            stride,
            ash::vk::QueryResultFlags::from(flags) | T::FLAG,
        );
    }

    /// Calls `vkCmdDispatch` on the builder.
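    ///
    /// A minimal sketch of a call site (the counts are assumed to be within the device's
    /// `max_compute_work_group_count` limit, which is checked with a debug assertion below):
    ///
    /// ```ignore
    /// builder.dispatch([64, 1, 1]); // 64 work groups along x, 1 along y and z
    /// ```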
    #[inline]
    pub unsafe fn dispatch(&mut self, group_counts: [u32; 3]) {
        debug_assert!({
            let max_group_counts = self
                .device()
                .physical_device()
                .properties()
                .max_compute_work_group_count;
            group_counts[0] <= max_group_counts[0]
                && group_counts[1] <= max_group_counts[1]
                && group_counts[2] <= max_group_counts[2]
        });

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0
            .cmd_dispatch(cmd, group_counts[0], group_counts[1], group_counts[2]);
    }

    /// Calls `vkCmdDispatchIndirect` on the builder.
    #[inline]
    pub unsafe fn dispatch_indirect<B>(&mut self, buffer: &B)
    where
        B: ?Sized + BufferAccess,
    {
        let fns = self.device().fns();
        let cmd = self.internal_object();

        let inner = buffer.inner();
        debug_assert!(inner.offset < inner.buffer.size());
        debug_assert!(inner.buffer.usage().indirect_buffer);
        debug_assert_eq!(inner.offset % 4, 0);

        fns.v1_0
            .cmd_dispatch_indirect(cmd, inner.buffer.internal_object(), inner.offset);
    }

    /// Calls `vkCmdDraw` on the builder.
    #[inline]
    pub unsafe fn draw(
        &mut self,
        vertex_count: u32,
        instance_count: u32,
        first_vertex: u32,
        first_instance: u32,
    ) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_draw(
            cmd,
            vertex_count,
            instance_count,
            first_vertex,
            first_instance,
        );
    }

    /// Calls `vkCmdDrawIndexed` on the builder.
    #[inline]
    pub unsafe fn draw_indexed(
        &mut self,
        index_count: u32,
        instance_count: u32,
        first_index: u32,
        vertex_offset: i32,
        first_instance: u32,
    ) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_draw_indexed(
            cmd,
            index_count,
            instance_count,
            first_index,
            vertex_offset,
            first_instance,
        );
    }

    /// Calls `vkCmdDrawIndirect` on the builder.
    #[inline]
    pub unsafe fn draw_indirect<B>(&mut self, buffer: &B, draw_count: u32, stride: u32)
    where
        B: ?Sized + BufferAccess,
    {
        let fns = self.device().fns();
        let cmd = self.internal_object();

        debug_assert!(
            draw_count == 0
                || ((stride % 4) == 0)
                    && stride as usize >= mem::size_of::<ash::vk::DrawIndirectCommand>()
        );

        let inner = buffer.inner();
        debug_assert!(inner.offset < inner.buffer.size());
        debug_assert!(inner.buffer.usage().indirect_buffer);

        fns.v1_0.cmd_draw_indirect(
            cmd,
            inner.buffer.internal_object(),
            inner.offset,
            draw_count,
            stride,
        );
    }

    /// Calls `vkCmdDrawIndexedIndirect` on the builder.
    #[inline]
    pub unsafe fn draw_indexed_indirect<B>(&mut self, buffer: &B, draw_count: u32, stride: u32)
    where
        B: ?Sized + BufferAccess,
    {
        let fns = self.device().fns();
        let cmd = self.internal_object();

        let inner = buffer.inner();
        debug_assert!(inner.offset < inner.buffer.size());
        debug_assert!(inner.buffer.usage().indirect_buffer);

        fns.v1_0.cmd_draw_indexed_indirect(
            cmd,
            inner.buffer.internal_object(),
            inner.offset,
            draw_count,
            stride,
        );
    }

    /// Calls `vkCmdEndQuery` on the builder.
    #[inline]
    pub unsafe fn end_query(&mut self, query: Query) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0
            .cmd_end_query(cmd, query.pool().internal_object(), query.index());
    }

    /// Calls `vkCmdEndRenderPass` on the builder.
    #[inline]
    pub unsafe fn end_render_pass(&mut self) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_end_render_pass(cmd);
    }

    /// Calls `vkCmdExecuteCommands` on the builder.
    ///
    /// Does nothing if the list of command buffers is empty, as it would be a no-op and isn't a
    /// valid usage of the command anyway.
    #[inline]
    pub unsafe fn execute_commands(&mut self, cbs: UnsafeCommandBufferBuilderExecuteCommands) {
        if cbs.raw_cbs.is_empty() {
            return;
        }

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0
            .cmd_execute_commands(cmd, cbs.raw_cbs.len() as u32, cbs.raw_cbs.as_ptr());
    }

    /// Calls `vkCmdFillBuffer` on the builder.
    #[inline]
    pub unsafe fn fill_buffer<B>(&mut self, buffer: &B, data: u32)
    where
        B: ?Sized + BufferAccess,
    {
        let fns = self.device().fns();
        let cmd = self.internal_object();

        let size = buffer.size();

        let (buffer_handle, offset) = {
            let BufferInner {
                buffer: buffer_inner,
                offset,
            } = buffer.inner();
            debug_assert!(buffer_inner.usage().transfer_destination);
            debug_assert_eq!(offset % 4, 0);
            (buffer_inner.internal_object(), offset)
        };

        fns.v1_0
            .cmd_fill_buffer(cmd, buffer_handle, offset, size, data);
    }

    /// Calls `vkCmdNextSubpass` on the builder.
    #[inline]
    pub unsafe fn next_subpass(&mut self, subpass_contents: SubpassContents) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_next_subpass(cmd, subpass_contents.into());
    }

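    /// Calls `vkCmdPipelineBarrier` on the builder.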
    #[inline]
    pub unsafe fn pipeline_barrier(&mut self, command: &UnsafeCommandBufferBuilderPipelineBarrier) {
        // If the barrier is empty, don't do anything.
        if command.src_stage_mask.is_empty() || command.dst_stage_mask.is_empty() {
            debug_assert!(command.src_stage_mask.is_empty() && command.dst_stage_mask.is_empty());
            debug_assert!(command.memory_barriers.is_empty());
            debug_assert!(command.buffer_barriers.is_empty());
            debug_assert!(command.image_barriers.is_empty());
            return;
        }

        let fns = self.device().fns();
        let cmd = self.internal_object();

        debug_assert!(!command.src_stage_mask.is_empty());
        debug_assert!(!command.dst_stage_mask.is_empty());

        fns.v1_0.cmd_pipeline_barrier(
            cmd,
            command.src_stage_mask,
            command.dst_stage_mask,
            command.dependency_flags,
            command.memory_barriers.len() as u32,
            command.memory_barriers.as_ptr(),
            command.buffer_barriers.len() as u32,
            command.buffer_barriers.as_ptr(),
            command.image_barriers.len() as u32,
            command.image_barriers.as_ptr(),
        );
    }

    /// Calls `vkCmdPushConstants` on the builder.
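    ///
    /// A hedged sketch of a call site; the `layout` variable, `PushData` type and `push_data`
    /// value are assumptions here, and must match the pipeline layout's push constant ranges:
    ///
    /// ```ignore
    /// builder.push_constants(
    ///     &layout,
    ///     ShaderStages { vertex: true, ..ShaderStages::none() },
    ///     0,                                      // offset: must be a multiple of 4
    ///     std::mem::size_of::<PushData>() as u32, // size: non-zero, a multiple of 4
    ///     &push_data,
    /// );
    /// ```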
    #[inline]
    pub unsafe fn push_constants<D>(
        &mut self,
        pipeline_layout: &PipelineLayout,
        stages: ShaderStages,
        offset: u32,
        size: u32,
        data: &D,
    ) where
        D: ?Sized,
    {
        let fns = self.device().fns();
        let cmd = self.internal_object();

        debug_assert!(stages != ShaderStages::none());
        debug_assert!(size > 0);
        debug_assert_eq!(size % 4, 0);
        debug_assert_eq!(offset % 4, 0);
        debug_assert!(mem::size_of_val(data) >= size as usize);

        fns.v1_0.cmd_push_constants(
            cmd,
            pipeline_layout.internal_object(),
            stages.into(),
            offset,
            size,
            data as *const D as *const _,
        );
    }

    /// Calls `vkCmdResetEvent` on the builder.
    #[inline]
    pub unsafe fn reset_event(&mut self, event: &Event, stages: PipelineStages) {
        let fns = self.device().fns();
        let cmd = self.internal_object();

        debug_assert!(!stages.host);
        debug_assert_ne!(stages, PipelineStages::none());

        fns.v1_0
            .cmd_reset_event(cmd, event.internal_object(), stages.into());
    }

    /// Calls `vkCmdResetQueryPool` on the builder.
    #[inline]
    pub unsafe fn reset_query_pool(&mut self, queries: QueriesRange) {
        let range = queries.range();
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_reset_query_pool(
            cmd,
            queries.pool().internal_object(),
            range.start,
            range.end - range.start,
        );
    }

    /// Calls `vkCmdSetBlendConstants` on the builder.
    #[inline]
    pub unsafe fn set_blend_constants(&mut self, constants: [f32; 4]) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_set_blend_constants(cmd, &constants);
    }

    /// Calls `vkCmdSetDepthBias` on the builder.
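    ///
    /// If `clamp` is anything other than `0.0`, the `depth_bias_clamp` feature must be enabled
    /// on the device (checked with a debug assertion below).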
1311     #[inline]
set_depth_bias(&mut self, constant_factor: f32, clamp: f32, slope_factor: f32)1312     pub unsafe fn set_depth_bias(&mut self, constant_factor: f32, clamp: f32, slope_factor: f32) {
1313         let fns = self.device().fns();
1314         let cmd = self.internal_object();
1315         debug_assert!(clamp == 0.0 || self.device().enabled_features().depth_bias_clamp);
1316         fns.v1_0
1317             .cmd_set_depth_bias(cmd, constant_factor, clamp, slope_factor);
1318     }
1319 
    /// Calls `vkCmdSetDepthBounds` on the builder.
    #[inline]
    pub unsafe fn set_depth_bounds(&mut self, min: f32, max: f32) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        debug_assert!(min >= 0.0 && min <= 1.0);
        debug_assert!(max >= 0.0 && max <= 1.0);
        fns.v1_0.cmd_set_depth_bounds(cmd, min, max);
    }

    /// Calls `vkCmdSetEvent` on the builder.
    #[inline]
    pub unsafe fn set_event(&mut self, event: &Event, stages: PipelineStages) {
        let fns = self.device().fns();
        let cmd = self.internal_object();

        debug_assert!(!stages.host);
        debug_assert_ne!(stages, PipelineStages::none());

        fns.v1_0
            .cmd_set_event(cmd, event.internal_object(), stages.into());
    }

    /// Calls `vkCmdSetLineWidth` on the builder.
    #[inline]
    pub unsafe fn set_line_width(&mut self, line_width: f32) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        debug_assert!(line_width == 1.0 || self.device().enabled_features().wide_lines);
        fns.v1_0.cmd_set_line_width(cmd, line_width);
    }

    /// Calls `vkCmdSetStencilCompareMask` on the builder.
    #[inline]
    pub unsafe fn set_stencil_compare_mask(&mut self, face_mask: StencilFaces, compare_mask: u32) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0
            .cmd_set_stencil_compare_mask(cmd, face_mask.into(), compare_mask);
    }

    /// Calls `vkCmdSetStencilReference` on the builder.
    #[inline]
    pub unsafe fn set_stencil_reference(&mut self, face_mask: StencilFaces, reference: u32) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0
            .cmd_set_stencil_reference(cmd, face_mask.into(), reference);
    }

    /// Calls `vkCmdSetStencilWriteMask` on the builder.
    #[inline]
    pub unsafe fn set_stencil_write_mask(&mut self, face_mask: StencilFaces, write_mask: u32) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0
            .cmd_set_stencil_write_mask(cmd, face_mask.into(), write_mask);
    }

    /// Calls `vkCmdSetScissor` on the builder.
    ///
    /// If the list is empty, the command is automatically ignored.
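    ///
    /// # Examples
    ///
    /// A minimal sketch (not a compilable doctest), assuming `builder` is an
    /// `UnsafeCommandBufferBuilder` recording for a pipeline with dynamic scissor state:
    ///
    /// ```ignore
    /// unsafe {
    ///     builder.set_scissor(
    ///         0,
    ///         std::iter::once(Scissor {
    ///             origin: [0, 0],
    ///             dimensions: [640, 480],
    ///         }),
    ///     );
    /// }
    /// ```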
    #[inline]
    pub unsafe fn set_scissor<I>(&mut self, first_scissor: u32, scissors: I)
    where
        I: IntoIterator<Item = Scissor>,
    {
        let scissors = scissors
            .into_iter()
            .map(ash::vk::Rect2D::from)
            .collect::<SmallVec<[_; 16]>>();
        if scissors.is_empty() {
            return;
        }

        debug_assert!(scissors.iter().all(|s| s.offset.x >= 0 && s.offset.y >= 0));
        debug_assert!(scissors.iter().all(|s| {
            s.extent.width < i32::MAX as u32
                && s.extent.height < i32::MAX as u32
                && s.offset.x.checked_add(s.extent.width as i32).is_some()
                && s.offset.y.checked_add(s.extent.height as i32).is_some()
        }));
        debug_assert!(
            (first_scissor == 0 && scissors.len() == 1)
                || self.device().enabled_features().multi_viewport
        );
        debug_assert!({
            let max = self
                .device()
                .physical_device()
                .properties()
                .max_viewports;
            first_scissor + scissors.len() as u32 <= max
        });

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0
            .cmd_set_scissor(cmd, first_scissor, scissors.len() as u32, scissors.as_ptr());
    }

    /// Calls `vkCmdSetViewport` on the builder.
    ///
    /// If the list is empty, the command is automatically ignored.
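    ///
    /// # Examples
    ///
    /// A minimal sketch (not a compilable doctest), assuming `builder` is an
    /// `UnsafeCommandBufferBuilder` recording for a pipeline with dynamic viewport state:
    ///
    /// ```ignore
    /// unsafe {
    ///     builder.set_viewport(
    ///         0,
    ///         std::iter::once(Viewport {
    ///             origin: [0.0, 0.0],
    ///             dimensions: [640.0, 480.0],
    ///             depth_range: 0.0..1.0,
    ///         }),
    ///     );
    /// }
    /// ```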
    #[inline]
    pub unsafe fn set_viewport<I>(&mut self, first_viewport: u32, viewports: I)
    where
        I: IntoIterator<Item = Viewport>,
    {
        let viewports = viewports
            .into_iter()
            .map(|v| v.into())
            .collect::<SmallVec<[_; 16]>>();
        if viewports.is_empty() {
            return;
        }

        debug_assert!(
            (first_viewport == 0 && viewports.len() == 1)
                || self.device().enabled_features().multi_viewport
        );
        debug_assert!({
            let max = self
                .device()
                .physical_device()
                .properties()
                .max_viewports;
            first_viewport + viewports.len() as u32 <= max
        });

        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_set_viewport(
            cmd,
            first_viewport,
            viewports.len() as u32,
            viewports.as_ptr(),
        );
    }

    /// Calls `vkCmdUpdateBuffer` on the builder.
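    ///
    /// # Examples
    ///
    /// A minimal sketch (not a compilable doctest), assuming `builder` is an
    /// `UnsafeCommandBufferBuilder` and `buffer` is a 4-byte buffer with the
    /// `transfer_destination` usage:
    ///
    /// ```ignore
    /// let value: u32 = 42;
    /// unsafe {
    ///     builder.update_buffer(&buffer, &value);
    /// }
    /// ```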
    #[inline]
    pub unsafe fn update_buffer<B, D>(&mut self, buffer: &B, data: &D)
    where
        B: ?Sized + BufferAccess,
        D: ?Sized,
    {
        let fns = self.device().fns();
        let cmd = self.internal_object();

        let size = buffer.size();
        // vkCmdUpdateBuffer only supports updates of at most 65536 bytes,
        // in multiples of 4 bytes.
        debug_assert_eq!(size % 4, 0);
        debug_assert!(size <= 65536);
        debug_assert!(size <= mem::size_of_val(data) as DeviceSize);

        let (buffer_handle, offset) = {
            let BufferInner {
                buffer: buffer_inner,
                offset,
            } = buffer.inner();
            debug_assert!(buffer_inner.usage().transfer_destination);
            debug_assert_eq!(offset % 4, 0);
            (buffer_inner.internal_object(), offset)
        };

        fns.v1_0.cmd_update_buffer(
            cmd,
            buffer_handle,
            offset,
            size,
            data as *const D as *const _,
        );
    }

    /// Calls `vkCmdWriteTimestamp` on the builder.
    #[inline]
    pub unsafe fn write_timestamp(&mut self, query: Query, stage: PipelineStage) {
        let fns = self.device().fns();
        let cmd = self.internal_object();
        fns.v1_0.cmd_write_timestamp(
            cmd,
            stage.into(),
            query.pool().internal_object(),
            query.index(),
        );
    }

    /// Calls `vkCmdBeginDebugUtilsLabelEXT` on the builder.
    ///
    /// # Safety
    /// The command pool that this command buffer was allocated from must support graphics or
    /// compute operations.
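    ///
    /// # Examples
    ///
    /// A minimal sketch (not a compilable doctest), assuming `builder` is an
    /// `UnsafeCommandBufferBuilder` and the `ext_debug_utils` extension is enabled on the
    /// instance:
    ///
    /// ```ignore
    /// use std::ffi::CStr;
    ///
    /// let name = CStr::from_bytes_with_nul(b"frame setup\0").unwrap();
    /// unsafe {
    ///     builder.debug_marker_begin(name, [0.0, 1.0, 0.0, 1.0]);
    ///     // ... record the commands covered by the label ...
    ///     builder.debug_marker_end();
    /// }
    /// ```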
    #[inline]
    pub unsafe fn debug_marker_begin(&mut self, name: &CStr, color: [f32; 4]) {
        let fns = self.device().instance().fns();
        let cmd = self.internal_object();
        let info = ash::vk::DebugUtilsLabelEXT {
            p_label_name: name.as_ptr(),
            color,
            ..Default::default()
        };
        fns.ext_debug_utils
            .cmd_begin_debug_utils_label_ext(cmd, &info);
    }

    /// Calls `vkCmdEndDebugUtilsLabelEXT` on the builder.
    ///
    /// # Safety
    /// There must be an outstanding `vkCmdBeginDebugUtilsLabelEXT` command prior to this one,
    /// either within this command buffer or on the queue that the `CommandBuffer` is
    /// submitted to.
    #[inline]
    pub unsafe fn debug_marker_end(&mut self) {
        let fns = self.device().instance().fns();
        let cmd = self.internal_object();
        fns.ext_debug_utils.cmd_end_debug_utils_label_ext(cmd);
    }

    /// Calls `vkCmdInsertDebugUtilsLabelEXT` on the builder.
    ///
    /// # Safety
    /// The command pool that this command buffer was allocated from must support graphics or
    /// compute operations.
    #[inline]
    pub unsafe fn debug_marker_insert(&mut self, name: &CStr, color: [f32; 4]) {
        let fns = self.device().instance().fns();
        let cmd = self.internal_object();
        let info = ash::vk::DebugUtilsLabelEXT {
            p_label_name: name.as_ptr(),
            color,
            ..Default::default()
        };
        fns.ext_debug_utils
            .cmd_insert_debug_utils_label_ext(cmd, &info);
    }
}

unsafe impl DeviceOwned for UnsafeCommandBufferBuilder {
    #[inline]
    fn device(&self) -> &Arc<Device> {
        &self.device
    }
}

unsafe impl VulkanObject for UnsafeCommandBufferBuilder {
    type Object = ash::vk::CommandBuffer;

    #[inline]
    fn internal_object(&self) -> ash::vk::CommandBuffer {
        self.command_buffer
    }
}

/// Prototype for a `vkCmdBindVertexBuffers` command.
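///
/// # Examples
///
/// A minimal sketch (not a compilable doctest), assuming `builder` is an
/// `UnsafeCommandBufferBuilder`, `vertex_buffer` implements `BufferAccess` with the
/// `vertex_buffer` usage, and the prototype is consumed by the builder's
/// `bind_vertex_buffers` method defined earlier in this module:
///
/// ```ignore
/// let mut bind = UnsafeCommandBufferBuilderBindVertexBuffer::new();
/// bind.add(&vertex_buffer);
/// unsafe {
///     builder.bind_vertex_buffers(0, bind);
/// }
/// ```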
pub struct UnsafeCommandBufferBuilderBindVertexBuffer {
    // Raw handles of the buffers to bind.
    raw_buffers: SmallVec<[ash::vk::Buffer; 4]>,
    // Raw offsets of the buffers to bind.
    offsets: SmallVec<[DeviceSize; 4]>,
}

impl UnsafeCommandBufferBuilderBindVertexBuffer {
    /// Builds a new empty list.
    #[inline]
    pub fn new() -> UnsafeCommandBufferBuilderBindVertexBuffer {
        UnsafeCommandBufferBuilderBindVertexBuffer {
            raw_buffers: SmallVec::new(),
            offsets: SmallVec::new(),
        }
    }

    /// Adds a buffer to the list.
    #[inline]
    pub fn add<B>(&mut self, buffer: &B)
    where
        B: ?Sized + BufferAccess,
    {
        let inner = buffer.inner();
        debug_assert!(inner.buffer.usage().vertex_buffer);
        self.raw_buffers.push(inner.buffer.internal_object());
        self.offsets.push(inner.offset);
    }
}

/// Prototype for a `vkCmdExecuteCommands` command.
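///
/// # Examples
///
/// A minimal sketch (not a compilable doctest), assuming `builder` is an
/// `UnsafeCommandBufferBuilder`, `secondary` implements `SecondaryCommandBuffer`, and the
/// prototype is consumed by the builder's `execute_commands` method defined earlier in this
/// module:
///
/// ```ignore
/// let mut exec = UnsafeCommandBufferBuilderExecuteCommands::new();
/// exec.add(&secondary);
/// unsafe {
///     builder.execute_commands(exec);
/// }
/// ```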
pub struct UnsafeCommandBufferBuilderExecuteCommands {
    // Raw handles of the command buffers to execute.
    raw_cbs: SmallVec<[ash::vk::CommandBuffer; 4]>,
}

impl UnsafeCommandBufferBuilderExecuteCommands {
    /// Builds a new empty list.
    #[inline]
    pub fn new() -> UnsafeCommandBufferBuilderExecuteCommands {
        UnsafeCommandBufferBuilderExecuteCommands {
            raw_cbs: SmallVec::new(),
        }
    }

    /// Adds a command buffer to the list.
    #[inline]
    pub fn add<C>(&mut self, cb: &C)
    where
        C: ?Sized + SecondaryCommandBuffer,
    {
        // TODO: debug assert that it is a secondary command buffer?
        self.raw_cbs.push(cb.inner().internal_object());
    }

    /// Adds a raw command buffer handle to the list.
    #[inline]
    pub unsafe fn add_raw(&mut self, cb: ash::vk::CommandBuffer) {
        self.raw_cbs.push(cb);
    }
}

// TODO: move somewhere else?
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct UnsafeCommandBufferBuilderColorImageClear {
    pub base_mip_level: u32,
    pub level_count: u32,
    pub base_array_layer: u32,
    pub layer_count: u32,
}

// TODO: move somewhere else?
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct UnsafeCommandBufferBuilderBufferImageCopy {
    pub buffer_offset: DeviceSize,
    pub buffer_row_length: u32,
    pub buffer_image_height: u32,
    pub image_aspect: ImageAspect,
    pub image_mip_level: u32,
    pub image_base_array_layer: u32,
    pub image_layer_count: u32,
    pub image_offset: [i32; 3],
    pub image_extent: [u32; 3],
}

// TODO: move somewhere else?
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct UnsafeCommandBufferBuilderImageCopy {
    pub aspects: ImageAspects,
    pub source_mip_level: u32,
    pub destination_mip_level: u32,
    pub source_base_array_layer: u32,
    pub destination_base_array_layer: u32,
    pub layer_count: u32,
    pub source_offset: [i32; 3],
    pub destination_offset: [i32; 3],
    pub extent: [u32; 3],
}

// TODO: move somewhere else?
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct UnsafeCommandBufferBuilderImageBlit {
    pub aspects: ImageAspects,
    pub source_mip_level: u32,
    pub destination_mip_level: u32,
    pub source_base_array_layer: u32,
    pub destination_base_array_layer: u32,
    pub layer_count: u32,
    pub source_top_left: [i32; 3],
    pub source_bottom_right: [i32; 3],
    pub destination_top_left: [i32; 3],
    pub destination_bottom_right: [i32; 3],
}

/// Command that adds a pipeline barrier to a command buffer builder.
///
/// A pipeline barrier is a low-level synchronization command that is often necessary for safety.
/// By default, all the commands that you add to a command buffer can potentially run
/// simultaneously. Adding a pipeline barrier separates the commands before the barrier from the
/// commands after it, and prevents them from running simultaneously.
///
/// Please take a look at the Vulkan specification for more information. Pipeline barriers are a
/// complex topic, and explaining them in full in this documentation would be redundant.
///
/// > **Note**: We use a builder-like API here so that users can pass multiple buffers or images of
/// > multiple different types. Doing so with a single function would be very tedious in terms of
/// > API.
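///
/// # Examples
///
/// A minimal sketch (not a compilable doctest), assuming `builder` is an
/// `UnsafeCommandBufferBuilder`, the flag-struct constructors shown match this crate version,
/// and the finished barrier is consumed by the builder's `pipeline_barrier` method:
///
/// ```ignore
/// // Make transfer writes visible to fragment-shader reads.
/// let mut barrier = UnsafeCommandBufferBuilderPipelineBarrier::new();
/// unsafe {
///     barrier.add_memory_barrier(
///         PipelineStages { transfer: true, ..PipelineStages::none() },
///         AccessFlags { transfer_write: true, ..AccessFlags::none() },
///         PipelineStages { fragment_shader: true, ..PipelineStages::none() },
///         AccessFlags { shader_read: true, ..AccessFlags::none() },
///         false,
///     );
///     builder.pipeline_barrier(&barrier);
/// }
/// ```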
pub struct UnsafeCommandBufferBuilderPipelineBarrier {
    src_stage_mask: ash::vk::PipelineStageFlags,
    dst_stage_mask: ash::vk::PipelineStageFlags,
    dependency_flags: ash::vk::DependencyFlags,
    memory_barriers: SmallVec<[ash::vk::MemoryBarrier; 2]>,
    buffer_barriers: SmallVec<[ash::vk::BufferMemoryBarrier; 8]>,
    image_barriers: SmallVec<[ash::vk::ImageMemoryBarrier; 8]>,
}

impl UnsafeCommandBufferBuilderPipelineBarrier {
    /// Creates a new empty pipeline barrier command.
    #[inline]
    pub fn new() -> UnsafeCommandBufferBuilderPipelineBarrier {
        UnsafeCommandBufferBuilderPipelineBarrier {
            src_stage_mask: ash::vk::PipelineStageFlags::empty(),
            dst_stage_mask: ash::vk::PipelineStageFlags::empty(),
            dependency_flags: ash::vk::DependencyFlags::BY_REGION,
            memory_barriers: SmallVec::new(),
            buffer_barriers: SmallVec::new(),
            image_barriers: SmallVec::new(),
        }
    }

    /// Returns true if no barrier or execution dependency has been added yet.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.src_stage_mask.is_empty() || self.dst_stage_mask.is_empty()
    }

    /// Merges another pipeline barrier command into this one.
    #[inline]
    pub fn merge(&mut self, other: UnsafeCommandBufferBuilderPipelineBarrier) {
        self.src_stage_mask |= other.src_stage_mask;
        self.dst_stage_mask |= other.dst_stage_mask;
        // `BY_REGION` is only kept if both barriers have it.
        self.dependency_flags &= other.dependency_flags;

        self.memory_barriers
            .extend(other.memory_barriers.into_iter());
        self.buffer_barriers
            .extend(other.buffer_barriers.into_iter());
        self.image_barriers.extend(other.image_barriers.into_iter());
    }

    /// Adds an execution dependency. This means that all the stages in `source` of the previous
    /// commands must finish before any of the stages in `destination` of the following commands
    /// can start.
    ///
    /// # Safety
    ///
    /// - If the pipeline stages include geometry or tessellation stages, then the corresponding
    ///   features must have been enabled on the device.
    /// - There are certain rules regarding pipeline barriers inside render passes.
    ///
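    /// # Examples
    ///
    /// A minimal sketch (not a compilable doctest), assuming the flag-struct constructors
    /// shown match this crate version:
    ///
    /// ```ignore
    /// // All prior compute work must finish before any following transfer work starts.
    /// unsafe {
    ///     barrier.add_execution_dependency(
    ///         PipelineStages { compute_shader: true, ..PipelineStages::none() },
    ///         PipelineStages { transfer: true, ..PipelineStages::none() },
    ///         false,
    ///     );
    /// }
    /// ```
    ///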
    #[inline]
    pub unsafe fn add_execution_dependency(
        &mut self,
        source: PipelineStages,
        destination: PipelineStages,
        by_region: bool,
    ) {
        if !by_region {
            // A single non-by-region dependency removes `BY_REGION` from the whole barrier.
            self.dependency_flags = ash::vk::DependencyFlags::empty();
        }

        debug_assert_ne!(source, PipelineStages::none());
        debug_assert_ne!(destination, PipelineStages::none());

        self.src_stage_mask |= ash::vk::PipelineStageFlags::from(source);
        self.dst_stage_mask |= ash::vk::PipelineStageFlags::from(destination);
    }

    /// Adds a memory barrier. This means that all the memory writes by the given source stages
    /// for the given source accesses must be made visible to the given destination stages for the
    /// given destination accesses.
    ///
    /// Also adds an execution dependency similar to `add_execution_dependency`.
    ///
    /// # Safety
    ///
    /// - Same as `add_execution_dependency`.
    ///
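    /// # Examples
    ///
    /// A minimal sketch (not a compilable doctest), assuming the flag-struct constructors
    /// shown match this crate version:
    ///
    /// ```ignore
    /// // Host writes must be made visible to vertex-attribute reads.
    /// unsafe {
    ///     barrier.add_memory_barrier(
    ///         PipelineStages { host: true, ..PipelineStages::none() },
    ///         AccessFlags { host_write: true, ..AccessFlags::none() },
    ///         PipelineStages { vertex_input: true, ..PipelineStages::none() },
    ///         AccessFlags { vertex_attribute_read: true, ..AccessFlags::none() },
    ///         false,
    ///     );
    /// }
    /// ```
    ///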
    pub unsafe fn add_memory_barrier(
        &mut self,
        source_stage: PipelineStages,
        source_access: AccessFlags,
        destination_stage: PipelineStages,
        destination_access: AccessFlags,
        by_region: bool,
    ) {
        debug_assert!(source_access.is_compatible_with(&source_stage));
        debug_assert!(destination_access.is_compatible_with(&destination_stage));

        self.add_execution_dependency(source_stage, destination_stage, by_region);

        self.memory_barriers.push(ash::vk::MemoryBarrier {
            src_access_mask: source_access.into(),
            dst_access_mask: destination_access.into(),
            ..Default::default()
        });
    }

    /// Adds a buffer memory barrier. This means that all the memory writes to the given buffer
    /// by the given source stages for the given source accesses must be made visible to the
    /// given destination stages for the given destination accesses.
    ///
    /// Also adds an execution dependency similar to `add_execution_dependency`.
    ///
    /// Also allows transferring buffer ownership between queues.
    ///
    /// # Safety
    ///
    /// - Same as `add_execution_dependency`.
    /// - The buffer must be alive for at least as long as the command buffer to which this barrier
    ///   is added.
    /// - Queue ownership transfers must be correct.
    ///
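    /// # Examples
    ///
    /// A minimal sketch (not a compilable doctest), assuming `buffer` implements
    /// `BufferAccess` and the flag-struct constructors shown match this crate version:
    ///
    /// ```ignore
    /// // Transfer writes to the whole buffer must be made visible to
    /// // compute-shader reads; no queue ownership transfer.
    /// unsafe {
    ///     barrier.add_buffer_memory_barrier(
    ///         &buffer,
    ///         PipelineStages { transfer: true, ..PipelineStages::none() },
    ///         AccessFlags { transfer_write: true, ..AccessFlags::none() },
    ///         PipelineStages { compute_shader: true, ..PipelineStages::none() },
    ///         AccessFlags { shader_read: true, ..AccessFlags::none() },
    ///         false,
    ///         None,
    ///         0,
    ///         buffer.size(),
    ///     );
    /// }
    /// ```
    ///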
    pub unsafe fn add_buffer_memory_barrier<B>(
        &mut self,
        buffer: &B,
        source_stage: PipelineStages,
        source_access: AccessFlags,
        destination_stage: PipelineStages,
        destination_access: AccessFlags,
        by_region: bool,
        queue_transfer: Option<(u32, u32)>,
        offset: DeviceSize,
        size: DeviceSize,
    ) where
        B: ?Sized + BufferAccess,
    {
        debug_assert!(source_access.is_compatible_with(&source_stage));
        debug_assert!(destination_access.is_compatible_with(&destination_stage));

        self.add_execution_dependency(source_stage, destination_stage, by_region);

        debug_assert!(size <= buffer.size());
        let BufferInner {
            buffer,
            offset: org_offset,
        } = buffer.inner();
        let offset = offset + org_offset;

        // `QUEUE_FAMILY_IGNORED` on both sides means that no ownership transfer takes place.
        let (src_queue, dest_queue) = if let Some((src_queue, dest_queue)) = queue_transfer {
            (src_queue, dest_queue)
        } else {
            (ash::vk::QUEUE_FAMILY_IGNORED, ash::vk::QUEUE_FAMILY_IGNORED)
        };

        self.buffer_barriers.push(ash::vk::BufferMemoryBarrier {
            src_access_mask: source_access.into(),
            dst_access_mask: destination_access.into(),
            src_queue_family_index: src_queue,
            dst_queue_family_index: dest_queue,
            buffer: buffer.internal_object(),
            offset,
            size,
            ..Default::default()
        });
    }

    /// Adds an image memory barrier. This is the equivalent of `add_buffer_memory_barrier` but
    /// for images.
    ///
    /// In addition to transferring image ownership between queues, it also allows changing the
    /// layout of images.
    ///
    /// Also adds an execution dependency similar to `add_execution_dependency`.
    ///
    /// # Safety
    ///
    /// - Same as `add_execution_dependency`.
    /// - The image must be alive for at least as long as the command buffer to which this barrier
    ///   is added.
    /// - Queue ownership transfers must be correct.
    /// - Image layout transitions must be correct.
    /// - Access flags must be compatible with the image usage flags passed at image creation.
    ///
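    /// # Examples
    ///
    /// A minimal sketch (not a compilable doctest), assuming `image` implements `ImageAccess`
    /// and the flag-struct constructors shown match this crate version:
    ///
    /// ```ignore
    /// // Transition the whole image from `TransferDstOptimal` to `ShaderReadOnlyOptimal`
    /// // once transfer writes are done, making them visible to fragment-shader reads.
    /// unsafe {
    ///     barrier.add_image_memory_barrier(
    ///         &image,
    ///         0..image.mipmap_levels(),
    ///         0..image.dimensions().array_layers(),
    ///         PipelineStages { transfer: true, ..PipelineStages::none() },
    ///         AccessFlags { transfer_write: true, ..AccessFlags::none() },
    ///         PipelineStages { fragment_shader: true, ..PipelineStages::none() },
    ///         AccessFlags { shader_read: true, ..AccessFlags::none() },
    ///         false,
    ///         None,
    ///         ImageLayout::TransferDstOptimal,
    ///         ImageLayout::ShaderReadOnlyOptimal,
    ///     );
    /// }
    /// ```
    ///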
    pub unsafe fn add_image_memory_barrier<I>(
        &mut self,
        image: &I,
        mipmaps: Range<u32>,
        layers: Range<u32>,
        source_stage: PipelineStages,
        source_access: AccessFlags,
        destination_stage: PipelineStages,
        destination_access: AccessFlags,
        by_region: bool,
        queue_transfer: Option<(u32, u32)>,
        current_layout: ImageLayout,
        new_layout: ImageLayout,
    ) where
        I: ?Sized + ImageAccess,
    {
        debug_assert!(source_access.is_compatible_with(&source_stage));
        debug_assert!(destination_access.is_compatible_with(&destination_stage));

        self.add_execution_dependency(source_stage, destination_stage, by_region);

        debug_assert_ne!(new_layout, ImageLayout::Undefined);
        debug_assert_ne!(new_layout, ImageLayout::Preinitialized);

        debug_assert!(mipmaps.start < mipmaps.end);
        debug_assert!(mipmaps.end <= image.mipmap_levels());
        debug_assert!(layers.start < layers.end);
        debug_assert!(layers.end <= image.dimensions().array_layers());

        let (src_queue, dest_queue) = if let Some((src_queue, dest_queue)) = queue_transfer {
            (src_queue, dest_queue)
        } else {
            (ash::vk::QUEUE_FAMILY_IGNORED, ash::vk::QUEUE_FAMILY_IGNORED)
        };

        if image.format().ty() == FormatTy::Ycbcr {
            unimplemented!();
        }

        // TODO: Let user choose
        let aspects = image.format().aspects();
        let image = image.inner();

        self.image_barriers.push(ash::vk::ImageMemoryBarrier {
            src_access_mask: source_access.into(),
            dst_access_mask: destination_access.into(),
            old_layout: current_layout.into(),
            new_layout: new_layout.into(),
            src_queue_family_index: src_queue,
            dst_queue_family_index: dest_queue,
            image: image.image.internal_object(),
            subresource_range: ash::vk::ImageSubresourceRange {
                aspect_mask: aspects.into(),
                base_mip_level: mipmaps.start + image.first_mipmap_level as u32,
                level_count: mipmaps.end - mipmaps.start,
                base_array_layer: layers.start + image.first_layer as u32,
                layer_count: layers.end - layers.start,
            },
            ..Default::default()
        });
    }
}

/// Command buffer that has been built.
///
/// # Safety
///
/// The command buffer must not outlive the command pool that it was created from,
/// nor the resources used by the recorded commands.
pub struct UnsafeCommandBuffer {
    command_buffer: ash::vk::CommandBuffer,
    device: Arc<Device>,
    usage: CommandBufferUsage,
}

impl UnsafeCommandBuffer {
    /// Returns the usage that the command buffer was recorded with.
    #[inline]
    pub fn usage(&self) -> CommandBufferUsage {
        self.usage
    }
}

unsafe impl DeviceOwned for UnsafeCommandBuffer {
    #[inline]
    fn device(&self) -> &Arc<Device> {
        &self.device
    }
}

unsafe impl VulkanObject for UnsafeCommandBuffer {
    type Object = ash::vk::CommandBuffer;

    #[inline]
    fn internal_object(&self) -> ash::vk::CommandBuffer {
        self.command_buffer
    }
}