• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 use crate::compiler::nir::*;
2 use crate::pipe::fence::*;
3 use crate::pipe::resource::*;
4 use crate::pipe::screen::*;
5 use crate::pipe::transfer::*;
6 
7 use mesa_rust_gen::pipe_fd_type::*;
8 use mesa_rust_gen::*;
9 use mesa_rust_util::has_required_feature;
10 
11 use std::os::raw::*;
12 use std::ptr;
13 use std::ptr::*;
14 use std::sync::Arc;
15 
/// Owning wrapper around a gallium `pipe_context`.
///
/// Keeps a strong reference to the creating [`PipeScreen`] so the screen
/// outlives the context; the raw pointer is guaranteed non-null.
pub struct PipeContext {
    pipe: NonNull<pipe_context>,
    screen: Arc<PipeScreen>,
}
20 
// SAFETY: assumes the underlying driver context may be accessed from any
// thread — NOTE(review): confirm against the driver's threading contract.
unsafe impl Send for PipeContext {}
unsafe impl Sync for PipeContext {}
23 
/// Read/write intent of a map operation; each variant's discriminant is the
/// matching `pipe_map_flags` bit pattern.
#[derive(Clone, Copy)]
#[repr(u32)]
pub enum RWFlags {
    RD = pipe_map_flags::PIPE_MAP_READ.0,
    WR = pipe_map_flags::PIPE_MAP_WRITE.0,
    RW = pipe_map_flags::PIPE_MAP_READ_WRITE.0,
}
31 
32 impl From<RWFlags> for pipe_map_flags {
from(rw: RWFlags) -> Self33     fn from(rw: RWFlags) -> Self {
34         pipe_map_flags(rw as u32)
35     }
36 }
37 
/// How a resource mapping synchronizes with outstanding GPU work.
pub enum ResourceMapType {
    /// Plain synchronized mapping (no extra flags).
    Normal,
    /// Unsynchronized mapping; caller is responsible for avoiding races.
    Async,
    /// Persistent, coherent, unsynchronized mapping for long-lived access.
    Coherent,
}
43 
44 impl From<ResourceMapType> for pipe_map_flags {
from(map_type: ResourceMapType) -> Self45     fn from(map_type: ResourceMapType) -> Self {
46         match map_type {
47             ResourceMapType::Normal => pipe_map_flags(0),
48             ResourceMapType::Async => pipe_map_flags::PIPE_MAP_UNSYNCHRONIZED,
49             ResourceMapType::Coherent => {
50                 pipe_map_flags::PIPE_MAP_COHERENT
51                     | pipe_map_flags::PIPE_MAP_PERSISTENT
52                     | pipe_map_flags::PIPE_MAP_UNSYNCHRONIZED
53             }
54         }
55     }
56 }
57 
58 impl PipeContext {
new(context: *mut pipe_context, screen: &Arc<PipeScreen>) -> Option<Self>59     pub(super) fn new(context: *mut pipe_context, screen: &Arc<PipeScreen>) -> Option<Self> {
60         let s = Self {
61             pipe: NonNull::new(context)?,
62             screen: screen.clone(),
63         };
64 
65         if !has_required_cbs(unsafe { s.pipe.as_ref() }) {
66             assert!(false, "Context missing features. This should never happen!");
67             return None;
68         }
69 
70         Some(s)
71     }
72 
    /// Uploads `size` bytes from `data` into the buffer resource at `offset`.
    pub fn buffer_subdata(
        &self,
        res: &PipeResource,
        offset: c_uint,
        data: *const c_void,
        size: c_uint,
    ) {
        // SAFETY: buffer_subdata is a required callback (has_required_cbs).
        unsafe {
            self.pipe.as_ref().buffer_subdata.unwrap()(
                self.pipe.as_ptr(),
                res.pipe(),
                pipe_map_flags::PIPE_MAP_WRITE.0, // TODO PIPE_MAP_x
                offset,
                size,
                data,
            )
        }
    }
91 
    /// Uploads pixel data into the texture region described by `bx`.
    ///
    /// `stride` is the row pitch and `layer_stride` the slice pitch of the
    /// source `data`, both in bytes.
    pub fn texture_subdata(
        &self,
        res: &PipeResource,
        bx: &pipe_box,
        data: *const c_void,
        stride: u32,
        layer_stride: usize,
    ) {
        // SAFETY: texture_subdata is a required callback (has_required_cbs).
        unsafe {
            self.pipe.as_ref().texture_subdata.unwrap()(
                self.pipe.as_ptr(),
                res.pipe(),
                0,
                pipe_map_flags::PIPE_MAP_WRITE.0, // TODO PIPE_MAP_x
                bx,
                data,
                stride,
                layer_stride,
            )
        }
    }
113 
    /// Fills `size` bytes of the buffer at `offset` with the repeated byte
    /// sequence `pattern`.
    pub fn clear_buffer(&self, res: &PipeResource, pattern: &[u8], offset: u32, size: u32) {
        // SAFETY: clear_buffer is a required callback (has_required_cbs).
        unsafe {
            self.pipe.as_ref().clear_buffer.unwrap()(
                self.pipe.as_ptr(),
                res.pipe(),
                offset,
                size,
                pattern.as_ptr().cast(),
                pattern.len() as i32,
            )
        }
    }
126 
clear_image_buffer( &self, res: &PipeResource, pattern: &[u32], origin: &[usize; 3], region: &[usize; 3], strides: (usize, usize), pixel_size: usize, )127     pub fn clear_image_buffer(
128         &self,
129         res: &PipeResource,
130         pattern: &[u32],
131         origin: &[usize; 3],
132         region: &[usize; 3],
133         strides: (usize, usize),
134         pixel_size: usize,
135     ) {
136         let (row_pitch, slice_pitch) = strides;
137         for z in 0..region[2] {
138             for y in 0..region[1] {
139                 let pitch = [pixel_size, row_pitch, slice_pitch];
140                 // Convoluted way of doing (origin + [0, y, z]) * pitch
141                 let offset = (0..3)
142                     .map(|i| ((origin[i] + [0, y, z][i]) * pitch[i]) as u32)
143                     .sum();
144 
145                 // SAFETY: clear_buffer arguments are specified
146                 // in bytes, so pattern.len() dimension value
147                 // should be multiplied by pixel_size
148                 unsafe {
149                     self.pipe.as_ref().clear_buffer.unwrap()(
150                         self.pipe.as_ptr(),
151                         res.pipe(),
152                         offset,
153                         (region[0] * pixel_size) as u32,
154                         pattern.as_ptr().cast(),
155                         (pattern.len() * pixel_size) as i32,
156                     )
157                 };
158             }
159         }
160     }
161 
    /// Clears the texture region `bx` with `pattern`.
    ///
    /// Falls back to the gallium software helper `u_default_clear_texture`
    /// when the driver does not implement `clear_texture`.
    pub fn clear_texture(&self, res: &PipeResource, pattern: &[u32], bx: &pipe_box) {
        unsafe {
            let clear_texture = self
                .pipe
                .as_ref()
                .clear_texture
                .unwrap_or(u_default_clear_texture);
            clear_texture(
                self.pipe.as_ptr(),
                res.pipe(),
                0,
                bx,
                pattern.as_ptr().cast(),
            )
        }
    }
178 
    /// Copies the region `bx` of `src` into `dst` at `dst_offset` (x, y, z).
    pub fn resource_copy_region(
        &self,
        src: &PipeResource,
        dst: &PipeResource,
        dst_offset: &[u32; 3],
        bx: &pipe_box,
    ) {
        // SAFETY: resource_copy_region is a required callback
        // (has_required_cbs); level 0 is used for both resources.
        unsafe {
            self.pipe.as_ref().resource_copy_region.unwrap()(
                self.pipe.as_ptr(),
                dst.pipe(),
                0,
                dst_offset[0],
                dst_offset[1],
                dst_offset[2],
                src.pipe(),
                0,
                bx,
            )
        }
    }
200 
resource_map( &self, res: &PipeResource, bx: &pipe_box, flags: pipe_map_flags, is_buffer: bool, ) -> Option<PipeTransfer>201     fn resource_map(
202         &self,
203         res: &PipeResource,
204         bx: &pipe_box,
205         flags: pipe_map_flags,
206         is_buffer: bool,
207     ) -> Option<PipeTransfer> {
208         let mut out: *mut pipe_transfer = ptr::null_mut();
209 
210         let ptr = unsafe {
211             let func = if is_buffer {
212                 self.pipe.as_ref().buffer_map
213             } else {
214                 self.pipe.as_ref().texture_map
215             };
216 
217             func.unwrap()(self.pipe.as_ptr(), res.pipe(), 0, flags.0, bx, &mut out)
218         };
219 
220         if ptr.is_null() {
221             None
222         } else {
223             Some(PipeTransfer::new(is_buffer, out, ptr))
224         }
225     }
226 
_buffer_map( &self, res: &PipeResource, offset: i32, size: i32, flags: pipe_map_flags, ) -> Option<PipeTransfer>227     fn _buffer_map(
228         &self,
229         res: &PipeResource,
230         offset: i32,
231         size: i32,
232         flags: pipe_map_flags,
233     ) -> Option<PipeTransfer> {
234         let b = pipe_box {
235             x: offset,
236             width: size,
237             height: 1,
238             depth: 1,
239             ..Default::default()
240         };
241 
242         self.resource_map(res, &b, flags, true)
243     }
244 
buffer_map( &self, res: &PipeResource, offset: i32, size: i32, rw: RWFlags, map_type: ResourceMapType, ) -> Option<PipeTransfer>245     pub fn buffer_map(
246         &self,
247         res: &PipeResource,
248         offset: i32,
249         size: i32,
250         rw: RWFlags,
251         map_type: ResourceMapType,
252     ) -> Option<PipeTransfer> {
253         let mut flags: pipe_map_flags = map_type.into();
254         flags |= rw.into();
255         self._buffer_map(res, offset, size, flags)
256     }
257 
buffer_map_directly( &self, res: &PipeResource, offset: i32, size: i32, rw: RWFlags, ) -> Option<PipeTransfer>258     pub fn buffer_map_directly(
259         &self,
260         res: &PipeResource,
261         offset: i32,
262         size: i32,
263         rw: RWFlags,
264     ) -> Option<PipeTransfer> {
265         let flags =
266             pipe_map_flags::PIPE_MAP_DIRECTLY | pipe_map_flags::PIPE_MAP_UNSYNCHRONIZED | rw.into();
267         self._buffer_map(res, offset, size, flags)
268     }
269 
    /// Unmaps a buffer transfer previously created by a buffer map call.
    pub(super) fn buffer_unmap(&self, tx: *mut pipe_transfer) {
        // SAFETY: buffer_unmap is a required callback (has_required_cbs).
        unsafe { self.pipe.as_ref().buffer_unmap.unwrap()(self.pipe.as_ptr(), tx) };
    }
273 
    /// Maps the texture region `bx` with raw map flags.
    pub fn _texture_map(
        &self,
        res: &PipeResource,
        bx: &pipe_box,
        flags: pipe_map_flags,
    ) -> Option<PipeTransfer> {
        self.resource_map(res, bx, flags, false)
    }
282 
texture_map( &self, res: &PipeResource, bx: &pipe_box, rw: RWFlags, map_type: ResourceMapType, ) -> Option<PipeTransfer>283     pub fn texture_map(
284         &self,
285         res: &PipeResource,
286         bx: &pipe_box,
287         rw: RWFlags,
288         map_type: ResourceMapType,
289     ) -> Option<PipeTransfer> {
290         let mut flags: pipe_map_flags = map_type.into();
291         flags |= rw.into();
292         self._texture_map(res, bx, flags)
293     }
294 
texture_map_directly( &self, res: &PipeResource, bx: &pipe_box, rw: RWFlags, ) -> Option<PipeTransfer>295     pub fn texture_map_directly(
296         &self,
297         res: &PipeResource,
298         bx: &pipe_box,
299         rw: RWFlags,
300     ) -> Option<PipeTransfer> {
301         let flags =
302             pipe_map_flags::PIPE_MAP_DIRECTLY | pipe_map_flags::PIPE_MAP_UNSYNCHRONIZED | rw.into();
303         self.resource_map(res, bx, flags, false)
304     }
305 
    /// Unmaps a texture transfer previously created by a texture map call.
    pub(super) fn texture_unmap(&self, tx: *mut pipe_transfer) {
        // SAFETY: texture_unmap is a required callback (has_required_cbs).
        unsafe { self.pipe.as_ref().texture_unmap.unwrap()(self.pipe.as_ptr(), tx) };
    }
309 
    /// Creates a driver compute-state object from a NIR shader.
    ///
    /// `static_local_mem` is the statically known shared-memory usage in
    /// bytes. The NIR is duplicated (`dup_for_driver`) so the driver gets
    /// its own copy.
    pub fn create_compute_state(&self, nir: &NirShader, static_local_mem: u32) -> *mut c_void {
        let state = pipe_compute_state {
            ir_type: pipe_shader_ir::PIPE_SHADER_IR_NIR,
            prog: nir.dup_for_driver().cast(),
            req_input_mem: 0,
            static_shared_mem: static_local_mem,
        };
        // SAFETY: create_compute_state is a required callback (has_required_cbs).
        unsafe { self.pipe.as_ref().create_compute_state.unwrap()(self.pipe.as_ptr(), &state) }
    }
319 
    /// Binds a compute-state object created by [`Self::create_compute_state`].
    pub fn bind_compute_state(&self, state: *mut c_void) {
        // SAFETY: bind_compute_state is a required callback (has_required_cbs).
        unsafe { self.pipe.as_ref().bind_compute_state.unwrap()(self.pipe.as_ptr(), state) }
    }
323 
    /// Destroys a compute-state object created by [`Self::create_compute_state`].
    pub fn delete_compute_state(&self, state: *mut c_void) {
        // SAFETY: delete_compute_state is a required callback (has_required_cbs).
        unsafe { self.pipe.as_ref().delete_compute_state.unwrap()(self.pipe.as_ptr(), state) }
    }
327 
    /// Queries driver information (e.g. limits) about a compute-state object.
    pub fn compute_state_info(&self, state: *mut c_void) -> pipe_compute_state_object_info {
        let mut info = pipe_compute_state_object_info::default();
        // SAFETY: get_compute_state_info is a required callback
        // (has_required_cbs); `info` is filled in by the driver.
        unsafe {
            self.pipe.as_ref().get_compute_state_info.unwrap()(self.pipe.as_ptr(), state, &mut info)
        }
        info
    }
335 
compute_state_subgroup_size(&self, state: *mut c_void, block: &[u32; 3]) -> u32336     pub fn compute_state_subgroup_size(&self, state: *mut c_void, block: &[u32; 3]) -> u32 {
337         unsafe {
338             if let Some(cb) = self.pipe.as_ref().get_compute_state_subgroup_size {
339                 cb(self.pipe.as_ptr(), state, block)
340             } else {
341                 0
342             }
343         }
344     }
345 
    /// Whether the driver implements the optional `create_fence_fd` callback
    /// needed by [`Self::import_fence`].
    pub fn is_create_fence_fd_supported(&self) -> bool {
        unsafe { self.pipe.as_ref().create_fence_fd.is_some() }
    }
349 
    /// Creates a driver sampler-state object from `state`.
    ///
    /// NOTE(review): `create_sampler_state` is not in the required-callback
    /// list checked by has_required_cbs — confirm all drivers provide it.
    pub fn create_sampler_state(&self, state: &pipe_sampler_state) -> *mut c_void {
        unsafe { self.pipe.as_ref().create_sampler_state.unwrap()(self.pipe.as_ptr(), state) }
    }
353 
    /// Binds `samplers` to the compute stage starting at slot 0.
    pub fn bind_sampler_states(&self, samplers: &[*mut c_void]) {
        // The C API takes a mutable pointer, so work on an owned copy.
        let mut samplers = samplers.to_owned();
        // SAFETY: bind_sampler_states is a required callback (has_required_cbs).
        unsafe {
            self.pipe.as_ref().bind_sampler_states.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                samplers.len() as u32,
                samplers.as_mut_ptr(),
            )
        }
    }
366 
    /// Unbinds `count` compute-stage samplers by rebinding null states over
    /// slots 0..count.
    pub fn clear_sampler_states(&self, count: u32) {
        let mut samplers = vec![ptr::null_mut(); count as usize];
        // SAFETY: bind_sampler_states is a required callback (has_required_cbs).
        unsafe {
            self.pipe.as_ref().bind_sampler_states.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                count,
                samplers.as_mut_ptr(),
            )
        }
    }
379 
    /// Destroys a sampler-state object created by [`Self::create_sampler_state`].
    pub fn delete_sampler_state(&self, ptr: *mut c_void) {
        // SAFETY: delete_sampler_state is a required callback (has_required_cbs).
        unsafe { self.pipe.as_ref().delete_sampler_state.unwrap()(self.pipe.as_ptr(), ptr) }
    }
383 
    /// Binds the whole of `res` as constant buffer `idx` of the compute stage.
    pub fn bind_constant_buffer(&self, idx: u32, res: &PipeResource) {
        let cb = pipe_constant_buffer {
            buffer: res.pipe(),
            buffer_offset: 0,
            buffer_size: res.width(),
            user_buffer: ptr::null(),
        };
        // SAFETY: set_constant_buffer is a required callback
        // (has_required_cbs); `false` means the driver does not take
        // ownership of our reference.
        unsafe {
            self.pipe.as_ref().set_constant_buffer.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                idx,
                false,
                &cb,
            )
        }
    }
401 
    /// Binds `data` as a user constant buffer at slot `idx` of the compute
    /// stage; an empty slice unbinds the slot (a null descriptor is passed).
    pub fn set_constant_buffer(&self, idx: u32, data: &[u8]) {
        let cb = pipe_constant_buffer {
            buffer: ptr::null_mut(),
            buffer_offset: 0,
            buffer_size: data.len() as u32,
            user_buffer: data.as_ptr().cast(),
        };
        // SAFETY: set_constant_buffer is a required callback (has_required_cbs).
        unsafe {
            self.pipe.as_ref().set_constant_buffer.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                idx,
                false,
                if data.is_empty() { ptr::null() } else { &cb },
            )
        }
    }
419 
launch_grid( &self, work_dim: u32, block: [u32; 3], grid: [u32; 3], variable_local_mem: u32, )420     pub fn launch_grid(
421         &self,
422         work_dim: u32,
423         block: [u32; 3],
424         grid: [u32; 3],
425         variable_local_mem: u32,
426     ) {
427         let info = pipe_grid_info {
428             pc: 0,
429             input: ptr::null(),
430             variable_shared_mem: variable_local_mem,
431             work_dim: work_dim,
432             block: block,
433             last_block: [0; 3],
434             grid: grid,
435             grid_base: [0; 3],
436             indirect: ptr::null_mut(),
437             indirect_offset: 0,
438             indirect_stride: 0,
439             draw_count: 0,
440             indirect_draw_count_offset: 0,
441             indirect_draw_count: ptr::null_mut(),
442         };
443         unsafe { self.pipe.as_ref().launch_grid.unwrap()(self.pipe.as_ptr(), &info) }
444     }
445 
    /// Binds `res` as global buffers starting at slot 0; the driver writes
    /// the resulting GPU addresses into `out` (parallel to `res`).
    pub fn set_global_binding(&self, res: &[&Arc<PipeResource>], out: &mut [*mut u32]) {
        let mut res: Vec<_> = res.iter().map(|r| r.pipe()).collect();
        // SAFETY: set_global_binding is a required callback (has_required_cbs).
        unsafe {
            self.pipe.as_ref().set_global_binding.unwrap()(
                self.pipe.as_ptr(),
                0,
                res.len() as u32,
                res.as_mut_ptr(),
                out.as_mut_ptr(),
            )
        }
    }
458 
create_sampler_view( &self, res: &PipeResource, format: pipe_format, app_img_info: Option<&AppImgInfo>, ) -> *mut pipe_sampler_view459     pub fn create_sampler_view(
460         &self,
461         res: &PipeResource,
462         format: pipe_format,
463         app_img_info: Option<&AppImgInfo>,
464     ) -> *mut pipe_sampler_view {
465         let template = res.pipe_sampler_view_template(format, app_img_info);
466 
467         unsafe {
468             let s_view = self.pipe.as_ref().create_sampler_view.unwrap()(
469                 self.pipe.as_ptr(),
470                 res.pipe(),
471                 &template,
472             );
473 
474             s_view
475         }
476     }
477 
    /// Unbinds `count` global buffers by passing null resource/address arrays.
    pub fn clear_global_binding(&self, count: u32) {
        // SAFETY: set_global_binding is a required callback (has_required_cbs).
        unsafe {
            self.pipe.as_ref().set_global_binding.unwrap()(
                self.pipe.as_ptr(),
                0,
                count,
                ptr::null_mut(),
                ptr::null_mut(),
            )
        }
    }
489 
    /// Binds `views` to the compute stage starting at slot 0.
    ///
    /// `false` means the driver does not take ownership of our references.
    pub fn set_sampler_views(&self, views: &mut [*mut pipe_sampler_view]) {
        // SAFETY: set_sampler_views is a required callback (has_required_cbs).
        unsafe {
            self.pipe.as_ref().set_sampler_views.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                views.len() as u32,
                0,
                false,
                views.as_mut_ptr(),
            )
        }
    }
503 
clear_sampler_views(&self, count: u32)504     pub fn clear_sampler_views(&self, count: u32) {
505         let mut samplers = vec![ptr::null_mut(); count as usize];
506         unsafe {
507             self.pipe.as_ref().set_sampler_views.unwrap()(
508                 self.pipe.as_ptr(),
509                 pipe_shader_type::PIPE_SHADER_COMPUTE,
510                 0,
511                 count,
512                 0,
513                 false,
514                 samplers.as_mut_ptr(),
515             )
516         }
517     }
518 
    /// Destroys a sampler view created by [`Self::create_sampler_view`].
    pub fn sampler_view_destroy(&self, view: *mut pipe_sampler_view) {
        // SAFETY: sampler_view_destroy is a required callback (has_required_cbs).
        unsafe { self.pipe.as_ref().sampler_view_destroy.unwrap()(self.pipe.as_ptr(), view) }
    }
522 
    /// Binds `images` to the compute stage starting at slot 0.
    pub fn set_shader_images(&self, images: &[PipeImageView]) {
        // Convert the wrapper views to the raw gallium representation.
        let images = PipeImageView::slice_to_pipe(images);
        // SAFETY: set_shader_images is a required callback (has_required_cbs).
        unsafe {
            self.pipe.as_ref().set_shader_images.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                images.len() as u32,
                0,
                images.as_ptr(),
            )
        }
    }
536 
    /// Unbinds `count` compute-stage shader images by passing a null array.
    pub fn clear_shader_images(&self, count: u32) {
        // SAFETY: set_shader_images is a required callback (has_required_cbs).
        unsafe {
            self.pipe.as_ref().set_shader_images.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                count,
                0,
                ptr::null_mut(),
            )
        }
    }
549 
    /// Creates a driver query object of the given `query_type`.
    pub(crate) fn create_query(&self, query_type: c_uint, index: c_uint) -> *mut pipe_query {
        // SAFETY: create_query is a required callback (has_required_cbs).
        unsafe { self.pipe.as_ref().create_query.unwrap()(self.pipe.as_ptr(), query_type, index) }
    }
553 
    /// Ends the query `pq`, returning the driver's success flag.
    ///
    /// # Safety
    ///
    /// usual rules on raw mut pointers apply, specifically no concurrent access
    pub(crate) unsafe fn end_query(&self, pq: *mut pipe_query) -> bool {
        // SAFETY: end_query is a required callback (has_required_cbs).
        unsafe { self.pipe.as_ref().end_query.unwrap()(self.pipe.as_ptr(), pq) }
    }
560 
    /// Fetches the result of `pq` into `pqr`; when `wait` is true the call
    /// blocks until the result is ready. Returns the driver's success flag.
    ///
    /// # Safety
    ///
    /// usual rules on raw mut pointers apply, specifically no concurrent access
    pub(crate) unsafe fn get_query_result(
        &self,
        pq: *mut pipe_query,
        wait: bool,
        pqr: *mut pipe_query_result,
    ) -> bool {
        unsafe { self.pipe.as_ref().get_query_result.unwrap()(self.pipe.as_ptr(), pq, wait, pqr) }
    }
572 
    /// Destroys the query `pq`.
    ///
    /// # Safety
    ///
    /// usual rules on raw mut pointers apply, specifically no concurrent access
    pub(crate) unsafe fn destroy_query(&self, pq: *mut pipe_query) {
        // SAFETY: destroy_query is a required callback (has_required_cbs).
        unsafe { self.pipe.as_ref().destroy_query.unwrap()(self.pipe.as_ptr(), pq) }
    }
579 
    /// Issues a memory barrier for the given `PIPE_BARRIER_*` bits.
    pub fn memory_barrier(&self, barriers: u32) {
        // SAFETY: memory_barrier is a required callback (has_required_cbs).
        unsafe { self.pipe.as_ref().memory_barrier.unwrap()(self.pipe.as_ptr(), barriers) }
    }
583 
    /// Flushes queued work and returns a fence tracking its completion.
    pub fn flush(&self) -> PipeFence {
        // SAFETY: flush is a required callback (has_required_cbs); the
        // driver stores the new fence into `fence`.
        unsafe {
            let mut fence = ptr::null_mut();
            self.pipe.as_ref().flush.unwrap()(self.pipe.as_ptr(), &mut fence, 0);
            PipeFence::new(fence, &self.screen)
        }
    }
591 
    /// Imports a native sync file descriptor as a [`PipeFence`].
    ///
    /// NOTE(review): this unwraps the optional `create_fence_fd` callback;
    /// callers are expected to check
    /// [`Self::is_create_fence_fd_supported`] first — confirm all call sites do.
    pub fn import_fence(&self, fence_fd: &FenceFd) -> PipeFence {
        unsafe {
            let mut fence = ptr::null_mut();
            self.pipe.as_ref().create_fence_fd.unwrap()(
                self.pipe.as_ptr(),
                &mut fence,
                fence_fd.fd,
                PIPE_FD_TYPE_NATIVE_SYNC,
            );
            PipeFence::new(fence, &self.screen)
        }
    }
604 
    /// Asks the driver to migrate SVM ranges between host and device.
    /// A no-op when the (optional) `svm_migrate` callback is missing.
    ///
    /// * `ptrs`/`sizes` - parallel arrays describing the ranges
    /// * `to_device` - direction of the migration
    /// * `content_undefined` - contents need not be preserved by the copy
    pub fn svm_migrate(
        &self,
        ptrs: &[usize],
        sizes: &[usize],
        to_device: bool,
        content_undefined: bool,
    ) {
        // The two arrays must describe the same number of ranges.
        assert_eq!(ptrs.len(), sizes.len());
        unsafe {
            if let Some(cb) = self.pipe.as_ref().svm_migrate {
                cb(
                    self.pipe.as_ptr(),
                    ptrs.len() as u32,
                    ptrs.as_ptr().cast(),
                    sizes.as_ptr(),
                    to_device,
                    content_undefined,
                );
            }
        }
    }
}
627 
impl Drop for PipeContext {
    fn drop(&mut self) {
        // SAFETY: we exclusively own the context; destroy is a required
        // callback (has_required_cbs) and is called exactly once here.
        unsafe {
            self.pipe.as_ref().destroy.unwrap()(self.pipe.as_ptr());
        }
    }
}
635 
/// Returns true when `context` provides every callback this wrapper
/// unconditionally `unwrap()`s.
fn has_required_cbs(context: &pipe_context) -> bool {
    // Use '&' to evaluate all features and to not stop
    // on first missing one to list all missing features.
    has_required_feature!(context, destroy)
        & has_required_feature!(context, bind_compute_state)
        & has_required_feature!(context, bind_sampler_states)
        & has_required_feature!(context, buffer_map)
        & has_required_feature!(context, buffer_subdata)
        & has_required_feature!(context, buffer_unmap)
        & has_required_feature!(context, clear_buffer)
        & has_required_feature!(context, create_compute_state)
        & has_required_feature!(context, create_query)
        & has_required_feature!(context, delete_compute_state)
        & has_required_feature!(context, delete_sampler_state)
        & has_required_feature!(context, destroy_query)
        & has_required_feature!(context, end_query)
        & has_required_feature!(context, flush)
        & has_required_feature!(context, get_compute_state_info)
        & has_required_feature!(context, launch_grid)
        & has_required_feature!(context, memory_barrier)
        & has_required_feature!(context, resource_copy_region)
        & has_required_feature!(context, sampler_view_destroy)
        & has_required_feature!(context, set_constant_buffer)
        & has_required_feature!(context, set_global_binding)
        & has_required_feature!(context, set_sampler_views)
        & has_required_feature!(context, set_shader_images)
        & has_required_feature!(context, texture_map)
        & has_required_feature!(context, texture_subdata)
        & has_required_feature!(context, texture_unmap)
}
666