use crate::compiler::nir::*;
use crate::pipe::fence::*;
use crate::pipe::resource::*;
use crate::pipe::screen::*;
use crate::pipe::transfer::*;

use mesa_rust_gen::pipe_fd_type::*;
use mesa_rust_gen::*;
use mesa_rust_util::has_required_feature;

use std::mem;
use std::mem::size_of;
use std::os::raw::*;
use std::ptr;
use std::ptr::*;
use std::sync::Arc;

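/// Owning wrapper around a gallium `pipe_context`, created from and keeping
/// alive its parent [`PipeScreen`]. Dropping it flushes pending work, waits on
/// the resulting fence, and destroys the underlying context.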
pub struct PipeContext {
    pipe: NonNull<pipe_context>,
    screen: Arc<PipeScreen>,
}

unsafe impl Send for PipeContext {}
unsafe impl Sync for PipeContext {}

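/// Read/write subset of `pipe_map_flags` used by the mapping helpers below.
/// It converts losslessly into the full flags type, e.g.:
///
/// ```ignore
/// let flags: pipe_map_flags = RWFlags::RW.into();
/// ```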
#[derive(Clone, Copy)]
#[repr(u32)]
pub enum RWFlags {
    RD = pipe_map_flags::PIPE_MAP_READ.0,
    WR = pipe_map_flags::PIPE_MAP_WRITE.0,
    RW = pipe_map_flags::PIPE_MAP_READ_WRITE.0,
}

impl From<RWFlags> for pipe_map_flags {
    fn from(rw: RWFlags) -> Self {
        pipe_map_flags(rw as u32)
    }
}

impl PipeContext {
    pub(super) fn new(context: *mut pipe_context, screen: &Arc<PipeScreen>) -> Option<Self> {
        let s = Self {
            pipe: NonNull::new(context)?,
            screen: screen.clone(),
        };

        if !has_required_cbs(unsafe { s.pipe.as_ref() }) {
            debug_assert!(false, "Context missing features. This should never happen!");
            return None;
        }

        Some(s)
    }

    pub(crate) fn pipe(&self) -> NonNull<pipe_context> {
        self.pipe
    }

    pub fn buffer_subdata(
        &self,
        res: &PipeResource,
        offset: c_uint,
        data: *const c_void,
        size: c_uint,
    ) {
        unsafe {
            self.pipe.as_ref().buffer_subdata.unwrap()(
                self.pipe.as_ptr(),
                res.pipe(),
                pipe_map_flags::PIPE_MAP_WRITE.0, // TODO PIPE_MAP_x
                offset,
                size,
                data,
            )
        }
    }

    pub fn texture_subdata(
        &self,
        res: &PipeResource,
        bx: &pipe_box,
        data: *const c_void,
        stride: u32,
        layer_stride: usize,
    ) {
        unsafe {
            self.pipe.as_ref().texture_subdata.unwrap()(
                self.pipe.as_ptr(),
                res.pipe(),
                0,
                pipe_map_flags::PIPE_MAP_WRITE.0, // TODO PIPE_MAP_x
                bx,
                data,
                stride,
                layer_stride,
            )
        }
    }

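    /// Fills `size` bytes of `res` starting at `offset` with the repeated byte
    /// `pattern`, using the gallium fallback when the driver does not provide
    /// `clear_buffer`.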
    pub fn clear_buffer(&self, res: &PipeResource, pattern: &[u8], offset: u32, size: u32) {
        unsafe {
            self.pipe
                .as_ref()
                .clear_buffer
                .unwrap_or(u_default_clear_buffer)(
                self.pipe.as_ptr(),
                res.pipe(),
                offset,
                size,
                pattern.as_ptr().cast(),
                pattern.len() as i32,
            )
        }
    }

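    /// Clears a 3D region of a buffer that backs an image, issuing one
    /// `clear_buffer` call per row of the region.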
    pub fn clear_image_buffer(
        &self,
        res: &PipeResource,
        pattern: &[u32],
        origin: &[usize; 3],
        region: &[usize; 3],
        strides: (usize, usize),
        pixel_size: usize,
    ) {
        let (row_pitch, slice_pitch) = strides;
        for z in 0..region[2] {
            for y in 0..region[1] {
                let pitch = [pixel_size, row_pitch, slice_pitch];
                // Byte offset of the first pixel of this row: the dot product
                // of (origin + [0, y, z]) with [pixel_size, row_pitch, slice_pitch].
                let offset = (0..3)
                    .map(|i| ((origin[i] + [0, y, z][i]) * pitch[i]) as u32)
                    .sum();

                unsafe {
                    self.pipe.as_ref().clear_buffer.unwrap()(
                        self.pipe.as_ptr(),
                        res.pipe(),
                        offset,
                        (region[0] * pixel_size) as u32,
                        pattern.as_ptr().cast(),
                        pixel_size as i32,
                    )
                };
            }
        }
    }

    pub fn clear_texture(&self, res: &PipeResource, pattern: &[u32], bx: &pipe_box) {
        unsafe {
            let clear_texture = self
                .pipe
                .as_ref()
                .clear_texture
                .unwrap_or(u_default_clear_texture);
            clear_texture(
                self.pipe.as_ptr(),
                res.pipe(),
                0,
                bx,
                pattern.as_ptr().cast(),
            )
        }
    }

    pub fn resource_copy_region(
        &self,
        src: &PipeResource,
        dst: &PipeResource,
        dst_offset: &[u32; 3],
        bx: &pipe_box,
    ) {
        unsafe {
            self.pipe.as_ref().resource_copy_region.unwrap()(
                self.pipe.as_ptr(),
                dst.pipe(),
                0,
                dst_offset[0],
                dst_offset[1],
                dst_offset[2],
                src.pipe(),
                0,
                bx,
            )
        }
    }

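    /// Shared implementation of the buffer and texture mapping helpers: calls
    /// the driver's `buffer_map` or `texture_map` and returns `None` if the
    /// driver failed to map the resource.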
    fn resource_map(
        &self,
        res: &PipeResource,
        bx: &pipe_box,
        flags: pipe_map_flags,
        is_buffer: bool,
    ) -> Option<PipeTransfer> {
        let mut out: *mut pipe_transfer = ptr::null_mut();

        let ptr = unsafe {
            let func = if is_buffer {
                self.pipe.as_ref().buffer_map
            } else {
                self.pipe.as_ref().texture_map
            };

            func.unwrap()(self.pipe.as_ptr(), res.pipe(), 0, flags.0, bx, &mut out)
        };

        if ptr.is_null() {
            None
        } else {
            Some(PipeTransfer::new(self, is_buffer, out, ptr))
        }
    }

    pub fn buffer_map_flags(
        &self,
        res: &PipeResource,
        offset: i32,
        size: i32,
        flags: pipe_map_flags,
    ) -> Option<PipeTransfer> {
        let b = pipe_box {
            x: offset,
            width: size,
            height: 1,
            depth: 1,
            ..Default::default()
        };

        self.resource_map(res, &b, flags, true)
    }

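    /// Maps `size` bytes of a buffer starting at `offset`. A minimal usage
    /// sketch (illustrative only; assumes a `ctx` and a buffer `res` are
    /// already set up):
    ///
    /// ```ignore
    /// if let Some(tx) = ctx.buffer_map(&res, 0, 64, RWFlags::RD) {
    ///     // read the 64 mapped bytes through `tx` here
    /// }
    /// ```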
    pub fn buffer_map(
        &self,
        res: &PipeResource,
        offset: i32,
        size: i32,
        rw: RWFlags,
    ) -> Option<PipeTransfer> {
        self.buffer_map_flags(res, offset, size, rw.into())
    }

    pub(super) fn buffer_unmap(&self, tx: *mut pipe_transfer) {
        unsafe { self.pipe.as_ref().buffer_unmap.unwrap()(self.pipe.as_ptr(), tx) };
    }

    pub fn texture_map_flags(
        &self,
        res: &PipeResource,
        bx: &pipe_box,
        flags: pipe_map_flags,
    ) -> Option<PipeTransfer> {
        self.resource_map(res, bx, flags, false)
    }

    pub fn texture_map(
        &self,
        res: &PipeResource,
        bx: &pipe_box,
        rw: RWFlags,
    ) -> Option<PipeTransfer> {
        self.texture_map_flags(res, bx, rw.into())
    }

    pub(super) fn texture_unmap(&self, tx: *mut pipe_transfer) {
        unsafe { self.pipe.as_ref().texture_unmap.unwrap()(self.pipe.as_ptr(), tx) };
    }

    pub fn create_compute_state(&self, nir: &NirShader, static_local_mem: u32) -> *mut c_void {
        let state = pipe_compute_state {
            ir_type: pipe_shader_ir::PIPE_SHADER_IR_NIR,
            prog: nir.dup_for_driver().cast(),
            req_input_mem: 0,
            static_shared_mem: static_local_mem,
        };
        unsafe { self.pipe.as_ref().create_compute_state.unwrap()(self.pipe.as_ptr(), &state) }
    }

    pub fn bind_compute_state(&self, state: *mut c_void) {
        unsafe { self.pipe.as_ref().bind_compute_state.unwrap()(self.pipe.as_ptr(), state) }
    }

    pub fn delete_compute_state(&self, state: *mut c_void) {
        unsafe { self.pipe.as_ref().delete_compute_state.unwrap()(self.pipe.as_ptr(), state) }
    }

    pub fn compute_state_info(&self, state: *mut c_void) -> pipe_compute_state_object_info {
        let mut info = pipe_compute_state_object_info::default();
        unsafe {
            self.pipe.as_ref().get_compute_state_info.unwrap()(self.pipe.as_ptr(), state, &mut info)
        }
        info
    }

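    /// Returns the subgroup size the driver picks for the given block size,
    /// or 0 when the driver does not implement the query.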
    pub fn compute_state_subgroup_size(&self, state: *mut c_void, block: &[u32; 3]) -> u32 {
        unsafe {
            if let Some(cb) = self.pipe.as_ref().get_compute_state_subgroup_size {
                cb(self.pipe.as_ptr(), state, block)
            } else {
                0
            }
        }
    }

    pub fn is_create_fence_fd_supported(&self) -> bool {
        unsafe { self.pipe.as_ref().create_fence_fd.is_some() }
    }

    pub fn create_sampler_state(&self, state: &pipe_sampler_state) -> *mut c_void {
        unsafe { self.pipe.as_ref().create_sampler_state.unwrap()(self.pipe.as_ptr(), state) }
    }

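    /// Binds the given sampler states to the compute stage, starting at slot 0.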
    pub fn bind_sampler_states(&self, samplers: &[*mut c_void]) {
        let mut samplers = samplers.to_owned();
        unsafe {
            self.pipe.as_ref().bind_sampler_states.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                samplers.len() as u32,
                samplers.as_mut_ptr(),
            )
        }
    }

    pub fn clear_sampler_states(&self, count: u32) {
        let mut samplers = vec![ptr::null_mut(); count as usize];
        unsafe {
            self.pipe.as_ref().bind_sampler_states.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                count,
                samplers.as_mut_ptr(),
            )
        }
    }

    pub fn delete_sampler_state(&self, ptr: *mut c_void) {
        unsafe { self.pipe.as_ref().delete_sampler_state.unwrap()(self.pipe.as_ptr(), ptr) }
    }

    pub fn bind_constant_buffer(&self, idx: u32, res: &PipeResource) {
        let cb = pipe_constant_buffer {
            buffer: res.pipe(),
            buffer_offset: 0,
            buffer_size: res.width(),
            user_buffer: ptr::null(),
        };
        unsafe {
            self.pipe.as_ref().set_constant_buffer.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                idx,
                false,
                &cb,
            )
        }
    }

    pub fn set_constant_buffer(&self, idx: u32, data: &[u8]) {
        let cb = pipe_constant_buffer {
            buffer: ptr::null_mut(),
            buffer_offset: 0,
            buffer_size: data.len() as u32,
            user_buffer: data.as_ptr().cast(),
        };
        unsafe {
            self.pipe.as_ref().set_constant_buffer.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                idx,
                false,
                if data.is_empty() { ptr::null() } else { &cb },
            )
        }
    }

    /// Returns `false` when allocating GPU memory for the data fails.
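    /// The data is copied into a fresh GPU allocation through the context's
    /// stream uploader, so `data` only needs to live for the duration of the
    /// call.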
    #[must_use]
    pub fn set_constant_buffer_stream(&self, idx: u32, data: &[u8]) -> bool {
        let mut cb = pipe_constant_buffer {
            buffer: ptr::null_mut(),
            buffer_offset: 0,
            buffer_size: data.len() as u32,
            user_buffer: ptr::null_mut(),
        };

        unsafe {
            let stream = self.pipe.as_ref().stream_uploader;
            u_upload_data(
                stream,
                0,
                data.len() as u32,
                size_of::<[u64; 16]>() as u32,
                data.as_ptr().cast(),
                &mut cb.buffer_offset,
                &mut cb.buffer,
            );
            u_upload_unmap(stream);

            if cb.buffer.is_null() {
                return false;
            }

            self.pipe.as_ref().set_constant_buffer.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                idx,
                true,
                &cb,
            );

            true
        }
    }

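    /// Launches a compute grid of `grid` workgroups with `block` threads each.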
    pub fn launch_grid(
        &self,
        work_dim: u32,
        block: [u32; 3],
        grid: [u32; 3],
        variable_local_mem: u32,
    ) {
        let info = pipe_grid_info {
            variable_shared_mem: variable_local_mem,
            work_dim,
            block,
            grid,
            ..Default::default()
        };
        unsafe { self.pipe.as_ref().launch_grid.unwrap()(self.pipe.as_ptr(), &info) }
    }

    pub fn set_global_binding(&self, res: &[&PipeResource], out: &mut [*mut u32]) {
        let mut res: Vec<_> = res.iter().copied().map(PipeResource::pipe).collect();
        unsafe {
            self.pipe.as_ref().set_global_binding.unwrap()(
                self.pipe.as_ptr(),
                0,
                res.len() as u32,
                res.as_mut_ptr(),
                out.as_mut_ptr(),
            )
        }
    }

    pub fn clear_global_binding(&self, count: u32) {
        unsafe {
            self.pipe.as_ref().set_global_binding.unwrap()(
                self.pipe.as_ptr(),
                0,
                count,
                ptr::null_mut(),
                ptr::null_mut(),
            )
        }
    }

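    /// Binds sampler views for the compute stage, transferring ownership of
    /// the views to the driver (see the `take_ownership` note below).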
    pub fn set_sampler_views(&self, mut views: Vec<PipeSamplerView>) {
        unsafe {
            self.pipe.as_ref().set_sampler_views.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                views.len() as u32,
                0,
                true,
                PipeSamplerView::as_pipe(views.as_mut_slice()),
            )
        }

        // The take_ownership parameter of set_sampler_views is true, so ownership
        // of the views has been transferred to the driver; forget them on our side
        // so they are not released twice.
        views.into_iter().for_each(mem::forget);
    }

    pub fn clear_sampler_views(&self, count: u32) {
        let mut views = vec![ptr::null_mut(); count as usize];
        unsafe {
            self.pipe.as_ref().set_sampler_views.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                count,
                0,
                true,
                views.as_mut_ptr(),
            )
        }
    }

    pub fn set_shader_images(&self, images: &[PipeImageView]) {
        let images = PipeImageView::slice_to_pipe(images);
        unsafe {
            self.pipe.as_ref().set_shader_images.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                images.len() as u32,
                0,
                images.as_ptr(),
            )
        }
    }

    pub fn clear_shader_images(&self, count: u32) {
        unsafe {
            self.pipe.as_ref().set_shader_images.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                count,
                0,
                ptr::null_mut(),
            )
        }
    }

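    /// Creates a raw driver query object of the given type; the returned
    /// pointer must eventually be released via [`Self::destroy_query`].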
    pub(crate) fn create_query(&self, query_type: c_uint, index: c_uint) -> *mut pipe_query {
        unsafe { self.pipe.as_ref().create_query.unwrap()(self.pipe.as_ptr(), query_type, index) }
    }

    /// # Safety
    ///
    /// The usual rules on raw mutable pointers apply; in particular, `pq` must
    /// not be accessed concurrently.
    pub(crate) unsafe fn end_query(&self, pq: *mut pipe_query) -> bool {
        unsafe { self.pipe.as_ref().end_query.unwrap()(self.pipe.as_ptr(), pq) }
    }

    /// # Safety
    ///
    /// The usual rules on raw mutable pointers apply; in particular, `pq` must
    /// not be accessed concurrently.
    pub(crate) unsafe fn get_query_result(
        &self,
        pq: *mut pipe_query,
        wait: bool,
        pqr: *mut pipe_query_result,
    ) -> bool {
        unsafe { self.pipe.as_ref().get_query_result.unwrap()(self.pipe.as_ptr(), pq, wait, pqr) }
    }

    /// # Safety
    ///
    /// The usual rules on raw mutable pointers apply; in particular, `pq` must
    /// not be accessed concurrently.
    pub(crate) unsafe fn destroy_query(&self, pq: *mut pipe_query) {
        unsafe { self.pipe.as_ref().destroy_query.unwrap()(self.pipe.as_ptr(), pq) }
    }

    pub fn memory_barrier(&self, barriers: u32) {
        unsafe { self.pipe.as_ref().memory_barrier.unwrap()(self.pipe.as_ptr(), barriers) }
    }

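    /// Flushes all queued work and returns a fence signaling its completion.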
    pub fn flush(&self) -> PipeFence {
        unsafe {
            let mut fence = ptr::null_mut();
            self.pipe.as_ref().flush.unwrap()(self.pipe.as_ptr(), &mut fence, 0);
            PipeFence::new(fence, &self.screen)
        }
    }

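    /// Imports a native sync file descriptor as a [`PipeFence`]. Only valid
    /// when [`Self::is_create_fence_fd_supported`] returns true, as the
    /// callback is unwrapped here.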
    pub fn import_fence(&self, fence_fd: &FenceFd) -> PipeFence {
        unsafe {
            let mut fence = ptr::null_mut();
            self.pipe.as_ref().create_fence_fd.unwrap()(
                self.pipe.as_ptr(),
                &mut fence,
                fence_fd.fd,
                PIPE_FD_TYPE_NATIVE_SYNC,
            );
            PipeFence::new(fence, &self.screen)
        }
    }

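    /// Migrates SVM allocations between host and device. Drivers that do not
    /// implement `svm_migrate` treat this as a no-op.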
    pub fn svm_migrate(
        &self,
        ptrs: &[usize],
        sizes: &[usize],
        to_device: bool,
        content_undefined: bool,
    ) {
        assert_eq!(ptrs.len(), sizes.len());
        unsafe {
            if let Some(cb) = self.pipe.as_ref().svm_migrate {
                cb(
                    self.pipe.as_ptr(),
                    ptrs.len() as u32,
                    ptrs.as_ptr().cast(),
                    sizes.as_ptr(),
                    to_device,
                    content_undefined,
                );
            }
        }
    }
}

impl Drop for PipeContext {
    fn drop(&mut self) {
        self.flush().wait();
        unsafe {
            self.pipe.as_ref().destroy.unwrap()(self.pipe.as_ptr());
        }
    }
}

fn has_required_cbs(context: &pipe_context) -> bool {
    // Use non-short-circuiting '&' instead of '&&' so every feature check runs
    // and all missing features are reported, not just the first one.
    has_required_feature!(context, destroy)
        & has_required_feature!(context, bind_compute_state)
        & has_required_feature!(context, bind_sampler_states)
        & has_required_feature!(context, buffer_map)
        & has_required_feature!(context, buffer_subdata)
        & has_required_feature!(context, buffer_unmap)
        & has_required_feature!(context, create_compute_state)
        & has_required_feature!(context, create_query)
        & has_required_feature!(context, delete_compute_state)
        & has_required_feature!(context, delete_sampler_state)
        & has_required_feature!(context, destroy_query)
        & has_required_feature!(context, end_query)
        & has_required_feature!(context, flush)
        & has_required_feature!(context, get_compute_state_info)
        & has_required_feature!(context, launch_grid)
        & has_required_feature!(context, memory_barrier)
        & has_required_feature!(context, resource_copy_region)
        // implicitly used through pipe_sampler_view_reference
        & has_required_feature!(context, sampler_view_destroy)
        & has_required_feature!(context, set_constant_buffer)
        & has_required_feature!(context, set_global_binding)
        & has_required_feature!(context, set_sampler_views)
        & has_required_feature!(context, set_shader_images)
        & has_required_feature!(context, texture_map)
        & has_required_feature!(context, texture_subdata)
        & has_required_feature!(context, texture_unmap)
}