• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright © 2022 Collabora, Ltd.
2 // SPDX-License-Identifier: MIT
3 
4 use crate::bindings::*;
5 
6 use std::ffi::{c_void, CStr};
7 use std::marker::PhantomData;
8 use std::mem::offset_of;
9 use std::ptr::NonNull;
10 use std::str;
11 
/// Iterator over a Mesa `exec_list` whose entries embed an `exec_node`.
///
/// `T` is the containing struct type; the iterator recovers each `&T`
/// from the embedded node via a fixed byte offset.
pub struct ExecListIter<'a, T> {
    // Current node; starts at one of the list's sentinels.
    n: &'a exec_node,
    // Byte offset of the embedded exec_node within T.
    offset: usize,
    // Walk tail-to-head when true.
    rev: bool,
    _marker: PhantomData<T>,
}
18 
19 impl<'a, T> ExecListIter<'a, T> {
new(l: &'a exec_list, offset: usize) -> Self20     fn new(l: &'a exec_list, offset: usize) -> Self {
21         Self {
22             n: &l.head_sentinel,
23             offset: offset,
24             rev: false,
25             _marker: PhantomData,
26         }
27     }
28 
29     #[allow(dead_code)]
new_rev(l: &'a exec_list, offset: usize) -> Self30     fn new_rev(l: &'a exec_list, offset: usize) -> Self {
31         Self {
32             n: &l.tail_sentinel,
33             offset: offset,
34             rev: true,
35             _marker: PhantomData,
36         }
37     }
38 
at(n: &'a exec_node, offset: usize, rev: bool) -> Self39     fn at(n: &'a exec_node, offset: usize, rev: bool) -> Self {
40         Self {
41             n,
42             offset: offset,
43             rev: rev,
44             _marker: PhantomData,
45         }
46     }
47 }
48 
49 impl<'a, T: 'a> Iterator for ExecListIter<'a, T> {
50     type Item = &'a T;
51 
next(&mut self) -> Option<Self::Item>52     fn next(&mut self) -> Option<Self::Item> {
53         if self.rev {
54             self.n = unsafe { &*self.n.prev };
55             if self.n.prev.is_null() {
56                 None
57             } else {
58                 let t: *const c_void = (self.n as *const exec_node).cast();
59                 Some(unsafe { &*(t.sub(self.offset).cast()) })
60             }
61         } else {
62             self.n = unsafe { &*self.n.next };
63             if self.n.next.is_null() {
64                 None
65             } else {
66                 let t: *const c_void = (self.n as *const exec_node).cast();
67                 Some(unsafe { &*(t.sub(self.offset).cast()) })
68             }
69         }
70     }
71 }
72 
73 impl nir_def {
parent_instr<'a>(&'a self) -> &'a nir_instr74     pub fn parent_instr<'a>(&'a self) -> &'a nir_instr {
75         unsafe { NonNull::new(self.parent_instr).unwrap().as_ref() }
76     }
77 
components_read(&self) -> nir_component_mask_t78     pub fn components_read(&self) -> nir_component_mask_t {
79         unsafe { nir_def_components_read(self as *const _) }
80     }
81 
all_uses_are_fsat(&self) -> bool82     pub fn all_uses_are_fsat(&self) -> bool {
83         unsafe { nir_def_all_uses_are_fsat(self as *const _) }
84     }
85 }
86 
87 pub trait AsDef {
as_def<'a>(&'a self) -> &'a nir_def88     fn as_def<'a>(&'a self) -> &'a nir_def;
89 
bit_size(&self) -> u890     fn bit_size(&self) -> u8 {
91         self.as_def().bit_size
92     }
93 
num_components(&self) -> u894     fn num_components(&self) -> u8 {
95         self.as_def().num_components
96     }
97 
as_load_const<'a>(&'a self) -> Option<&'a nir_load_const_instr>98     fn as_load_const<'a>(&'a self) -> Option<&'a nir_load_const_instr> {
99         self.as_def().parent_instr().as_load_const()
100     }
101 
is_const(&self) -> bool102     fn is_const(&self) -> bool {
103         self.as_load_const().is_some()
104     }
105 
comp_as_int(&self, comp: u8) -> Option<i64>106     fn comp_as_int(&self, comp: u8) -> Option<i64> {
107         if let Some(load) = self.as_load_const() {
108             assert!(comp < load.def.num_components);
109             Some(unsafe {
110                 let comp = usize::from(comp);
111                 match self.bit_size() {
112                     8 => load.values()[comp].i8_ as i64,
113                     16 => load.values()[comp].i16_ as i64,
114                     32 => load.values()[comp].i32_ as i64,
115                     64 => load.values()[comp].i64_,
116                     _ => panic!("Invalid bit size"),
117                 }
118             })
119         } else {
120             None
121         }
122     }
123 
comp_as_uint(&self, comp: u8) -> Option<u64>124     fn comp_as_uint(&self, comp: u8) -> Option<u64> {
125         if let Some(load) = self.as_load_const() {
126             assert!(comp < load.def.num_components);
127             Some(unsafe {
128                 let comp = usize::from(comp);
129                 match self.bit_size() {
130                     8 => load.values()[comp].u8_ as u64,
131                     16 => load.values()[comp].u16_ as u64,
132                     32 => load.values()[comp].u32_ as u64,
133                     64 => load.values()[comp].u64_,
134                     _ => panic!("Invalid bit size"),
135                 }
136             })
137         } else {
138             None
139         }
140     }
141 
as_int(&self) -> Option<i64>142     fn as_int(&self) -> Option<i64> {
143         assert!(self.num_components() == 1);
144         self.comp_as_int(0)
145     }
146 
as_uint(&self) -> Option<u64>147     fn as_uint(&self) -> Option<u64> {
148         assert!(self.num_components() == 1);
149         self.comp_as_uint(0)
150     }
151 
is_zero(&self) -> bool152     fn is_zero(&self) -> bool {
153         self.num_components() == 1 && self.as_uint() == Some(0)
154     }
155 }
156 
157 impl AsDef for nir_def {
as_def<'a>(&'a self) -> &'a nir_def158     fn as_def<'a>(&'a self) -> &'a nir_def {
159         self
160     }
161 }
162 
163 impl AsDef for nir_src {
as_def<'a>(&'a self) -> &'a nir_def164     fn as_def<'a>(&'a self) -> &'a nir_def {
165         unsafe { NonNull::new(self.ssa).unwrap().as_ref() }
166     }
167 }
168 
169 impl nir_alu_instr {
info(&self) -> &'static nir_op_info170     pub fn info(&self) -> &'static nir_op_info {
171         let info_idx: usize = self.op.try_into().unwrap();
172         unsafe { &nir_op_infos[info_idx] }
173     }
174 
src_components(&self, src_idx: u8) -> u8175     pub fn src_components(&self, src_idx: u8) -> u8 {
176         assert!(src_idx < self.info().num_inputs);
177         unsafe {
178             nir_ssa_alu_instr_src_components(self as *const _, src_idx.into())
179                 .try_into()
180                 .unwrap()
181         }
182     }
183 
srcs_as_slice<'a>(&'a self) -> &'a [nir_alu_src]184     pub fn srcs_as_slice<'a>(&'a self) -> &'a [nir_alu_src] {
185         unsafe {
186             self.src
187                 .as_slice(self.info().num_inputs.try_into().unwrap())
188         }
189     }
190 
get_src(&self, idx: usize) -> &nir_alu_src191     pub fn get_src(&self, idx: usize) -> &nir_alu_src {
192         &self.srcs_as_slice()[idx]
193     }
194 }
195 
196 impl nir_op_info {
name(&self) -> &'static str197     pub fn name(&self) -> &'static str {
198         unsafe { CStr::from_ptr(self.name).to_str().expect("Invalid UTF-8") }
199     }
200 }
201 
202 impl nir_alu_src {
bit_size(&self) -> u8203     pub fn bit_size(&self) -> u8 {
204         self.src.bit_size()
205     }
206 
comp_as_int(&self, comp: u8) -> Option<i64>207     pub fn comp_as_int(&self, comp: u8) -> Option<i64> {
208         self.src.comp_as_int(self.swizzle[usize::from(comp)])
209     }
210 
comp_as_uint(&self, comp: u8) -> Option<u64>211     pub fn comp_as_uint(&self, comp: u8) -> Option<u64> {
212         self.src.comp_as_uint(self.swizzle[usize::from(comp)])
213     }
214 }
215 
216 impl nir_tex_instr {
srcs_as_slice<'a>(&'a self) -> &'a [nir_tex_src]217     pub fn srcs_as_slice<'a>(&'a self) -> &'a [nir_tex_src] {
218         unsafe { std::slice::from_raw_parts(self.src, self.num_srcs as usize) }
219     }
220 
get_src(&self, idx: usize) -> &nir_tex_src221     pub fn get_src(&self, idx: usize) -> &nir_tex_src {
222         &self.srcs_as_slice()[idx]
223     }
224 }
225 
impl nir_intrinsic_instr {
    /// Returns the static description of this intrinsic.
    pub fn info(&self) -> &'static nir_intrinsic_info {
        let info_idx: usize = self.intrinsic.try_into().unwrap();
        // SAFETY: nir_intrinsic_infos is a static table indexed by valid
        // intrinsic opcodes.
        unsafe { &nir_intrinsic_infos[info_idx] }
    }

    /// All sources of this intrinsic as a slice, sized by the intrinsic's
    /// declared source count.
    pub fn srcs_as_slice<'a>(&'a self) -> &'a [nir_src] {
        unsafe { self.src.as_slice(self.info().num_srcs.try_into().unwrap()) }
    }

    /// Returns source `idx`; panics if out of range.
    pub fn get_src(&self, idx: usize) -> &nir_src {
        &self.srcs_as_slice()[idx]
    }

    /// Looks up the const-index slot named by a `NIR_INTRINSIC_*` constant.
    ///
    /// Panics if this intrinsic does not carry that index (index_map entry
    /// is 0; stored entries are biased by one).
    pub fn get_const_index(&self, name: u32) -> u32 {
        let name: usize = name.try_into().unwrap();
        let idx = self.info().index_map[name];
        assert!(idx > 0);
        self.const_index[usize::from(idx - 1)] as u32
    }

    /// BASE const index (signed).
    pub fn base(&self) -> i32 {
        self.get_const_index(NIR_INTRINSIC_BASE) as i32
    }

    /// RANGE_BASE const index (signed).
    pub fn range_base(&self) -> i32 {
        self.get_const_index(NIR_INTRINSIC_RANGE_BASE) as i32
    }

    /// RANGE const index (signed).
    pub fn range(&self) -> i32 {
        self.get_const_index(NIR_INTRINSIC_RANGE) as i32
    }

    /// Per-component write mask.
    pub fn write_mask(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_WRITE_MASK)
    }

    /// Geometry/transform-feedback stream ID.
    pub fn stream_id(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_STREAM_ID)
    }

    /// First vector component accessed.
    pub fn component(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_COMPONENT)
    }

    /// Interpolation mode for input-load intrinsics.
    pub fn interp_mode(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_INTERP_MODE)
    }

    /// ALU op used by reduction/scan intrinsics.
    pub fn reduction_op(&self) -> nir_op {
        self.get_const_index(NIR_INTRINSIC_REDUCTION_OP) as nir_op
    }

    /// Subgroup cluster size for clustered reductions.
    pub fn cluster_size(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_CLUSTER_SIZE)
    }

    /// Image dimensionality for image intrinsics.
    pub fn image_dim(&self) -> glsl_sampler_dim {
        self.get_const_index(NIR_INTRINSIC_IMAGE_DIM) as glsl_sampler_dim
    }

    /// True if the image is arrayed.
    pub fn image_array(&self) -> bool {
        self.get_const_index(NIR_INTRINSIC_IMAGE_ARRAY) != 0
    }

    /// Memory access qualifier flags.
    pub fn access(&self) -> gl_access_qualifier {
        self.get_const_index(NIR_INTRINSIC_ACCESS) as gl_access_qualifier
    }

    /// Effective power-of-two alignment implied by align_mul/align_offset:
    /// when the offset is non-zero, the alignment is limited by the lowest
    /// set bit of the offset; otherwise the full multiple applies.
    pub fn align(&self) -> u32 {
        let mul = self.align_mul();
        let offset = self.align_offset();
        assert!(offset < mul);
        if offset > 0 {
            1 << offset.trailing_zeros()
        } else {
            mul
        }
    }

    /// Alignment multiple (ALIGN_MUL const index).
    pub fn align_mul(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_ALIGN_MUL)
    }

    /// Offset modulo align_mul (ALIGN_OFFSET const index).
    pub fn align_offset(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_ALIGN_OFFSET)
    }

    /// Execution scope for barrier intrinsics.
    pub fn execution_scope(&self) -> mesa_scope {
        self.get_const_index(NIR_INTRINSIC_EXECUTION_SCOPE)
    }

    /// Memory scope for barrier intrinsics.
    pub fn memory_scope(&self) -> mesa_scope {
        self.get_const_index(NIR_INTRINSIC_MEMORY_SCOPE)
    }

    /// Memory ordering semantics for barrier intrinsics.
    pub fn memory_semantics(&self) -> nir_memory_semantics {
        self.get_const_index(NIR_INTRINSIC_MEMORY_SEMANTICS)
    }

    /// Variable modes affected by a barrier.
    pub fn memory_modes(&self) -> nir_variable_mode {
        self.get_const_index(NIR_INTRINSIC_MEMORY_MODES)
    }

    /// Generic per-intrinsic flags.
    pub fn flags(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_FLAGS)
    }

    /// Atomic operation performed by atomic intrinsics.
    pub fn atomic_op(&self) -> nir_atomic_op {
        self.get_const_index(NIR_INTRINSIC_ATOMIC_OP) as nir_atomic_op
    }
}
338 
339 impl nir_intrinsic_info {
name(&self) -> &'static str340     pub fn name(&self) -> &'static str {
341         unsafe { CStr::from_ptr(self.name).to_str().expect("Invalid UTF-8") }
342     }
343 }
344 
345 impl nir_load_const_instr {
values<'a>(&'a self) -> &'a [nir_const_value]346     pub fn values<'a>(&'a self) -> &'a [nir_const_value] {
347         unsafe { self.value.as_slice(self.def.num_components as usize) }
348     }
349 }
350 
351 impl nir_phi_src {
pred<'a>(&'a self) -> &'a nir_block352     pub fn pred<'a>(&'a self) -> &'a nir_block {
353         unsafe { NonNull::new(self.pred).unwrap().as_ref() }
354     }
355 }
356 
357 impl nir_phi_instr {
iter_srcs(&self) -> ExecListIter<nir_phi_src>358     pub fn iter_srcs(&self) -> ExecListIter<nir_phi_src> {
359         ExecListIter::new(&self.srcs, offset_of!(nir_phi_src, node))
360     }
361 }
362 
363 impl nir_jump_instr {
target<'a>(&'a self) -> Option<&'a nir_block>364     pub fn target<'a>(&'a self) -> Option<&'a nir_block> {
365         NonNull::new(self.target).map(|b| unsafe { b.as_ref() })
366     }
367 
else_target<'a>(&'a self) -> Option<&'a nir_block>368     pub fn else_target<'a>(&'a self) -> Option<&'a nir_block> {
369         NonNull::new(self.else_target).map(|b| unsafe { b.as_ref() })
370     }
371 }
372 
373 impl nir_instr {
as_alu<'a>(&'a self) -> Option<&'a nir_alu_instr>374     pub fn as_alu<'a>(&'a self) -> Option<&'a nir_alu_instr> {
375         if self.type_ == nir_instr_type_alu {
376             let p = self as *const nir_instr;
377             Some(unsafe { &*(p as *const nir_alu_instr) })
378         } else {
379             None
380         }
381     }
382 
as_jump<'a>(&'a self) -> Option<&'a nir_jump_instr>383     pub fn as_jump<'a>(&'a self) -> Option<&'a nir_jump_instr> {
384         if self.type_ == nir_instr_type_jump {
385             let p = self as *const nir_instr;
386             Some(unsafe { &*(p as *const nir_jump_instr) })
387         } else {
388             None
389         }
390     }
391 
as_tex<'a>(&'a self) -> Option<&'a nir_tex_instr>392     pub fn as_tex<'a>(&'a self) -> Option<&'a nir_tex_instr> {
393         if self.type_ == nir_instr_type_tex {
394             let p = self as *const nir_instr;
395             Some(unsafe { &*(p as *const nir_tex_instr) })
396         } else {
397             None
398         }
399     }
400 
as_intrinsic<'a>(&'a self) -> Option<&'a nir_intrinsic_instr>401     pub fn as_intrinsic<'a>(&'a self) -> Option<&'a nir_intrinsic_instr> {
402         if self.type_ == nir_instr_type_intrinsic {
403             let p = self as *const nir_instr;
404             Some(unsafe { &*(p as *const nir_intrinsic_instr) })
405         } else {
406             None
407         }
408     }
409 
as_load_const<'a>(&'a self) -> Option<&'a nir_load_const_instr>410     pub fn as_load_const<'a>(&'a self) -> Option<&'a nir_load_const_instr> {
411         if self.type_ == nir_instr_type_load_const {
412             let p = self as *const nir_instr;
413             Some(unsafe { &*(p as *const nir_load_const_instr) })
414         } else {
415             None
416         }
417     }
418 
as_undef<'a>(&'a self) -> Option<&'a nir_undef_instr>419     pub fn as_undef<'a>(&'a self) -> Option<&'a nir_undef_instr> {
420         if self.type_ == nir_instr_type_undef {
421             let p = self as *const nir_instr;
422             Some(unsafe { &*(p as *const nir_undef_instr) })
423         } else {
424             None
425         }
426     }
427 
as_phi<'a>(&'a self) -> Option<&'a nir_phi_instr>428     pub fn as_phi<'a>(&'a self) -> Option<&'a nir_phi_instr> {
429         if self.type_ == nir_instr_type_phi {
430             let p = self as *const nir_instr;
431             Some(unsafe { &*(p as *const nir_phi_instr) })
432         } else {
433             None
434         }
435     }
436 
def<'a>(&'a self) -> Option<&'a nir_def>437     pub fn def<'a>(&'a self) -> Option<&'a nir_def> {
438         unsafe {
439             let def = nir_instr_def(self as *const _ as *mut _);
440             NonNull::new(def).map(|d| d.as_ref())
441         }
442     }
443 }
444 
445 impl nir_block {
iter_instr_list(&self) -> ExecListIter<nir_instr>446     pub fn iter_instr_list(&self) -> ExecListIter<nir_instr> {
447         ExecListIter::new(&self.instr_list, offset_of!(nir_instr, node))
448     }
449 
successors<'a>(&'a self) -> [Option<&'a nir_block>; 2]450     pub fn successors<'a>(&'a self) -> [Option<&'a nir_block>; 2] {
451         [
452             NonNull::new(self.successors[0]).map(|b| unsafe { b.as_ref() }),
453             NonNull::new(self.successors[1]).map(|b| unsafe { b.as_ref() }),
454         ]
455     }
456 
following_if<'a>(&'a self) -> Option<&'a nir_if>457     pub fn following_if<'a>(&'a self) -> Option<&'a nir_if> {
458         let self_ptr = self as *const _ as *mut _;
459         unsafe { nir_block_get_following_if(self_ptr).as_ref() }
460     }
461 
following_loop<'a>(&'a self) -> Option<&'a nir_loop>462     pub fn following_loop<'a>(&'a self) -> Option<&'a nir_loop> {
463         let self_ptr = self as *const _ as *mut _;
464         unsafe { nir_block_get_following_loop(self_ptr).as_ref() }
465     }
466 
parent(&self) -> &nir_cf_node467     pub fn parent(&self) -> &nir_cf_node {
468         self.cf_node.parent().unwrap()
469     }
470 }
471 
472 impl nir_if {
first_then_block(&self) -> &nir_block473     pub fn first_then_block(&self) -> &nir_block {
474         self.iter_then_list().next().unwrap().as_block().unwrap()
475     }
476 
first_else_block(&self) -> &nir_block477     pub fn first_else_block(&self) -> &nir_block {
478         self.iter_else_list().next().unwrap().as_block().unwrap()
479     }
480 
iter_then_list(&self) -> ExecListIter<nir_cf_node>481     pub fn iter_then_list(&self) -> ExecListIter<nir_cf_node> {
482         ExecListIter::new(&self.then_list, offset_of!(nir_cf_node, node))
483     }
484 
iter_else_list(&self) -> ExecListIter<nir_cf_node>485     pub fn iter_else_list(&self) -> ExecListIter<nir_cf_node> {
486         ExecListIter::new(&self.else_list, offset_of!(nir_cf_node, node))
487     }
488 
following_block(&self) -> &nir_block489     pub fn following_block(&self) -> &nir_block {
490         self.cf_node.next().unwrap().as_block().unwrap()
491     }
492 }
493 
494 impl nir_loop {
iter_body(&self) -> ExecListIter<nir_cf_node>495     pub fn iter_body(&self) -> ExecListIter<nir_cf_node> {
496         ExecListIter::new(&self.body, offset_of!(nir_cf_node, node))
497     }
498 
first_block(&self) -> &nir_block499     pub fn first_block(&self) -> &nir_block {
500         self.iter_body().next().unwrap().as_block().unwrap()
501     }
502 
following_block(&self) -> &nir_block503     pub fn following_block(&self) -> &nir_block {
504         self.cf_node.next().unwrap().as_block().unwrap()
505     }
506 }
507 
508 impl nir_cf_node {
as_block<'a>(&'a self) -> Option<&'a nir_block>509     pub fn as_block<'a>(&'a self) -> Option<&'a nir_block> {
510         if self.type_ == nir_cf_node_block {
511             Some(unsafe { &*(self as *const nir_cf_node as *const nir_block) })
512         } else {
513             None
514         }
515     }
516 
as_if<'a>(&'a self) -> Option<&'a nir_if>517     pub fn as_if<'a>(&'a self) -> Option<&'a nir_if> {
518         if self.type_ == nir_cf_node_if {
519             Some(unsafe { &*(self as *const nir_cf_node as *const nir_if) })
520         } else {
521             None
522         }
523     }
524 
as_loop<'a>(&'a self) -> Option<&'a nir_loop>525     pub fn as_loop<'a>(&'a self) -> Option<&'a nir_loop> {
526         if self.type_ == nir_cf_node_loop {
527             Some(unsafe { &*(self as *const nir_cf_node as *const nir_loop) })
528         } else {
529             None
530         }
531     }
532 
next(&self) -> Option<&nir_cf_node>533     pub fn next(&self) -> Option<&nir_cf_node> {
534         let mut iter: ExecListIter<nir_cf_node> =
535             ExecListIter::at(&self.node, offset_of!(nir_cf_node, node), false);
536         iter.next()
537     }
538 
prev(&self) -> Option<&nir_cf_node>539     pub fn prev(&self) -> Option<&nir_cf_node> {
540         let mut iter: ExecListIter<nir_cf_node> =
541             ExecListIter::at(&self.node, offset_of!(nir_cf_node, node), true);
542         iter.next()
543     }
544 
parent<'a>(&'a self) -> Option<&'a nir_cf_node>545     pub fn parent<'a>(&'a self) -> Option<&'a nir_cf_node> {
546         NonNull::new(self.parent).map(|b| unsafe { b.as_ref() })
547     }
548 }
549 
550 impl nir_function_impl {
iter_body(&self) -> ExecListIter<nir_cf_node>551     pub fn iter_body(&self) -> ExecListIter<nir_cf_node> {
552         ExecListIter::new(&self.body, offset_of!(nir_cf_node, node))
553     }
554 
end_block<'a>(&'a self) -> &'a nir_block555     pub fn end_block<'a>(&'a self) -> &'a nir_block {
556         unsafe { NonNull::new(self.end_block).unwrap().as_ref() }
557     }
558 
function<'a>(&'a self) -> &'a nir_function559     pub fn function<'a>(&'a self) -> &'a nir_function {
560         unsafe { self.function.as_ref() }.unwrap()
561     }
562 }
563 
564 impl nir_function {
get_impl(&self) -> Option<&nir_function_impl>565     pub fn get_impl(&self) -> Option<&nir_function_impl> {
566         unsafe { self.impl_.as_ref() }
567     }
568 }
569 
570 impl nir_shader {
iter_functions(&self) -> ExecListIter<nir_function>571     pub fn iter_functions(&self) -> ExecListIter<nir_function> {
572         ExecListIter::new(&self.functions, offset_of!(nir_function, node))
573     }
574 
iter_variables(&self) -> ExecListIter<nir_variable>575     pub fn iter_variables(&self) -> ExecListIter<nir_variable> {
576         ExecListIter::new(&self.variables, offset_of!(nir_variable, node))
577     }
578 }
579