Lines Matching refs:addrspace
12 define void @test2(<2 x i32> addrspace(1)* %out, <2 x i32> addrspace(1)* %in) {
13 %b_ptr = getelementptr <2 x i32>, <2 x i32> addrspace(1)* %in, i32 1
14 %a = load <2 x i32>, <2 x i32> addrspace(1)* %in
15 %b = load <2 x i32>, <2 x i32> addrspace(1)* %b_ptr
17 store <2 x i32> %result, <2 x i32> addrspace(1)* %out
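Only lines containing addrspace are listed, so the and itself and the return are elided from each hit. For @test2 the full function presumably looks like the sketch below; the %result line is an inference from the store of %result, not part of the match output:

define void @test2(<2 x i32> addrspace(1)* %out, <2 x i32> addrspace(1)* %in) {
  %b_ptr = getelementptr <2 x i32>, <2 x i32> addrspace(1)* %in, i32 1
  %a = load <2 x i32>, <2 x i32> addrspace(1)* %in
  %b = load <2 x i32>, <2 x i32> addrspace(1)* %b_ptr
  ; inferred: this line has no addrspace, so the filter drops it
  %result = and <2 x i32> %a, %b
  store <2 x i32> %result, <2 x i32> addrspace(1)* %out
  ret void
}

@test4 (lines 32-37 below) follows the same shape with <4 x i32> operands.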
32 define void @test4(<4 x i32> addrspace(1)* %out, <4 x i32> addrspace(1)* %in) {
33 %b_ptr = getelementptr <4 x i32>, <4 x i32> addrspace(1)* %in, i32 1
34 %a = load <4 x i32>, <4 x i32> addrspace(1)* %in
35 %b = load <4 x i32>, <4 x i32> addrspace(1)* %b_ptr
37 store <4 x i32> %result, <4 x i32> addrspace(1)* %out
43 define void @s_and_i32(i32 addrspace(1)* %out, i32 %a, i32 %b) {
45 store i32 %and, i32 addrspace(1)* %out, align 4
51 define void @s_and_constant_i32(i32 addrspace(1)* %out, i32 %a) {
53 store i32 %and, i32 addrspace(1)* %out, align 4
59 define void @v_and_i32(i32 addrspace(1)* %out, i32 addrspace(1)* %aptr, i32 addrspace(1)* %bptr) {
60 %a = load i32, i32 addrspace(1)* %aptr, align 4
61 %b = load i32, i32 addrspace(1)* %bptr, align 4
63 store i32 %and, i32 addrspace(1)* %out, align 4
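The same load/and/store pattern presumably fills in @v_and_i32; a sketch, with the elided and inferred from the store of %and:

define void @v_and_i32(i32 addrspace(1)* %out, i32 addrspace(1)* %aptr, i32 addrspace(1)* %bptr) {
  %a = load i32, i32 addrspace(1)* %aptr, align 4
  %b = load i32, i32 addrspace(1)* %bptr, align 4
  %and = and i32 %a, %b  ; inferred: no addrspace on this line, so it is not a hit
  store i32 %and, i32 addrspace(1)* %out, align 4
  ret void
}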
69 define void @v_and_constant_i32(i32 addrspace(1)* %out, i32 addrspace(1)* %aptr) {
70 %a = load i32, i32 addrspace(1)* %aptr, align 4
72 store i32 %and, i32 addrspace(1)* %out, align 4
78 define void @v_and_inline_imm_64_i32(i32 addrspace(1)* %out, i32 addrspace(1)* %aptr) {
79 %a = load i32, i32 addrspace(1)* %aptr, align 4
81 store i32 %and, i32 addrspace(1)* %out, align 4
87 define void @v_and_inline_imm_neg_16_i32(i32 addrspace(1)* %out, i32 addrspace(1)* %aptr) {
88 %a = load i32, i32 addrspace(1)* %aptr, align 4
90 store i32 %and, i32 addrspace(1)* %out, align 4
96 define void @s_and_i64(i64 addrspace(1)* %out, i64 %a, i64 %b) {
98 store i64 %and, i64 addrspace(1)* %out, align 8
105 define void @s_and_i1(i1 addrspace(1)* %out, i1 %a, i1 %b) {
107 store i1 %and, i1 addrspace(1)* %out
113 define void @s_and_constant_i64(i64 addrspace(1)* %out, i64 %a) {
115 store i64 %and, i64 addrspace(1)* %out, align 8
122 define void @v_and_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 addrspace(1)* %bptr) {
123 %a = load i64, i64 addrspace(1)* %aptr, align 8
124 %b = load i64, i64 addrspace(1)* %bptr, align 8
126 store i64 %and, i64 addrspace(1)* %out, align 8
133 define void @v_and_i64_br(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 addrspace(1)* %bptr,…
139 %a = load i64, i64 addrspace(1)* %aptr, align 8
140 %b = load i64, i64 addrspace(1)* %bptr, align 8
146 store i64 %tmp1, i64 addrspace(1)* %out, align 8
153 define void @v_and_constant_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr) {
154 %a = load i64, i64 addrspace(1)* %aptr, align 8
156 store i64 %and, i64 addrspace(1)* %out, align 8
164 define void @v_and_inline_imm_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr) {
165 %a = load i64, i64 addrspace(1)* %aptr, align 8
167 store i64 %and, i64 addrspace(1)* %out, align 8
173 define void @s_and_inline_imm_64_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 %a) {
175 store i64 %and, i64 addrspace(1)* %out, align 8
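The inline-immediate tests differ only in the elided and line, and the function name implies the constant. For @s_and_inline_imm_64_i64 that line is presumably:

  %and = and i64 %a, 64  ; inferred from the name; 64 is the top of the -16..64 inline-constant range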
181 define void @s_and_inline_imm_1_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 %a) {
183 store i64 %and, i64 addrspace(1)* %out, align 8
189 define void @s_and_inline_imm_1.0_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 %a) {
191 store i64 %and, i64 addrspace(1)* %out, align 8
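The _1.0_ variants use the IEEE-754 bit pattern of double 1.0 as the mask (sign 0, biased exponent 1023 = 0x3FF, mantissa 0, giving 0x3FF0000000000000), so the elided line is presumably:

  %and = and i64 %a, 4607182418800017408  ; inferred; 4607182418800017408 = 0x3FF0000000000000 = bits of double 1.0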
197 define void @s_and_inline_imm_neg_1.0_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 %a) {
199 store i64 %and, i64 addrspace(1)* %out, align 8
205 define void @s_and_inline_imm_0.5_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 %a) {
207 store i64 %and, i64 addrspace(1)* %out, align 8
213 define void @s_and_inline_imm_neg_0.5_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 %a) {
215 store i64 %and, i64 addrspace(1)* %out, align 8
221 define void @s_and_inline_imm_2.0_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 %a) {
223 store i64 %and, i64 addrspace(1)* %out, align 8
229 define void @s_and_inline_imm_neg_2.0_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 %a) {
231 store i64 %and, i64 addrspace(1)* %out, align 8
237 define void @s_and_inline_imm_4.0_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 %a) {
239 store i64 %and, i64 addrspace(1)* %out, align 8
245 define void @s_and_inline_imm_neg_4.0_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 %a) {
247 store i64 %and, i64 addrspace(1)* %out, align 8
259 define void @s_and_inline_imm_f32_4.0_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 %a) {
261 store i64 %and, i64 addrspace(1)* %out, align 8
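The _f32_ variants embed a float bit pattern in the low half of the i64 mask; float 4.0 is 0x40800000 (sign 0, biased exponent 129, mantissa 0), so the elided line is presumably:

  %and = and i64 %a, 1082130432  ; inferred; 1082130432 = 0x40800000 = bits of float 4.0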
271 define void @s_and_inline_imm_f32_neg_4.0_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 …
273 store i64 %and, i64 addrspace(1)* %out, align 8
282 define void @s_and_inline_high_imm_f32_4.0_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64…
284 store i64 %and, i64 addrspace(1)* %out, align 8
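The _high_imm_ variants presumably shift that float pattern into the upper 32 bits, 0x40800000 << 32 = 0x4080000000000000, giving:

  %and = and i64 %a, 4647714815446351872  ; inferred; 4647714815446351872 = 0x4080000000000000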
292 define void @s_and_inline_high_imm_f32_neg_4.0_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr,…
294 store i64 %and, i64 addrspace(1)* %out, align 8