; RUN: llc -aarch64-use-tbi -mtriple=arm64-apple-ios8.0.0 < %s \
; RUN:     | FileCheck --check-prefix=TBI    --check-prefix=BOTH %s
; RUN: llc -aarch64-use-tbi -mtriple=arm64-apple-ios7.1.0 < %s \
; RUN:     | FileCheck --check-prefix=NO_TBI --check-prefix=BOTH %s

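; With top-byte-ignore (TBI), loads and stores ignore bits 56-63 of the
; address, so an 'and' that only clears bits in the top byte is redundant.
; The ios8.0.0 run expects the 'and' to be folded away (TBI prefix); the
; ios7.1.0 run expects it to be kept (NO_TBI prefix).

; load (r & MASK), MASK = 0x00ffffffffffffff (clears only the top byte)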
; BOTH-LABEL:ld_and32:
; TBI-NOT: and x
; NO_TBI: and x
define i32 @ld_and32(i64 %p) {
  %and = and i64 %p, 72057594037927935
  %cast = inttoptr i64 %and to i32*
  %load = load i32, i32* %cast
  ret i32 %load
}

; load (r & MASK) + 4
; BOTH-LABEL:ld_and_plus_offset:
; TBI-NOT: and x
; NO_TBI: and x
define i32 @ld_and_plus_offset(i64 %p) {
  %and = and i64 %p, 72057594037927935
  %cast = inttoptr i64 %and to i32*
  %gep = getelementptr i32, i32* %cast, i64 4
  %load = load i32, i32* %gep
  ret i32 %load
}

; load (r & WIDER_MASK)
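; WIDER_MASK = 0x0fffffffffffffff clears only bits 60-63, still within the
; ignored top byte, so TBI can drop the and here too.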
; BOTH-LABEL:ld_and32_wider:
; TBI-NOT: and x
; NO_TBI: and x
define i32 @ld_and32_wider(i64 %p) {
  %and = and i64 %p, 1152921504606846975
  %cast = inttoptr i64 %and to i32*
  %load = load i32, i32* %cast
  ret i32 %load
}

; BOTH-LABEL:ld_and64:
; TBI-NOT: and x
; NO_TBI: and x
define i64 @ld_and64(i64 %p) {
  %and = and i64 %p, 72057594037927935
  %cast = inttoptr i64 %and to i64*
  %load = load i64, i64* %cast
  ret i64 %load
}

; BOTH-LABEL:st_and32:
; TBI-NOT: and x
; NO_TBI: and x
define void @st_and32(i64 %p, i32 %v) {
  %and = and i64 %p, 72057594037927935
  %cast = inttoptr i64 %and to i32*
  store i32 %v, i32* %cast
  ret void
}

; load (x1 + x2) & MASK
; BOTH-LABEL:ld_ro:
; TBI-NOT: and x
; NO_TBI: and x
define i32 @ld_ro(i64 %a, i64 %b) {
  %p = add i64 %a, %b
  %and = and i64 %p, 72057594037927935
  %cast = inttoptr i64 %and to i32*
  %load = load i32, i32* %cast
  ret i32 %load
}

; load (r1 & MASK) + r2
; BOTH-LABEL:ld_ro2:
; TBI-NOT: and x
; NO_TBI: and x
define i32 @ld_ro2(i64 %a, i64 %b) {
  %and = and i64 %a, 72057594037927935
  %p = add i64 %and, %b
  %cast = inttoptr i64 %p to i32*
  %load = load i32, i32* %cast
  ret i32 %load
}

; load (r1 & MASK) | r2
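; The and only changes the top byte of the final address, which TBI ignores.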
; BOTH-LABEL:ld_indirect_and:
; TBI-NOT: and x
; NO_TBI: and x
define i32 @ld_indirect_and(i64 %r1, i64 %r2) {
  %and = and i64 %r1, 72057594037927935
  %p = or i64 %and, %r2
  %cast = inttoptr i64 %p to i32*
  %load = load i32, i32* %cast
  ret i32 %load
}

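; 0x007fffffffffffff also clears bit 55, which is not part of the ignored
; top byte, so the and must stay even with TBI.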
; BOTH-LABEL:ld_and32_narrower:
; BOTH: and x
define i32 @ld_and32_narrower(i64 %p) {
  %and = and i64 %p, 36028797018963967
  %cast = inttoptr i64 %and to i32*
  %load = load i32, i32* %cast
  ret i32 %load
}

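; The and masks the low bits of an 8-bit offset, not the top byte of the
; address, so it cannot be removed in either mode.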
; BOTH-LABEL:ld_and8:
; BOTH: and x
define i32 @ld_and8(i64 %base, i8 %off) {
  %off_masked = and i8 %off, 63
  %off_64 = zext i8 %off_masked to i64
  %p = add i64 %base, %off_64
  %cast = inttoptr i64 %p to i32*
  %load = load i32, i32* %cast
  ret i32 %load
}