# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -march=aarch64 -run-pass=legalizer %s -o - -verify-machineinstrs | FileCheck %s
--- |
  target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
  target triple = "aarch64"

  define void @test_extload() {
  entry:
    ret void
  }

  define i64 @sext_i32_i64(i32* %ptr) {
    %ld = load i32, i32* %ptr, align 4
    %v = sext i32 %ld to i64
    ret i64 %v
  }

  define i64 @sext_i16_i64(i16* %ptr) {
    %ld = load i16, i16* %ptr, align 2
    %v = sext i16 %ld to i64
    ret i64 %v
  }

  define i64 @sext_i8_i64(i8* %ptr) {
    %ld = load i8, i8* %ptr, align 1
    %v = sext i8 %ld to i64
    ret i64 %v
  }

  define i64 @zext_i32_i64(i32* %ptr) {
    %ld = load i32, i32* %ptr, align 4
    %v = zext i32 %ld to i64
    ret i64 %v
  }

  define i64 @zext_i16_i64(i16* %ptr) {
    %ld = load i16, i16* %ptr, align 2
    %v = zext i16 %ld to i64
    ret i64 %v
  }

  define i64 @zext_i8_i64(i8* %ptr) {
    %ld = load i8, i8* %ptr, align 1
    %v = zext i8 %ld to i64
    ret i64 %v
  }

...
---
name: test_extload
body: |
  bb.0.entry:
    liveins: $x0
    ; CHECK-LABEL: name: test_extload
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p0) :: (load 1)
    ; CHECK: $w0 = COPY [[LOAD]](s32)
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_LOAD %0 :: (load 1)
    $w0 = COPY %1
...
---
name: sext_i32_i64
body: |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: sext_i32_i64
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load 4 from %ir.ptr)
    ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load 4 from %ir.ptr)
    $x0 = COPY %2(s64)
    RET_ReallyLR implicit $x0

...
---
name: sext_i16_i64
body: |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: sext_i16_i64
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load 2 from %ir.ptr)
    ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load 2 from %ir.ptr)
    $x0 = COPY %2(s64)
    RET_ReallyLR implicit $x0

...
---
name: sext_i8_i64
body: |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: sext_i8_i64
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load 1 from %ir.ptr)
    ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load 1 from %ir.ptr)
    $x0 = COPY %2(s64)
    RET_ReallyLR implicit $x0

...
---
name: zext_i32_i64
body: |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: zext_i32_i64
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load 4 from %ir.ptr)
    ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load 4 from %ir.ptr)
    $x0 = COPY %2(s64)
    RET_ReallyLR implicit $x0

...
---
name: zext_i16_i64
body: |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: zext_i16_i64
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load 2 from %ir.ptr)
    ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load 2 from %ir.ptr)
    $x0 = COPY %2(s64)
    RET_ReallyLR implicit $x0

...
---
name: zext_i8_i64
body: |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: zext_i8_i64
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load 1 from %ir.ptr)
    ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load 1 from %ir.ptr)
    $x0 = COPY %2(s64)
    RET_ReallyLR implicit $x0

...