; RUN: llc -mtriple=aarch64--linux-gnu -mattr=+sve --asm-verbose=false < %s 2>%t | FileCheck %s
; RUN: FileCheck --check-prefix=WARN --allow-empty %s <%t

; If this check fails please read test/CodeGen/AArch64/README for instructions on how to resolve it.
; WARN-NOT: warning
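
; Note: every call below passes i32 1 as the trailing prfop operand, which
; selects the pldl1strm prefetch hint in the emitted instruction; other SVE
; prfop immediates (e.g. 0 for pldl1keep) are encoded the same way.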

; PRFB <prfop>, <Pg>, [<Xn|SP>, <Zm>.S, <mod>] -> 32-bit indexes
define void @llvm_aarch64_sve_prfb_gather_uxtw_index_nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_uxtw_index_nx4vi32:
; CHECK-NEXT: prfb pldl1strm, p0, [x0, z0.s, uxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfb.gather.uxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfb_gather_scaled_sxtw_index_nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_scaled_sxtw_index_nx4vi32:
; CHECK-NEXT: prfb pldl1strm, p0, [x0, z0.s, sxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfb.gather.sxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 1)
  ret void
}

; PRFB <prfop>, <Pg>, [<Xn|SP>, <Zm>.D, <mod>] -> 32-bit unpacked indexes
define void @llvm_aarch64_sve_prfb_gather_uxtw_index_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_uxtw_index_nx2vi64:
; CHECK-NEXT: prfb pldl1strm, p0, [x0, z0.d, uxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfb.gather.uxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfb_gather_scaled_sxtw_index_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_scaled_sxtw_index_nx2vi64:
; CHECK-NEXT: prfb pldl1strm, p0, [x0, z0.d, sxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfb.gather.sxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 1)
  ret void
}

; PRFB <prfop>, <Pg>, [<Xn|SP>, <Zm>.D] -> 64-bit indexes
define void @llvm_aarch64_sve_prfb_gather_scaled_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_scaled_nx2vi64:
; CHECK-NEXT: prfb pldl1strm, p0, [x0, z0.d]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfb.gather.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes, i32 1)
  ret void
}

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

; PRFH <prfop>, <Pg>, [<Xn|SP>, <Zm>.S, <mod>] -> 32-bit indexes
define void @llvm_aarch64_sve_prfh_gather_uxtw_index_nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_uxtw_index_nx4vi32:
; CHECK-NEXT: prfh pldl1strm, p0, [x0, z0.s, uxtw #1]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfh.gather.uxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfh_gather_scaled_sxtw_index_nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scaled_sxtw_index_nx4vi32:
; CHECK-NEXT: prfh pldl1strm, p0, [x0, z0.s, sxtw #1]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfh.gather.sxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 1)
  ret void
}

; PRFH <prfop>, <Pg>, [<Xn|SP>, <Zm>.D, <mod> #1] -> 32-bit unpacked indexes
define void @llvm_aarch64_sve_prfh_gather_uxtw_index_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_uxtw_index_nx2vi64:
; CHECK-NEXT: prfh pldl1strm, p0, [x0, z0.d, uxtw #1]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfh.gather.uxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfh_gather_scaled_sxtw_index_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scaled_sxtw_index_nx2vi64:
; CHECK-NEXT: prfh pldl1strm, p0, [x0, z0.d, sxtw #1]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfh.gather.sxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 1)
  ret void
}

; PRFH <prfop>, <Pg>, [<Xn|SP>, <Zm>.D] -> 64-bit indexes
define void @llvm_aarch64_sve_prfh_gather_scaled_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scaled_nx2vi64:
; CHECK-NEXT: prfh pldl1strm, p0, [x0, z0.d, lsl #1]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfh.gather.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes, i32 1)
  ret void
}

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

; PRFW <prfop>, <Pg>, [<Xn|SP>, <Zm>.S, <mod>] -> 32-bit indexes
define void @llvm_aarch64_sve_prfw_gather_uxtw_index_nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_uxtw_index_nx4vi32:
; CHECK-NEXT: prfw pldl1strm, p0, [x0, z0.s, uxtw #2]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfw.gather.uxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfw_gather_scaled_sxtw_index_nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scaled_sxtw_index_nx4vi32:
; CHECK-NEXT: prfw pldl1strm, p0, [x0, z0.s, sxtw #2]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfw.gather.sxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 1)
  ret void
}

; PRFW <prfop>, <Pg>, [<Xn|SP>, <Zm>.D, <mod> #2] -> 32-bit unpacked indexes
define void @llvm_aarch64_sve_prfw_gather_uxtw_index_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_uxtw_index_nx2vi64:
; CHECK-NEXT: prfw pldl1strm, p0, [x0, z0.d, uxtw #2]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfw.gather.uxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfw_gather_scaled_sxtw_index_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scaled_sxtw_index_nx2vi64:
; CHECK-NEXT: prfw pldl1strm, p0, [x0, z0.d, sxtw #2]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfw.gather.sxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 1)
  ret void
}

; PRFW <prfop>, <Pg>, [<Xn|SP>, <Zm>.D] -> 64-bit indexes
define void @llvm_aarch64_sve_prfw_gather_scaled_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scaled_nx2vi64:
; CHECK-NEXT: prfw pldl1strm, p0, [x0, z0.d, lsl #2]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfw.gather.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes, i32 1)
  ret void
}

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

; PRFD <prfop>, <Pg>, [<Xn|SP>, <Zm>.S, <mod>] -> 32-bit indexes
define void @llvm_aarch64_sve_prfd_gather_uxtw_index_nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_uxtw_index_nx4vi32:
; CHECK-NEXT: prfd pldl1strm, p0, [x0, z0.s, uxtw #3]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfd.gather.uxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfd_gather_scaled_sxtw_index_nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scaled_sxtw_index_nx4vi32:
; CHECK-NEXT: prfd pldl1strm, p0, [x0, z0.s, sxtw #3]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfd.gather.sxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 1)
  ret void
}

; PRFD <prfop>, <Pg>, [<Xn|SP>, <Zm>.D, <mod> #3] -> 32-bit unpacked indexes
define void @llvm_aarch64_sve_prfd_gather_uxtw_index_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_uxtw_index_nx2vi64:
; CHECK-NEXT: prfd pldl1strm, p0, [x0, z0.d, uxtw #3]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfd.gather.uxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfd_gather_scaled_sxtw_index_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scaled_sxtw_index_nx2vi64:
; CHECK-NEXT: prfd pldl1strm, p0, [x0, z0.d, sxtw #3]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfd.gather.sxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 1)
  ret void
}

; PRFD <prfop>, <Pg>, [<Xn|SP>, <Zm>.D] -> 64-bit indexes
define void @llvm_aarch64_sve_prfd_gather_scaled_nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scaled_nx2vi64:
; CHECK-NEXT: prfd pldl1strm, p0, [x0, z0.d, lsl #3]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.prfd.gather.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes, i32 1)
  ret void
}
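
; Intrinsic declarations. The nx4vi32 variants take a <vscale x 4 x i1>
; predicate and <vscale x 4 x i32> indexes; the nx2vi64 uxtw/sxtw variants take
; unpacked <vscale x 2 x i32> indexes; the plain .index.nx2vi64 variants take
; <vscale x 2 x i64> indexes.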

declare void @llvm.aarch64.sve.prfb.gather.sxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfb.gather.uxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfb.gather.sxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfb.gather.uxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfb.gather.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes, i32 %prfop)

declare void @llvm.aarch64.sve.prfh.gather.sxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfh.gather.uxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfh.gather.sxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfh.gather.uxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfh.gather.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes, i32 %prfop)

declare void @llvm.aarch64.sve.prfw.gather.sxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfw.gather.uxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfw.gather.sxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfw.gather.uxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfw.gather.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes, i32 %prfop)

declare void @llvm.aarch64.sve.prfd.gather.sxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfd.gather.uxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfd.gather.sxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfd.gather.uxtw.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i32> %indexes, i32 %prfop)
declare void @llvm.aarch64.sve.prfd.gather.index.nx2vi64(<vscale x 2 x i1> %Pg, i8* %base, <vscale x 2 x i64> %indexes, i32 %prfop)
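
; A minimal illustrative sketch (commented out, so it adds no CHECK coverage):
; with a prfop immediate of 0 instead of 1, the same intrinsic would be
; expected to emit "prfb pldl1keep, p0, [x0, z0.s, uxtw]", assuming the
; immediate maps directly onto the instruction's <prfop> field. The function
; name below is hypothetical, not part of the test.
;
; define void @prfb_gather_pldl1keep_sketch(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes) nounwind {
;   call void @llvm.aarch64.sve.prfb.gather.uxtw.index.nx4vi32(<vscale x 4 x i1> %Pg, i8* %base, <vscale x 4 x i32> %indexes, i32 0)
;   ret void
; }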