Lines Matching refs:vsll
; CHECK-NEXT: vsll %v0, %v0, %v1
…%3 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvvl(<256 x double> %0, <256 x double> %1, i32…
declare <256 x double> @llvm.ve.vl.vsll.vvvl(<256 x double>, <256 x double>, i32)
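A hedged sketch of what the truncated .vvvl call looks like in full, paired with the declaration above; the function name and the i32 256 vector-length operand are assumptions (256 matches the untruncated .vvsl calls further down), not values recovered from this listing.

define <256 x double> @vsll_vvvl_example(<256 x double> %0, <256 x double> %1) {
  ; Vector shift left logical: each element of %0 is shifted by the
  ; corresponding element of %1; i32 256 is the assumed vector-length operand.
  %3 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvvl(<256 x double> %0, <256 x double> %1, i32 256)
  ret <256 x double> %3
}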
; CHECK-NEXT: vsll %v2, %v0, %v1
…%4 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvvvl(<256 x double> %0, <256 x double> %1, <2…
declare <256 x double> @llvm.ve.vl.vsll.vvvvl(<256 x double>, <256 x double>, <256 x double>, i32)
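A similar sketch for the .vvvvl form, which takes a third <256 x double> operand in addition to the two shift inputs; the function name and the i32 128 vector length are again assumptions.

define <256 x double> @vsll_vvvvl_example(<256 x double> %0, <256 x double> %1, <256 x double> %2) {
  ; As above, but with an extra pass-through operand %2; i32 128 is an
  ; assumed vector length, not taken from the truncated line.
  %4 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvvvl(<256 x double> %0, <256 x double> %1, <256 x double> %2, i32 128)
  ret <256 x double> %4
}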
; CHECK-NEXT: vsll %v0, %v0, %s0
%3 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvsl(<256 x double> %0, i64 %1, i32 256)
declare <256 x double> @llvm.ve.vl.vsll.vvsl(<256 x double>, i64, i32)
; CHECK-NEXT: vsll %v1, %v0, %s0
…%4 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvsvl(<256 x double> %0, i64 %1, <256 x double…
declare <256 x double> @llvm.ve.vl.vsll.vvsvl(<256 x double>, i64, <256 x double>, i32)
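A sketch of the full .vvsvl call, whose shift amount is a scalar i64 rather than a vector; the function name and i32 128 vector length are assumptions.

define <256 x double> @vsll_vvsvl_example(<256 x double> %0, i64 %1, <256 x double> %2) {
  ; The scalar shift amount %1 (%s0 in the CHECK line) applies to every
  ; element; %2 is the extra pass-through operand and i32 128 an assumed
  ; vector length.
  %4 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvsvl(<256 x double> %0, i64 %1, <256 x double> %2, i32 128)
  ret <256 x double> %4
}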
; CHECK-NEXT: vsll %v0, %v0, 8
%2 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvsl(<256 x double> %0, i64 8, i32 256)
; CHECK-NEXT: vsll %v1, %v0, 8
…%3 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvsvl(<256 x double> %0, i64 8, <256 x double>…
; CHECK-NEXT: vsll %v2, %v0, %v1, %vm1
…%5 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvvmvl(<256 x double> %0, <256 x double> %1, <…
declare <256 x double> @llvm.ve.vl.vsll.vvvmvl(<256 x double>, <256 x double>, <256 x i1>, <256 x double>, i32)
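The masked .vvvmvl form adds a <256 x i1> mask operand; a sketch with an assumed function name, vector length, and masking behavior.

define <256 x double> @vsll_vvvmvl_example(<256 x double> %0, <256 x double> %1, <256 x i1> %2, <256 x double> %3) {
  ; Mask %2 (the %vm1 register above) selects the lanes to shift; unselected
  ; lanes presumably keep the pass-through values from %3. i32 128 is an
  ; assumed vector length.
  %5 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvvmvl(<256 x double> %0, <256 x double> %1, <256 x i1> %2, <256 x double> %3, i32 128)
  ret <256 x double> %5
}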
; CHECK-NEXT: vsll %v1, %v0, %s0, %vm1
…%5 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvsmvl(<256 x double> %0, i64 %1, <256 x i1> %…
declare <256 x double> @llvm.ve.vl.vsll.vvsmvl(<256 x double>, i64, <256 x i1>, <256 x double>, i32)
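And the scalar-shift masked variant, sketched under the same assumptions (illustrative function name, i32 128 vector length).

define <256 x double> @vsll_vvsmvl_example(<256 x double> %0, i64 %1, <256 x i1> %2, <256 x double> %3) {
  ; Scalar shift amount %1 under mask %2, with pass-through %3 and an
  ; assumed vector length of 128.
  %5 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvsmvl(<256 x double> %0, i64 %1, <256 x i1> %2, <256 x double> %3, i32 128)
  ret <256 x double> %5
}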
; CHECK-NEXT: vsll %v1, %v0, 8, %vm1
…%4 = tail call fast <256 x double> @llvm.ve.vl.vsll.vvsmvl(<256 x double> %0, i64 8, <256 x i1> %1…