
#define VISITOR_LIST_THAT_RETURN(V) \
  V(AddSubExtended) \
  V(AddSubImmediate) \
  V(AddSubShifted) \
  V(AddSubWithCarry) \
  V(AtomicMemory) \
  V(Bitfield) \
  V(CompareBranch) \
  V(ConditionalBranch) \
  V(ConditionalCompareImmediate) \
  V(ConditionalCompareRegister) \
  V(ConditionalSelect) \
  V(Crypto2RegSHA) \
  V(Crypto3RegSHA) \
  V(CryptoAES) \
  V(DataProcessing1Source) \
  V(DataProcessing2Source) \
  V(DataProcessing3Source) \
  V(EvaluateIntoFlags) \
  V(Exception) \
  V(Extract) \
  V(FPCompare) \
  V(FPConditionalCompare) \
  V(FPConditionalSelect) \
  V(FPDataProcessing1Source) \
  V(FPDataProcessing2Source) \
  V(FPDataProcessing3Source) \
  V(FPFixedPointConvert) \
  V(FPImmediate) \
  V(FPIntegerConvert) \
  V(LoadLiteral) \
  V(LoadStoreExclusive) \
  V(LoadStorePAC) \
  V(LoadStorePairNonTemporal) \
  V(LoadStorePairOffset) \
  V(LoadStorePairPostIndex) \
  V(LoadStorePairPreIndex) \
  V(LoadStorePostIndex) \
  V(LoadStorePreIndex) \
  V(LoadStoreRCpcUnscaledOffset) \
  V(LoadStoreRegisterOffset) \
  V(LoadStoreUnscaledOffset) \
  V(LoadStoreUnsignedOffset) \
  V(LogicalImmediate) \
  V(LogicalShifted) \
  V(MoveWideImmediate) \
  V(NEON2RegMisc) \
  V(NEON2RegMiscFP16) \
  V(NEON3Different) \
  V(NEON3Same) \
  V(NEON3SameExtra) \
  V(NEON3SameFP16) \
  V(NEONAcrossLanes) \
  V(NEONByIndexedElement) \
  V(NEONCopy) \
  V(NEONExtract) \
  V(NEONLoadStoreMultiStruct) \
  V(NEONLoadStoreMultiStructPostIndex) \
  V(NEONLoadStoreSingleStruct) \
  V(NEONLoadStoreSingleStructPostIndex) \
  V(NEONModifiedImmediate) \
  V(NEONPerm) \
  V(NEONScalar2RegMisc) \
  V(NEONScalar2RegMiscFP16) \
  V(NEONScalar3Diff) \
  V(NEONScalar3Same) \
  V(NEONScalar3SameExtra) \
  V(NEONScalar3SameFP16) \
  V(NEONScalarByIndexedElement) \
  V(NEONScalarCopy) \
  V(NEONScalarPairwise) \
  V(NEONScalarShiftImmediate) \
  V(NEONShiftImmediate) \
  V(NEONTable) \
  V(PCRelAddressing) \
  V(RotateRightIntoFlags) \
  V(SVE32BitGatherLoad_ScalarPlus32BitUnscaledOffsets) \
  V(SVE32BitGatherLoad_VectorPlusImm) \
  V(SVE32BitGatherLoadHalfwords_ScalarPlus32BitScaledOffsets) \
  V(SVE32BitGatherLoadWords_ScalarPlus32BitScaledOffsets) \
  V(SVE32BitGatherPrefetch_ScalarPlus32BitScaledOffsets) \
  V(SVE32BitGatherPrefetch_VectorPlusImm) \
  V(SVE32BitScatterStore_ScalarPlus32BitScaledOffsets) \
  V(SVE32BitScatterStore_ScalarPlus32BitUnscaledOffsets) \
  V(SVE32BitScatterStore_VectorPlusImm) \
  V(SVE64BitGatherLoad_ScalarPlus32BitUnpackedScaledOffsets) \
  V(SVE64BitGatherLoad_ScalarPlus64BitScaledOffsets) \
  V(SVE64BitGatherLoad_ScalarPlus64BitUnscaledOffsets) \
  V(SVE64BitGatherLoad_ScalarPlusUnpacked32BitUnscaledOffsets) \
  V(SVE64BitGatherLoad_VectorPlusImm) \
  V(SVE64BitGatherPrefetch_ScalarPlus64BitScaledOffsets) \
  V(SVE64BitGatherPrefetch_ScalarPlusUnpacked32BitScaledOffsets) \
  V(SVE64BitGatherPrefetch_VectorPlusImm) \
  V(SVE64BitScatterStore_ScalarPlus64BitScaledOffsets) \
  V(SVE64BitScatterStore_ScalarPlus64BitUnscaledOffsets) \
  V(SVE64BitScatterStore_ScalarPlusUnpacked32BitScaledOffsets) \
  V(SVE64BitScatterStore_ScalarPlusUnpacked32BitUnscaledOffsets) \
  V(SVE64BitScatterStore_VectorPlusImm) \
  V(SVEAddressGeneration) \
  V(SVEBitwiseLogicalUnpredicated) \
  V(SVEBitwiseShiftUnpredicated) \
  V(SVEFFRInitialise) \
  V(SVEFFRWriteFromPredicate) \
  V(SVEFPAccumulatingReduction) \
  V(SVEFPArithmeticUnpredicated) \
  V(SVEFPCompareVectors) \
  V(SVEFPCompareWithZero) \
  V(SVEFPComplexAddition) \
  V(SVEFPComplexMulAdd) \
  V(SVEFPComplexMulAddIndex) \
  V(SVEFPFastReduction) \
  V(SVEFPMulIndex) \
  V(SVEFPMulAdd) \
  V(SVEFPMulAddIndex) \
  V(SVEFPUnaryOpUnpredicated) \
  V(SVEIncDecByPredicateCount) \
  V(SVEIndexGeneration) \
  V(SVEIntArithmeticUnpredicated) \
  V(SVEIntCompareSignedImm) \
  V(SVEIntCompareUnsignedImm) \
  V(SVEIntCompareVectors) \
  V(SVEIntMulAddPredicated) \
  V(SVEIntMulAddUnpredicated) \
  V(SVEIntReduction) \
  V(SVEIntUnaryArithmeticPredicated) \
  V(SVEMovprfx) \
  V(SVEMulIndex) \
  V(SVEPermuteVectorExtract) \
  V(SVEPermuteVectorInterleaving) \
  V(SVEPredicateCount) \
  V(SVEPredicateLogical) \
  V(SVEPropagateBreak) \
  V(SVEStackFrameAdjustment) \
  V(SVEStackFrameSize) \
  V(SVEVectorSelect) \
  V(SVEBitwiseLogical_Predicated) \
  V(SVEBitwiseLogicalWithImm_Unpredicated) \
  V(SVEBitwiseShiftByImm_Predicated) \
  V(SVEBitwiseShiftByVector_Predicated) \
  V(SVEBitwiseShiftByWideElements_Predicated) \
  V(SVEBroadcastBitmaskImm) \
  V(SVEBroadcastFPImm_Unpredicated) \
  V(SVEBroadcastGeneralRegister) \
  V(SVEBroadcastIndexElement) \
  V(SVEBroadcastIntImm_Unpredicated) \
  V(SVECompressActiveElements) \
  V(SVEConditionallyBroadcastElementToVector) \
  V(SVEConditionallyExtractElementToSIMDFPScalar) \
  V(SVEConditionallyExtractElementToGeneralRegister) \
  V(SVEConditionallyTerminateScalars) \
  V(SVEConstructivePrefix_Unpredicated) \
  V(SVEContiguousFirstFaultLoad_ScalarPlusScalar) \
  V(SVEContiguousLoad_ScalarPlusImm) \
  V(SVEContiguousLoad_ScalarPlusScalar) \
  V(SVEContiguousNonFaultLoad_ScalarPlusImm) \
  V(SVEContiguousNonTemporalLoad_ScalarPlusImm) \
  V(SVEContiguousNonTemporalLoad_ScalarPlusScalar) \
  V(SVEContiguousNonTemporalStore_ScalarPlusImm) \
  V(SVEContiguousNonTemporalStore_ScalarPlusScalar) \
  V(SVEContiguousPrefetch_ScalarPlusImm) \
  V(SVEContiguousPrefetch_ScalarPlusScalar) \
  V(SVEContiguousStore_ScalarPlusImm) \
  V(SVEContiguousStore_ScalarPlusScalar) \
  V(SVECopySIMDFPScalarRegisterToVector_Predicated) \
  V(SVECopyFPImm_Predicated) \
  V(SVECopyGeneralRegisterToVector_Predicated) \
  V(SVECopyIntImm_Predicated) \
  V(SVEElementCount) \
  V(SVEExtractElementToSIMDFPScalarRegister) \
  V(SVEExtractElementToGeneralRegister) \
  V(SVEFPArithmetic_Predicated) \
  V(SVEFPArithmeticWithImm_Predicated) \
  V(SVEFPConvertPrecision) \
  V(SVEFPConvertToInt) \
  V(SVEFPExponentialAccelerator) \
  V(SVEFPRoundToIntegralValue) \
  V(SVEFPTrigMulAddCoefficient) \
  V(SVEFPTrigSelectCoefficient) \
  V(SVEFPUnaryOp) \
  V(SVEIncDecRegisterByElementCount) \
  V(SVEIncDecVectorByElementCount) \
  V(SVEInsertSIMDFPScalarRegister) \
  V(SVEInsertGeneralRegister) \
  V(SVEIntAddSubtractImm_Unpredicated) \
  V(SVEIntAddSubtractVectors_Predicated) \
  V(SVEIntCompareScalarCountAndLimit) \
  V(SVEIntConvertToFP) \
  V(SVEIntDivideVectors_Predicated) \
  V(SVEIntMinMaxImm_Unpredicated) \
  V(SVEIntMinMaxDifference_Predicated) \
  V(SVEIntMulImm_Unpredicated) \
  V(SVEIntMulVectors_Predicated) \
  V(SVELoadAndBroadcastElement) \
  V(SVELoadAndBroadcastQuadword_ScalarPlusImm) \
  V(SVELoadAndBroadcastQuadword_ScalarPlusScalar) \
  V(SVELoadMultipleStructures_ScalarPlusImm) \
  V(SVELoadMultipleStructures_ScalarPlusScalar) \
  V(SVELoadPredicateRegister) \
  V(SVELoadVectorRegister) \
  V(SVEPartitionBreakCondition) \
  V(SVEPermutePredicateElements) \
  V(SVEPredicateFirstActive) \
  V(SVEPredicateInitialize) \
  V(SVEPredicateNextActive) \
  V(SVEPredicateReadFromFFR_Predicated) \
  V(SVEPredicateReadFromFFR_Unpredicated) \
  V(SVEPredicateTest) \
  V(SVEPredicateZero) \
  V(SVEPropagateBreakToNextPartition) \
  V(SVEReversePredicateElements) \
  V(SVEReverseVectorElements) \
  V(SVEReverseWithinElements) \
  V(SVESaturatingIncDecRegisterByElementCount) \
  V(SVESaturatingIncDecVectorByElementCount) \
  V(SVEStoreMultipleStructures_ScalarPlusImm) \
  V(SVEStoreMultipleStructures_ScalarPlusScalar) \
  V(SVEStorePredicateRegister) \
  V(SVEStoreVectorRegister) \
  V(SVETableLookup) \
  V(SVEUnpackPredicateElements) \
  V(SVEUnpackVectorElements) \
  V(SVEVectorSplice_Destructive) \
  V(System) \
  V(TestBranch) \
  V(Unallocated) \
  V(UnconditionalBranch) \
  V(UnconditionalBranchToRegister) \
  V(Unimplemented)

#define VISITOR_LIST_THAT_DONT_RETURN(V) V(Reserved)

#define VISITOR_LIST(V) \
  VISITOR_LIST_THAT_RETURN(V) \
  VISITOR_LIST_THAT_DONT_RETURN(V)
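
These macros follow the X-macro pattern: each instruction-form name is handed to a caller-supplied macro V, so the single list above can be expanded into method declarations, enum values, string tables, and so on. The sketch below shows one common way such a list is consumed; it is an illustration under stated assumptions, not the library's actual interface. The DecoderVisitor class, the Visit##NAME(const Instruction*) signature, and the shortened EXAMPLE_VISITOR_LIST are all hypothetical stand-ins introduced here for demonstration.

// Minimal sketch (assumptions): generate one pure-virtual handler per form,
// plus a parallel table of form names, from a single X-macro list.
// "Instruction", "DecoderVisitor", and EXAMPLE_VISITOR_LIST are placeholders.
#include <cstdio>

class Instruction;  // Opaque placeholder for a decoded instruction.

// Shortened stand-in for VISITOR_LIST(V), so the sketch stays self-contained.
#define EXAMPLE_VISITOR_LIST(V) \
  V(AddSubImmediate) \
  V(CompareBranch) \
  V(Unimplemented)

class DecoderVisitor {
 public:
  virtual ~DecoderVisitor() = default;
  // Expands to: virtual void VisitAddSubImmediate(const Instruction*) = 0; ...
#define DECLARE(NAME) virtual void Visit##NAME(const Instruction* instr) = 0;
  EXAMPLE_VISITOR_LIST(DECLARE)
#undef DECLARE
};

// The same list can also generate data, e.g. human-readable form names.
static const char* const kFormNames[] = {
#define NAME_ENTRY(NAME) #NAME,
    EXAMPLE_VISITOR_LIST(NAME_ENTRY)
#undef NAME_ENTRY
};

int main() {
  // Print the names generated from the list.
  for (const char* name : kFormNames) std::printf("%s\n", name);
  return 0;
}

Keeping the list in one place means adding a new instruction form is a one-line change: every expansion site (declarations, dispatch tables, name strings) picks it up automatically at the next build.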