/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "scheduler_arm64.h"

#include "code_generator_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"

namespace art HIDDEN {
namespace arm64 {

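// Latency model for the ARM64 instruction scheduler. Each Visit* method
// records the latency of the instruction producing the visited node's result
// (last_visited_latency_) and the cost of any additional instructions emitted
// before it (last_visited_internal_latency_). The kArm64* constants are
// scheduling heuristics rather than exact cycle counts for a particular core.
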
void SchedulingLatencyVisitorARM64::VisitBinaryOperation(HBinaryOperation* instr) {
  last_visited_latency_ = DataType::IsFloatingPointType(instr->GetResultType())
      ? kArm64FloatingPointOpLatency
      : kArm64IntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitBitwiseNegatedRight(
    HBitwiseNegatedRight* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64IntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64DataProcWithShifterOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitIntermediateAddress(
    HIntermediateAddress* ATTRIBUTE_UNUSED) {
  // Although the code generated is a simple `add` instruction, we found through empirical results
  // that spacing it from its use in memory accesses was beneficial.
  last_visited_latency_ = kArm64IntegerOpLatency + 2;
}

void SchedulingLatencyVisitorARM64::VisitIntermediateAddressIndex(
    HIntermediateAddressIndex* instr ATTRIBUTE_UNUSED) {
  // Although the code generated is a simple `add` instruction, we found through empirical results
  // that spacing it from its use in memory accesses was beneficial.
  last_visited_latency_ = kArm64DataProcWithShifterOpLatency + 2;
}

void SchedulingLatencyVisitorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MulIntegerLatency;
}

void SchedulingLatencyVisitorARM64::VisitArrayGet(HArrayGet* instruction) {
  if (!instruction->GetArray()->IsIntermediateAddress()) {
    // Take the intermediate address computation into account.
    last_visited_internal_latency_ = kArm64IntegerOpLatency;
  }
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitArrayLength(HArrayLength* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitArraySet(HArraySet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryStoreLatency;
}

void SchedulingLatencyVisitorARM64::VisitBoundsCheck(HBoundsCheck* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64IntegerOpLatency;
  // Users do not use any data results.
  last_visited_latency_ = 0;
}

void SchedulingLatencyVisitorARM64::VisitDiv(HDiv* instr) {
  DataType::Type type = instr->GetResultType();
  switch (type) {
    case DataType::Type::kFloat32:
      last_visited_latency_ = kArm64DivFloatLatency;
      break;
    case DataType::Type::kFloat64:
      last_visited_latency_ = kArm64DivDoubleLatency;
      break;
    default:
      // Follow the code path used by code generation.
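      // The cost depends on the constant divisor: division by zero generates
      // no code (a preceding HDivZeroCheck throws instead), +/-1 becomes a
      // move or negate, a power of two becomes a short add/shift sequence,
      // and any other constant uses a magic-number multiplication (see
      // CalculateMagicAndShiftForDivRem in code_generator_utils).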
      if (instr->GetRight()->IsConstant()) {
        int64_t imm = Int64FromConstant(instr->GetRight()->AsConstant());
        if (imm == 0) {
          last_visited_internal_latency_ = 0;
          last_visited_latency_ = 0;
        } else if (imm == 1 || imm == -1) {
          last_visited_internal_latency_ = 0;
          last_visited_latency_ = kArm64IntegerOpLatency;
        } else if (IsPowerOfTwo(AbsOrMin(imm))) {
          last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
          last_visited_latency_ = kArm64IntegerOpLatency;
        } else {
          DCHECK(imm <= -2 || imm >= 2);
          last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
          last_visited_latency_ = kArm64MulIntegerLatency;
        }
      } else {
        last_visited_latency_ = kArm64DivIntegerLatency;
      }
      break;
  }
}

void SchedulingLatencyVisitorARM64::VisitInstanceFieldGet(HInstanceFieldGet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitInstanceOf(HInstanceOf* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64CallInternalLatency;
  last_visited_latency_ = kArm64IntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitInvoke(HInvoke* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64CallInternalLatency;
  last_visited_latency_ = kArm64CallLatency;
}

void SchedulingLatencyVisitorARM64::VisitLoadString(HLoadString* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64LoadStringInternalLatency;
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitMul(HMul* instr) {
  last_visited_latency_ = DataType::IsFloatingPointType(instr->GetResultType())
      ? kArm64MulFloatingPointLatency
      : kArm64MulIntegerLatency;
}

void SchedulingLatencyVisitorARM64::VisitNewArray(HNewArray* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64IntegerOpLatency + kArm64CallInternalLatency;
  last_visited_latency_ = kArm64CallLatency;
}

void SchedulingLatencyVisitorARM64::VisitNewInstance(HNewInstance* instruction) {
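  // String allocation takes a different path: the allocation entry point is
  // first loaded from the current Thread, hence the extra memory load before
  // the call.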
  if (instruction->IsStringAlloc()) {
    last_visited_internal_latency_ = 2 + kArm64MemoryLoadLatency + kArm64CallInternalLatency;
  } else {
    last_visited_internal_latency_ = kArm64CallInternalLatency;
  }
  last_visited_latency_ = kArm64CallLatency;
}

void SchedulingLatencyVisitorARM64::VisitRem(HRem* instruction) {
  if (DataType::IsFloatingPointType(instruction->GetResultType())) {
    last_visited_internal_latency_ = kArm64CallInternalLatency;
    last_visited_latency_ = kArm64CallLatency;
  } else {
    // Follow the code path used by code generation.
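    // Constant divisors use the same strategies as in VisitDiv above; for a
    // non-constant divisor the remainder is computed as an sdiv followed by
    // an msub.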
    if (instruction->GetRight()->IsConstant()) {
      int64_t imm = Int64FromConstant(instruction->GetRight()->AsConstant());
      if (imm == 0) {
        last_visited_internal_latency_ = 0;
        last_visited_latency_ = 0;
      } else if (imm == 1 || imm == -1) {
        last_visited_internal_latency_ = 0;
        last_visited_latency_ = kArm64IntegerOpLatency;
      } else if (IsPowerOfTwo(AbsOrMin(imm))) {
        last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
        last_visited_latency_ = kArm64IntegerOpLatency;
      } else {
        DCHECK(imm <= -2 || imm >= 2);
        last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
        last_visited_latency_ = kArm64MulIntegerLatency;
      }
    } else {
      last_visited_internal_latency_ = kArm64DivIntegerLatency;
      last_visited_latency_ = kArm64MulIntegerLatency;
    }
  }
}

void SchedulingLatencyVisitorARM64::VisitStaticFieldGet(HStaticFieldGet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
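  // A suspend check either belongs to a loop or is the artificial one added
  // at method entry, which is the entry block's first instruction, followed
  // directly by the goto into the method body.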
  HBasicBlock* block = instruction->GetBlock();
  DCHECK_IMPLIES(block->GetLoopInformation() == nullptr,
                 block->IsEntryBlock() && instruction->GetNext()->IsGoto());
  // Users do not use any data results.
  last_visited_latency_ = 0;
}

void SchedulingLatencyVisitorARM64::VisitTypeConversion(HTypeConversion* instr) {
  if (DataType::IsFloatingPointType(instr->GetResultType()) ||
      DataType::IsFloatingPointType(instr->GetInputType())) {
    last_visited_latency_ = kArm64TypeConversionFloatingPointIntegerLatency;
  } else {
    last_visited_latency_ = kArm64IntegerOpLatency;
  }
}

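// Shared helper for SIMD operations whose latency only depends on whether
// the packed type is floating-point or integer.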
void SchedulingLatencyVisitorARM64::HandleSimpleArithmeticSIMD(HVecOperation* instr) {
  if (DataType::IsFloatingPointType(instr->GetPackedType())) {
    last_visited_latency_ = kArm64SIMDFloatingPointOpLatency;
  } else {
    last_visited_latency_ = kArm64SIMDIntegerOpLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecReplicateScalar(
    HVecReplicateScalar* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDReplicateOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecExtractScalar(HVecExtractScalar* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecReduce(HVecReduce* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecCnv(HVecCnv* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDTypeConversionInt2FPLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecNeg(HVecNeg* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecAbs(HVecAbs* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecNot(HVecNot* instr) {
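  // A boolean not needs one extra instruction: a vector of ones is
  // materialized first and then combined with the operand using EOR.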
  if (instr->GetPackedType() == DataType::Type::kBool) {
    last_visited_internal_latency_ = kArm64SIMDIntegerOpLatency;
  }
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecAdd(HVecAdd* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecHalvingAdd(HVecHalvingAdd* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecSub(HVecSub* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecMul(HVecMul* instr) {
  if (DataType::IsFloatingPointType(instr->GetPackedType())) {
    last_visited_latency_ = kArm64SIMDMulFloatingPointLatency;
  } else {
    last_visited_latency_ = kArm64SIMDMulIntegerLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecDiv(HVecDiv* instr) {
  if (instr->GetPackedType() == DataType::Type::kFloat32) {
    last_visited_latency_ = kArm64SIMDDivFloatLatency;
  } else {
    DCHECK(instr->GetPackedType() == DataType::Type::kFloat64);
    last_visited_latency_ = kArm64SIMDDivDoubleLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecMin(HVecMin* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecMax(HVecMax* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecAnd(HVecAnd* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecAndNot(HVecAndNot* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecOr(HVecOr* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecXor(HVecXor* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecShl(HVecShl* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecShr(HVecShr* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecUShr(HVecUShr* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecSetScalars(HVecSetScalars* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecMultiplyAccumulate(
    HVecMultiplyAccumulate* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDMulIntegerLatency;
}

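// Accounts for computing the address of a SIMD memory access: a constant
// index folds into the addressing mode, while a register index needs an
// extra add (with an embedded shift) to form the element address.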
void SchedulingLatencyVisitorARM64::HandleVecAddress(
    HVecMemoryOperation* instruction,
    size_t size ATTRIBUTE_UNUSED) {
  HInstruction* index = instruction->InputAt(1);
  if (!index->IsConstant()) {
    last_visited_internal_latency_ += kArm64DataProcWithShifterOpLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecLoad(HVecLoad* instr) {
  last_visited_internal_latency_ = 0;
  size_t size = DataType::Size(instr->GetPackedType());

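  // String.charAt may read from a compressed (8-bit) string, in which case
  // the generated code first loads the count field and branches on the
  // compression flag before the vector load.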
  if (instr->GetPackedType() == DataType::Type::kUint16
      && mirror::kUseStringCompression
      && instr->IsStringCharAt()) {
    // Set latencies for the uncompressed case.
    last_visited_internal_latency_ += kArm64MemoryLoadLatency + kArm64BranchLatency;
    HandleVecAddress(instr, size);
    last_visited_latency_ = kArm64SIMDMemoryLoadLatency;
  } else {
    HandleVecAddress(instr, size);
    last_visited_latency_ = kArm64SIMDMemoryLoadLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecStore(HVecStore* instr) {
  last_visited_internal_latency_ = 0;
  size_t size = DataType::Size(instr->GetPackedType());
  HandleVecAddress(instr, size);
  last_visited_latency_ = kArm64SIMDMemoryStoreLatency;
}

}  // namespace arm64
}  // namespace art