// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/backend/instruction-scheduler.h"

namespace v8 {
namespace internal {
namespace compiler {

bool InstructionScheduler::SchedulerSupported() { return true; }

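// Classifies each arm64 opcode for the scheduler: pure computations get no
// flags, memory reads are flagged as loads, and writes/barriers are flagged
// as having side effects.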
int InstructionScheduler::GetTargetInstructionFlags(
    const Instruction* instr) const {
  switch (instr->arch_opcode()) {
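    // Arithmetic, logical, shift, conversion and SIMD operations neither read
    // nor write memory, so they carry no special scheduling constraints.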
    case kArm64Add:
    case kArm64Add32:
    case kArm64And:
    case kArm64And32:
    case kArm64Bic:
    case kArm64Bic32:
    case kArm64Clz:
    case kArm64Clz32:
    case kArm64Cmp:
    case kArm64Cmp32:
    case kArm64Cmn:
    case kArm64Cmn32:
    case kArm64Cnt:
    case kArm64Cnt32:
    case kArm64Cnt64:
    case kArm64Tst:
    case kArm64Tst32:
    case kArm64Or:
    case kArm64Or32:
    case kArm64Orn:
    case kArm64Orn32:
    case kArm64Eor:
    case kArm64Eor32:
    case kArm64Eon:
    case kArm64Eon32:
    case kArm64Sadalp:
    case kArm64Saddlp:
    case kArm64Sub:
    case kArm64Sub32:
    case kArm64Mul:
    case kArm64Mul32:
    case kArm64Smlal:
    case kArm64Smlal2:
    case kArm64Smull:
    case kArm64Smull2:
    case kArm64Uadalp:
    case kArm64Uaddlp:
    case kArm64Umlal:
    case kArm64Umlal2:
    case kArm64Umull:
    case kArm64Umull2:
    case kArm64Madd:
    case kArm64Madd32:
    case kArm64Msub:
    case kArm64Msub32:
    case kArm64Mneg:
    case kArm64Mneg32:
    case kArm64Idiv:
    case kArm64Idiv32:
    case kArm64Udiv:
    case kArm64Udiv32:
    case kArm64Imod:
    case kArm64Imod32:
    case kArm64Umod:
    case kArm64Umod32:
    case kArm64Not:
    case kArm64Not32:
    case kArm64Lsl:
    case kArm64Lsl32:
    case kArm64Lsr:
    case kArm64Lsr32:
    case kArm64Asr:
    case kArm64Asr32:
    case kArm64Ror:
    case kArm64Ror32:
    case kArm64Mov32:
    case kArm64Sxtb:
    case kArm64Sxtb32:
    case kArm64Sxth:
    case kArm64Sxth32:
    case kArm64Sxtw:
    case kArm64Sbfx:
    case kArm64Sbfx32:
    case kArm64Ubfx:
    case kArm64Ubfx32:
    case kArm64Ubfiz32:
    case kArm64Bfi:
    case kArm64Rbit:
    case kArm64Rbit32:
    case kArm64Rev:
    case kArm64Rev32:
    case kArm64Float32Cmp:
    case kArm64Float32Add:
    case kArm64Float32Sub:
    case kArm64Float32Mul:
    case kArm64Float32Div:
    case kArm64Float32Abs:
    case kArm64Float32Abd:
    case kArm64Float32Neg:
    case kArm64Float32Sqrt:
    case kArm64Float32Fnmul:
    case kArm64Float32RoundDown:
    case kArm64Float32Max:
    case kArm64Float32Min:
    case kArm64Float64Cmp:
    case kArm64Float64Add:
    case kArm64Float64Sub:
    case kArm64Float64Mul:
    case kArm64Float64Div:
    case kArm64Float64Max:
    case kArm64Float64Min:
    case kArm64Float64Abs:
    case kArm64Float64Abd:
    case kArm64Float64Neg:
    case kArm64Float64Sqrt:
    case kArm64Float64Fnmul:
    case kArm64Float64RoundDown:
    case kArm64Float64RoundTiesAway:
    case kArm64Float64RoundTruncate:
    case kArm64Float64RoundTiesEven:
    case kArm64Float64RoundUp:
    case kArm64Float32RoundTiesEven:
    case kArm64Float32RoundTruncate:
    case kArm64Float32RoundUp:
    case kArm64Float32ToFloat64:
    case kArm64Float64ToFloat32:
    case kArm64Float32ToInt32:
    case kArm64Float64ToInt32:
    case kArm64Float32ToUint32:
    case kArm64Float64ToUint32:
    case kArm64Float32ToInt64:
    case kArm64Float64ToInt64:
    case kArm64Float32ToUint64:
    case kArm64Float64ToUint64:
    case kArm64Int32ToFloat32:
    case kArm64Int32ToFloat64:
    case kArm64Int64ToFloat32:
    case kArm64Int64ToFloat64:
    case kArm64Uint32ToFloat32:
    case kArm64Uint32ToFloat64:
    case kArm64Uint64ToFloat32:
    case kArm64Uint64ToFloat64:
    case kArm64Float64ExtractLowWord32:
    case kArm64Float64ExtractHighWord32:
    case kArm64Float64InsertLowWord32:
    case kArm64Float64InsertHighWord32:
    case kArm64Float64Mod:
    case kArm64Float64MoveU64:
    case kArm64U64MoveFloat64:
    case kArm64Float64SilenceNaN:
    case kArm64FExtractLane:
    case kArm64FReplaceLane:
    case kArm64FSplat:
    case kArm64FAbs:
    case kArm64FSqrt:
    case kArm64FNeg:
    case kArm64FAdd:
    case kArm64FSub:
    case kArm64FMul:
    case kArm64FMulElement:
    case kArm64FDiv:
    case kArm64FMin:
    case kArm64FMax:
    case kArm64FEq:
    case kArm64FNe:
    case kArm64FLt:
    case kArm64FLe:
    case kArm64FGt:
    case kArm64FGe:
    case kArm64F64x2Qfma:
    case kArm64F64x2Qfms:
    case kArm64F64x2Pmin:
    case kArm64F64x2Pmax:
    case kArm64F64x2ConvertLowI32x4S:
    case kArm64F64x2ConvertLowI32x4U:
    case kArm64F64x2PromoteLowF32x4:
    case kArm64F32x4SConvertI32x4:
    case kArm64F32x4UConvertI32x4:
    case kArm64F32x4RecipApprox:
    case kArm64F32x4RecipSqrtApprox:
    case kArm64F32x4Qfma:
    case kArm64F32x4Qfms:
    case kArm64F32x4Pmin:
    case kArm64F32x4Pmax:
    case kArm64F32x4DemoteF64x2Zero:
    case kArm64IExtractLane:
    case kArm64IReplaceLane:
    case kArm64ISplat:
    case kArm64IAbs:
    case kArm64INeg:
    case kArm64Mla:
    case kArm64Mls:
    case kArm64RoundingAverageU:
    case kArm64I64x2Shl:
    case kArm64I64x2ShrS:
    case kArm64IAdd:
    case kArm64ISub:
    case kArm64I64x2Mul:
    case kArm64IEq:
    case kArm64INe:
    case kArm64IGtS:
    case kArm64IGeS:
    case kArm64ILtS:
    case kArm64ILeS:
    case kArm64I64x2ShrU:
    case kArm64I64x2BitMask:
    case kArm64I32x4SConvertF32x4:
    case kArm64Sxtl:
    case kArm64Sxtl2:
    case kArm64Uxtl:
    case kArm64Uxtl2:
    case kArm64I32x4Shl:
    case kArm64I32x4ShrS:
    case kArm64I32x4Mul:
    case kArm64IMinS:
    case kArm64IMaxS:
    case kArm64I32x4UConvertF32x4:
    case kArm64I32x4ShrU:
    case kArm64IMinU:
    case kArm64IMaxU:
    case kArm64IGtU:
    case kArm64IGeU:
    case kArm64I32x4BitMask:
    case kArm64I32x4DotI16x8S:
    case kArm64I32x4TruncSatF64x2SZero:
    case kArm64I32x4TruncSatF64x2UZero:
    case kArm64IExtractLaneU:
    case kArm64IExtractLaneS:
    case kArm64I16x8Shl:
    case kArm64I16x8ShrS:
    case kArm64I16x8SConvertI32x4:
    case kArm64IAddSatS:
    case kArm64ISubSatS:
    case kArm64I16x8Mul:
    case kArm64I16x8ShrU:
    case kArm64I16x8UConvertI32x4:
    case kArm64IAddSatU:
    case kArm64ISubSatU:
    case kArm64I16x8Q15MulRSatS:
    case kArm64I16x8BitMask:
    case kArm64I8x16Shl:
    case kArm64I8x16ShrS:
    case kArm64I8x16SConvertI16x8:
    case kArm64I8x16UConvertI16x8:
    case kArm64I8x16ShrU:
    case kArm64I8x16BitMask:
    case kArm64S128Const:
    case kArm64S128Zero:
    case kArm64S128Dup:
    case kArm64S128And:
    case kArm64S128Or:
    case kArm64S128Xor:
    case kArm64S128Not:
    case kArm64S128Select:
    case kArm64S128AndNot:
    case kArm64Ssra:
    case kArm64Usra:
    case kArm64S32x4ZipLeft:
    case kArm64S32x4ZipRight:
    case kArm64S32x4UnzipLeft:
    case kArm64S32x4UnzipRight:
    case kArm64S32x4TransposeLeft:
    case kArm64S32x4TransposeRight:
    case kArm64S32x4Shuffle:
    case kArm64S16x8ZipLeft:
    case kArm64S16x8ZipRight:
    case kArm64S16x8UnzipLeft:
    case kArm64S16x8UnzipRight:
    case kArm64S16x8TransposeLeft:
    case kArm64S16x8TransposeRight:
    case kArm64S8x16ZipLeft:
    case kArm64S8x16ZipRight:
    case kArm64S8x16UnzipLeft:
    case kArm64S8x16UnzipRight:
    case kArm64S8x16TransposeLeft:
    case kArm64S8x16TransposeRight:
    case kArm64S8x16Concat:
    case kArm64I8x16Swizzle:
    case kArm64I8x16Shuffle:
    case kArm64S32x2Reverse:
    case kArm64S16x4Reverse:
    case kArm64S16x2Reverse:
    case kArm64S8x8Reverse:
    case kArm64S8x4Reverse:
    case kArm64S8x2Reverse:
    case kArm64V128AnyTrue:
    case kArm64I64x2AllTrue:
    case kArm64I32x4AllTrue:
    case kArm64I16x8AllTrue:
    case kArm64I8x16AllTrue:
    case kArm64TestAndBranch32:
    case kArm64TestAndBranch:
    case kArm64CompareAndBranch32:
    case kArm64CompareAndBranch:
      return kNoOpcodeFlags;

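    // These opcodes read from memory and are flagged as loads so the scheduler
    // keeps them ordered with respect to side-effecting instructions.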
    case kArm64LdrS:
    case kArm64LdrD:
    case kArm64LdrQ:
    case kArm64Ldrb:
    case kArm64Ldrsb:
    case kArm64LdrsbW:
    case kArm64Ldrh:
    case kArm64Ldrsh:
    case kArm64LdrshW:
    case kArm64Ldrsw:
    case kArm64LdrW:
    case kArm64Ldr:
    case kArm64LdrDecompressTaggedSigned:
    case kArm64LdrDecompressTaggedPointer:
    case kArm64LdrDecompressAnyTagged:
    case kArm64LdarDecompressTaggedSigned:
    case kArm64LdarDecompressTaggedPointer:
    case kArm64LdarDecompressAnyTagged:
    case kArm64LdrDecodeSandboxedPointer:
    case kArm64Peek:
    case kArm64LoadSplat:
    case kArm64LoadLane:
    case kArm64S128Load8x8S:
    case kArm64S128Load8x8U:
    case kArm64S128Load16x4S:
    case kArm64S128Load16x4U:
    case kArm64S128Load32x2S:
    case kArm64S128Load32x2U:
      return kIsLoadOperation;

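    // Stores, stack manipulation and memory barriers have side effects and
    // keep their order relative to each other and to loads.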
    case kArm64Claim:
    case kArm64Poke:
    case kArm64PokePair:
    case kArm64StrS:
    case kArm64StrD:
    case kArm64StrQ:
    case kArm64Strb:
    case kArm64Strh:
    case kArm64StrW:
    case kArm64Str:
    case kArm64StrCompressTagged:
    case kArm64StlrCompressTagged:
    case kArm64StrEncodeSandboxedPointer:
    case kArm64DmbIsh:
    case kArm64DsbIsb:
    case kArm64StoreLane:
      return kHasSideEffect;

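    // The atomic load is still just a load; the remaining atomic operations
    // write memory and therefore have side effects.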
    case kArm64Word64AtomicLoadUint64:
      return kIsLoadOperation;

    case kArm64Word64AtomicStoreWord64:
    case kArm64Word64AtomicAddUint64:
    case kArm64Word64AtomicSubUint64:
    case kArm64Word64AtomicAndUint64:
    case kArm64Word64AtomicOrUint64:
    case kArm64Word64AtomicXorUint64:
    case kArm64Word64AtomicExchangeUint64:
    case kArm64Word64AtomicCompareExchangeUint64:
      return kHasSideEffect;

#define CASE(Name) case k##Name:
      COMMON_ARCH_OPCODE_LIST(CASE)
#undef CASE
      // Already covered in architecture independent code.
      UNREACHABLE();
  }

  UNREACHABLE();
}

int InstructionScheduler::GetInstructionLatency(const Instruction* instr) {
  // Basic latency modeling for arm64 instructions. The latencies have been
  // determined empirically.
  switch (instr->arch_opcode()) {
    case kArm64Add:
    case kArm64Add32:
    case kArm64And:
    case kArm64And32:
    case kArm64Bic:
    case kArm64Bic32:
    case kArm64Cmn:
    case kArm64Cmn32:
    case kArm64Cmp:
    case kArm64Cmp32:
    case kArm64Eon:
    case kArm64Eon32:
    case kArm64Eor:
    case kArm64Eor32:
    case kArm64Not:
    case kArm64Not32:
    case kArm64Or:
    case kArm64Or32:
    case kArm64Orn:
    case kArm64Orn32:
    case kArm64Sub:
    case kArm64Sub32:
    case kArm64Tst:
    case kArm64Tst32:
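      // An addressing mode other than kMode_None here means a shifted or
      // extended operand, which is modeled with extra latency.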
      if (instr->addressing_mode() != kMode_None) {
        return 3;
      } else {
        return 1;
      }

    case kArm64Clz:
    case kArm64Clz32:
    case kArm64Sbfx:
    case kArm64Sbfx32:
    case kArm64Sxtb32:
    case kArm64Sxth32:
    case kArm64Sxtw:
    case kArm64Ubfiz32:
    case kArm64Ubfx:
    case kArm64Ubfx32:
      return 1;

    case kArm64Lsl:
    case kArm64Lsl32:
    case kArm64Lsr:
    case kArm64Lsr32:
    case kArm64Asr:
    case kArm64Asr32:
    case kArm64Ror:
    case kArm64Ror32:
      return 1;

    case kArm64LdrDecompressTaggedSigned:
    case kArm64LdrDecompressTaggedPointer:
    case kArm64LdrDecompressAnyTagged:
    case kArm64Ldr:
    case kArm64LdrD:
    case kArm64LdrS:
    case kArm64LdrW:
    case kArm64Ldrb:
    case kArm64Ldrh:
    case kArm64Ldrsb:
    case kArm64Ldrsh:
    case kArm64Ldrsw:
      return 11;

    case kArm64Str:
    case kArm64StrD:
    case kArm64StrS:
    case kArm64StrW:
    case kArm64Strb:
    case kArm64Strh:
      return 1;

    case kArm64Madd32:
    case kArm64Mneg32:
    case kArm64Msub32:
    case kArm64Mul32:
      return 3;

    case kArm64Madd:
    case kArm64Mneg:
    case kArm64Msub:
    case kArm64Mul:
      return 5;

    case kArm64Idiv32:
    case kArm64Udiv32:
      return 12;

    case kArm64Idiv:
    case kArm64Udiv:
      return 20;

    case kArm64Float32Add:
    case kArm64Float32Sub:
    case kArm64Float64Add:
    case kArm64Float64Sub:
      return 5;

    case kArm64Float32Abs:
    case kArm64Float32Cmp:
    case kArm64Float32Neg:
    case kArm64Float64Abs:
    case kArm64Float64Cmp:
    case kArm64Float64Neg:
      return 3;

    case kArm64Float32Div:
    case kArm64Float32Sqrt:
      return 12;

    case kArm64Float64Div:
    case kArm64Float64Sqrt:
      return 19;

    case kArm64Float32RoundDown:
    case kArm64Float32RoundTiesEven:
    case kArm64Float32RoundTruncate:
    case kArm64Float32RoundUp:
    case kArm64Float64RoundDown:
    case kArm64Float64RoundTiesAway:
    case kArm64Float64RoundTiesEven:
    case kArm64Float64RoundTruncate:
    case kArm64Float64RoundUp:
      return 5;

    case kArm64Float32ToFloat64:
    case kArm64Float64ToFloat32:
    case kArm64Float64ToInt32:
    case kArm64Float64ToUint32:
    case kArm64Float32ToInt64:
    case kArm64Float64ToInt64:
    case kArm64Float32ToUint64:
    case kArm64Float64ToUint64:
    case kArm64Int32ToFloat64:
    case kArm64Int64ToFloat32:
    case kArm64Int64ToFloat64:
    case kArm64Uint32ToFloat64:
    case kArm64Uint64ToFloat32:
    case kArm64Uint64ToFloat64:
      return 5;

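    // Assume a latency of two cycles for any opcode not modeled above.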
    default:
      return 2;
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8