// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/codegen.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {

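// Map a comparison token to the corresponding ARM64 condition code.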
Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return al;
  }
}

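// A smi check was inlined at the call site if the patch information emitted
// by JumpPatchSite::EmitPatchInfo() (an inline-data marker) directly follows
// the call instruction.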
bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address info_address = Assembler::return_address_from_call_start(address);

  InstructionSequence* patch_info = InstructionSequence::At(info_address);
  return patch_info->IsInlineData();
}


// Activate a SMI fast-path by patching the instructions generated by
// JumpPatchSite::EmitJumpIf(Not)Smi(), using the information encoded by
// JumpPatchSite::EmitPatchInfo().
void PatchInlinedSmiCode(Isolate* isolate, Address address,
                         InlinedSmiCheck check) {
  // The patch information is encoded in the instruction stream using
  // instructions which have no side effects, so we can safely execute them.
  // The patch information is encoded directly after the call to the helper
  // function which is requesting this patch operation.
  Address info_address = Assembler::return_address_from_call_start(address);
  InlineSmiCheckInfo info(info_address);

  // Check and decode the patch information instruction.
  if (!info.HasSmiCheck()) {
    return;
  }

  if (FLAG_trace_ic) {
    LOG(isolate, PatchIC(address, info_address, info.SmiCheckDelta()));
  }

  // Patch and activate code generated by JumpPatchSite::EmitJumpIfNotSmi()
  // and JumpPatchSite::EmitJumpIfSmi().
  // Changing
  //   tb(n)z xzr, #0, <target>
  // to
  //   tb(!n)z test_reg, #0, <target>
  Instruction* to_patch = info.SmiCheck();
  PatchingAssembler patcher(isolate, to_patch, 1);
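  // The patcher rewrites a single instruction in place at the smi-check site.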
  DCHECK(to_patch->IsTestBranch());
  DCHECK(to_patch->ImmTestBranchBit5() == 0);
  DCHECK(to_patch->ImmTestBranchBit40() == 0);

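  // Smis are tagged with a zero bit 0 (kSmiTag == 0, kSmiTagMask == 1), so
  // testing bit 0 is enough to distinguish smis from heap objects.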
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagMask == 1);

  int branch_imm = to_patch->ImmTestBranch();
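  // Enabling the check substitutes the register holding the value under test
  // for xzr; disabling it substitutes xzr back.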
  Register smi_reg;
  if (check == ENABLE_INLINED_SMI_CHECK) {
    DCHECK(to_patch->Rt() == xzr.code());
    smi_reg = info.SmiRegister();
  } else {
    DCHECK(check == DISABLE_INLINED_SMI_CHECK);
    DCHECK(to_patch->Rt() != xzr.code());
    smi_reg = xzr;
  }

  if (to_patch->Mask(TestBranchMask) == TBZ) {
    // This is JumpIfNotSmi(smi_reg, branch_imm).
    patcher.tbnz(smi_reg, 0, branch_imm);
  } else {
    DCHECK(to_patch->Mask(TestBranchMask) == TBNZ);
    // This is JumpIfSmi(smi_reg, branch_imm).
    patcher.tbz(smi_reg, 0, branch_imm);
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64