• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/ic/binary-op-assembler.h"
6 
7 #include "src/globals.h"
8 
9 namespace v8 {
10 namespace internal {
11 
12 using compiler::Node;
13 
// Lowers JS `+` with type-feedback collection. Fast paths are generated for
// Smi+Smi (with overflow handling), HeapNumber mixes, String+String, and
// BigInt+BigInt; Oddball operands and everything else fall back to the
// generic Add builtin. Feedback is accumulated in {var_type_feedback} and
// written to {feedback_vector} at {slot_id} exactly once, at the shared exit.
// When {rhs_is_smi} is true (AddSmi bytecode), the non-Smi paths are marked
// deferred so the Smi fast path stays hot.
Node* BinaryOpAssembler::Generate_AddWithFeedback(Node* context, Node* lhs,
                                                  Node* rhs, Node* slot_id,
                                                  Node* feedback_vector,
                                                  bool rhs_is_smi) {
  // Shared entry for floating point addition.
  Label do_fadd(this), if_lhsisnotnumber(this, Label::kDeferred),
      check_rhsisoddball(this, Label::kDeferred),
      call_with_oddball_feedback(this), call_with_any_feedback(this),
      call_add_stub(this), end(this), bigint(this, Label::kDeferred);
  // Operands for the shared float64 addition path.
  VARIABLE(var_fadd_lhs, MachineRepresentation::kFloat64);
  VARIABLE(var_fadd_rhs, MachineRepresentation::kFloat64);
  // Feedback Smi recorded at {end}; result value returned to the caller.
  VARIABLE(var_type_feedback, MachineRepresentation::kTaggedSigned);
  VARIABLE(var_result, MachineRepresentation::kTagged);

  // Check if the {lhs} is a Smi or a HeapObject.
  Label if_lhsissmi(this);
  // If rhs is known to be an Smi we want to fast path Smi operation. This is
  // for AddSmi operation. For the normal Add operation, we want to fast path
  // both Smi and Number operations, so this path should not be marked as
  // Deferred.
  Label if_lhsisnotsmi(this,
                       rhs_is_smi ? Label::kDeferred : Label::kNonDeferred);
  Branch(TaggedIsNotSmi(lhs), &if_lhsisnotsmi, &if_lhsissmi);

  BIND(&if_lhsissmi);
  {
    Comment("lhs is Smi");
    if (!rhs_is_smi) {
      // Check if the {rhs} is also a Smi.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsisnotsmi);
      {
        // Check if the {rhs} is a HeapNumber.
        GotoIfNot(IsHeapNumber(rhs), &check_rhsisoddball);

        // Smi + HeapNumber: route to the float path.
        var_fadd_lhs.Bind(SmiToFloat64(lhs));
        var_fadd_rhs.Bind(LoadHeapNumberValue(rhs));
        Goto(&do_fadd);
      }

      BIND(&if_rhsissmi);
    }

    {
      Comment("perform smi operation");
      // If rhs is known to be an Smi we want to fast path Smi operation. This
      // is for AddSmi operation. For the normal Add operation, we want to fast
      // path both Smi and Number operations, so this path should not be marked
      // as Deferred.
      Label if_overflow(this,
                        rhs_is_smi ? Label::kDeferred : Label::kNonDeferred);
      TNode<Smi> smi_result = TrySmiAdd(CAST(lhs), CAST(rhs), &if_overflow);
      // Not overflowed.
      {
        var_type_feedback.Bind(
            SmiConstant(BinaryOperationFeedback::kSignedSmall));
        var_result.Bind(smi_result);
        Goto(&end);
      }

      BIND(&if_overflow);
      {
        // Smi addition overflowed; redo it in float64.
        var_fadd_lhs.Bind(SmiToFloat64(lhs));
        var_fadd_rhs.Bind(SmiToFloat64(rhs));
        Goto(&do_fadd);
      }
    }
  }

  BIND(&if_lhsisnotsmi);
  {
    // Check if {lhs} is a HeapNumber.
    GotoIfNot(IsHeapNumber(lhs), &if_lhsisnotnumber);

    if (!rhs_is_smi) {
      // Check if the {rhs} is Smi.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsisnotsmi);
      {
        // Check if the {rhs} is a HeapNumber.
        GotoIfNot(IsHeapNumber(rhs), &check_rhsisoddball);

        // HeapNumber + HeapNumber.
        var_fadd_lhs.Bind(LoadHeapNumberValue(lhs));
        var_fadd_rhs.Bind(LoadHeapNumberValue(rhs));
        Goto(&do_fadd);
      }

      BIND(&if_rhsissmi);
    }
    {
      // HeapNumber + Smi (or HeapNumber + known-Smi rhs).
      var_fadd_lhs.Bind(LoadHeapNumberValue(lhs));
      var_fadd_rhs.Bind(SmiToFloat64(rhs));
      Goto(&do_fadd);
    }
  }

  BIND(&do_fadd);
  {
    // Float path: add, box the result in a fresh HeapNumber, record kNumber.
    var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kNumber));
    Node* value = Float64Add(var_fadd_lhs.value(), var_fadd_rhs.value());
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&end);
  }

  BIND(&if_lhsisnotnumber);
  {
    // No checks on rhs are done yet. We just know lhs is not a number or Smi.
    Label if_lhsisoddball(this), if_lhsisnotoddball(this);
    Node* lhs_instance_type = LoadInstanceType(lhs);
    Node* lhs_is_oddball = InstanceTypeEqual(lhs_instance_type, ODDBALL_TYPE);
    Branch(lhs_is_oddball, &if_lhsisoddball, &if_lhsisnotoddball);

    BIND(&if_lhsisoddball);
    {
      // Oddball + Smi/HeapNumber keeps kNumberOrOddball feedback; any other
      // rhs still needs the oddball check on its own side.
      GotoIf(TaggedIsSmi(rhs), &call_with_oddball_feedback);

      // Check if {rhs} is a HeapNumber.
      Branch(IsHeapNumber(rhs), &call_with_oddball_feedback,
             &check_rhsisoddball);
    }

    BIND(&if_lhsisnotoddball);
    {
      Label lhs_is_string(this), lhs_is_bigint(this);
      GotoIf(IsStringInstanceType(lhs_instance_type), &lhs_is_string);
      GotoIf(IsBigIntInstanceType(lhs_instance_type), &lhs_is_bigint);
      Goto(&call_with_any_feedback);

      BIND(&lhs_is_bigint);
      {
        // BigInt + BigInt only; anything else is generic.
        GotoIf(TaggedIsSmi(rhs), &call_with_any_feedback);
        Branch(IsBigInt(rhs), &bigint, &call_with_any_feedback);
      }

      BIND(&lhs_is_string);
      // Check if the {rhs} is a smi, and exit the string check early if it is.
      GotoIf(TaggedIsSmi(rhs), &call_with_any_feedback);

      Node* rhs_instance_type = LoadInstanceType(rhs);

      // Exit unless {rhs} is a string. Since {lhs} is a string we no longer
      // need an Oddball check.
      GotoIfNot(IsStringInstanceType(rhs_instance_type),
                &call_with_any_feedback);

      // String + String: concatenate via the StringAdd stub.
      var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kString));
      Callable callable =
          CodeFactory::StringAdd(isolate(), STRING_ADD_CHECK_NONE, NOT_TENURED);
      var_result.Bind(CallStub(callable, context, lhs, rhs));

      Goto(&end);
    }
  }

  BIND(&check_rhsisoddball);
  {
    // Check if rhs is an oddball. At this point we know lhs is either a
    // Smi or number or oddball and rhs is not a number or Smi.
    Node* rhs_instance_type = LoadInstanceType(rhs);
    Node* rhs_is_oddball = InstanceTypeEqual(rhs_instance_type, ODDBALL_TYPE);
    GotoIf(rhs_is_oddball, &call_with_oddball_feedback);
    Branch(IsBigIntInstanceType(rhs_instance_type), &bigint,
           &call_with_any_feedback);
  }

  BIND(&bigint);
  {
    // BigInt addition is handled in the runtime; record kBigInt feedback.
    var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kBigInt));
    var_result.Bind(CallRuntime(Runtime::kBigIntBinaryOp, context, lhs, rhs,
                                SmiConstant(Operation::kAdd)));
    Goto(&end);
  }

  BIND(&call_with_oddball_feedback);
  {
    var_type_feedback.Bind(
        SmiConstant(BinaryOperationFeedback::kNumberOrOddball));
    Goto(&call_add_stub);
  }

  BIND(&call_with_any_feedback);
  {
    var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kAny));
    Goto(&call_add_stub);
  }

  BIND(&call_add_stub);
  {
    // Generic fallback: the Add builtin handles all remaining type mixes.
    var_result.Bind(CallBuiltin(Builtins::kAdd, context, lhs, rhs));
    Goto(&end);
  }

  BIND(&end);
  // Single feedback write on the shared exit path.
  UpdateFeedback(var_type_feedback.value(), feedback_vector, slot_id);
  return var_result.value();
}
215 
// Shared lowering for Sub/Mul/Div/Mod with feedback collection. The
// operation-specific pieces are injected: {smiOperation} handles the
// Smi x Smi case (and sets its own feedback), {floatOperation} combines two
// float64 values, and {op} selects both the BigInt runtime operation and the
// generic builtin used in the fallback. Feedback is recorded once at the
// shared exit. When {rhs_is_smi} is true (the *Smi bytecode handlers), the
// non-Smi lhs path is marked deferred.
Node* BinaryOpAssembler::Generate_BinaryOperationWithFeedback(
    Node* context, Node* lhs, Node* rhs, Node* slot_id, Node* feedback_vector,
    const SmiOperation& smiOperation, const FloatOperation& floatOperation,
    Operation op, bool rhs_is_smi) {
  Label do_float_operation(this), end(this), call_stub(this),
      check_rhsisoddball(this, Label::kDeferred), call_with_any_feedback(this),
      if_lhsisnotnumber(this, Label::kDeferred),
      if_bigint(this, Label::kDeferred);
  // Operands for the shared float64 path.
  VARIABLE(var_float_lhs, MachineRepresentation::kFloat64);
  VARIABLE(var_float_rhs, MachineRepresentation::kFloat64);
  // Feedback Smi recorded at {end}; result value returned to the caller.
  VARIABLE(var_type_feedback, MachineRepresentation::kTaggedSigned);
  VARIABLE(var_result, MachineRepresentation::kTagged);

  Label if_lhsissmi(this);
  // If rhs is known to be an Smi (in the SubSmi, MulSmi, DivSmi, ModSmi
  // bytecode handlers) we want to fast path Smi operation. For the normal
  // operation, we want to fast path both Smi and Number operations, so this
  // path should not be marked as Deferred.
  Label if_lhsisnotsmi(this,
                       rhs_is_smi ? Label::kDeferred : Label::kNonDeferred);
  Branch(TaggedIsNotSmi(lhs), &if_lhsisnotsmi, &if_lhsissmi);

  // Check if the {lhs} is a Smi or a HeapObject.
  BIND(&if_lhsissmi);
  {
    Comment("lhs is Smi");
    if (!rhs_is_smi) {
      // Check if the {rhs} is also a Smi.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
      BIND(&if_rhsisnotsmi);
      {
        // Check if {rhs} is a HeapNumber.
        GotoIfNot(IsHeapNumber(rhs), &check_rhsisoddball);

        // Perform a floating point operation.
        var_float_lhs.Bind(SmiToFloat64(lhs));
        var_float_rhs.Bind(LoadHeapNumberValue(rhs));
        Goto(&do_float_operation);
      }

      BIND(&if_rhsissmi);
    }

    {
      Comment("perform smi operation");
      // The callback sets its own feedback (it may fall back to kNumber,
      // e.g. on overflow).
      var_result.Bind(smiOperation(lhs, rhs, &var_type_feedback));
      Goto(&end);
    }
  }

  BIND(&if_lhsisnotsmi);
  {
    Comment("lhs is not Smi");
    // Check if the {lhs} is a HeapNumber.
    GotoIfNot(IsHeapNumber(lhs), &if_lhsisnotnumber);

    if (!rhs_is_smi) {
      // Check if the {rhs} is a Smi.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsisnotsmi);
      {
        // Check if the {rhs} is a HeapNumber.
        GotoIfNot(IsHeapNumber(rhs), &check_rhsisoddball);

        // Perform a floating point operation.
        var_float_lhs.Bind(LoadHeapNumberValue(lhs));
        var_float_rhs.Bind(LoadHeapNumberValue(rhs));
        Goto(&do_float_operation);
      }

      BIND(&if_rhsissmi);
    }

    {
      // Perform floating point operation.
      var_float_lhs.Bind(LoadHeapNumberValue(lhs));
      var_float_rhs.Bind(SmiToFloat64(rhs));
      Goto(&do_float_operation);
    }
  }

  BIND(&do_float_operation);
  {
    // Float path: apply {floatOperation}, box the result, record kNumber.
    var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kNumber));
    Node* lhs_value = var_float_lhs.value();
    Node* rhs_value = var_float_rhs.value();
    Node* value = floatOperation(lhs_value, rhs_value);
    var_result.Bind(AllocateHeapNumberWithValue(value));
    Goto(&end);
  }

  BIND(&if_lhsisnotnumber);
  {
    // No checks on rhs are done yet. We just know lhs is not a number or Smi.
    Label if_left_bigint(this), if_left_oddball(this);
    Node* lhs_instance_type = LoadInstanceType(lhs);
    GotoIf(IsBigIntInstanceType(lhs_instance_type), &if_left_bigint);
    Node* lhs_is_oddball = InstanceTypeEqual(lhs_instance_type, ODDBALL_TYPE);
    Branch(lhs_is_oddball, &if_left_oddball, &call_with_any_feedback);

    BIND(&if_left_oddball);
    {
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsissmi);
      {
        // Oddball op Smi: generic stub with kNumberOrOddball feedback.
        var_type_feedback.Bind(
            SmiConstant(BinaryOperationFeedback::kNumberOrOddball));
        Goto(&call_stub);
      }

      BIND(&if_rhsisnotsmi);
      {
        // Check if {rhs} is a HeapNumber.
        GotoIfNot(IsHeapNumber(rhs), &check_rhsisoddball);

        var_type_feedback.Bind(
            SmiConstant(BinaryOperationFeedback::kNumberOrOddball));
        Goto(&call_stub);
      }
    }

    BIND(&if_left_bigint);
    {
      // BigInt op BigInt only; other rhs types are generic.
      GotoIf(TaggedIsSmi(rhs), &call_with_any_feedback);
      Branch(IsBigInt(rhs), &if_bigint, &call_with_any_feedback);
    }
  }

  BIND(&check_rhsisoddball);
  {
    // Check if rhs is an oddball. At this point we know lhs is either a
    // Smi or number or oddball and rhs is not a number or Smi.
    Node* rhs_instance_type = LoadInstanceType(rhs);
    GotoIf(IsBigIntInstanceType(rhs_instance_type), &if_bigint);
    Node* rhs_is_oddball = InstanceTypeEqual(rhs_instance_type, ODDBALL_TYPE);
    GotoIfNot(rhs_is_oddball, &call_with_any_feedback);

    var_type_feedback.Bind(
        SmiConstant(BinaryOperationFeedback::kNumberOrOddball));
    Goto(&call_stub);
  }

  // This handles the case where at least one input is a BigInt.
  BIND(&if_bigint);
  {
    var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kBigInt));
    var_result.Bind(CallRuntime(Runtime::kBigIntBinaryOp, context, lhs, rhs,
                                SmiConstant(op)));
    Goto(&end);
  }

  BIND(&call_with_any_feedback);
  {
    var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kAny));
    Goto(&call_stub);
  }

  BIND(&call_stub);
  {
    // Generic fallback: dispatch to the builtin matching {op}. Note {op} is
    // a C++-time constant here, so this switch is resolved at stub-generation
    // time, not at runtime.
    Node* result;
    switch (op) {
      case Operation::kSubtract:
        result = CallBuiltin(Builtins::kSubtract, context, lhs, rhs);
        break;
      case Operation::kMultiply:
        result = CallBuiltin(Builtins::kMultiply, context, lhs, rhs);
        break;
      case Operation::kDivide:
        result = CallBuiltin(Builtins::kDivide, context, lhs, rhs);
        break;
      case Operation::kModulus:
        result = CallBuiltin(Builtins::kModulus, context, lhs, rhs);
        break;
      default:
        UNREACHABLE();
    }
    var_result.Bind(result);
    Goto(&end);
  }

  BIND(&end);
  // Single feedback write on the shared exit path.
  UpdateFeedback(var_type_feedback.value(), feedback_vector, slot_id);
  return var_result.value();
}
405 
Generate_SubtractWithFeedback(Node * context,Node * lhs,Node * rhs,Node * slot_id,Node * feedback_vector,bool rhs_is_smi)406 Node* BinaryOpAssembler::Generate_SubtractWithFeedback(Node* context, Node* lhs,
407                                                        Node* rhs, Node* slot_id,
408                                                        Node* feedback_vector,
409                                                        bool rhs_is_smi) {
410   auto smiFunction = [=](Node* lhs, Node* rhs, Variable* var_type_feedback) {
411     Label end(this);
412     TVARIABLE(Number, var_result);
413     // If rhs is known to be an Smi (for SubSmi) we want to fast path Smi
414     // operation. For the normal Sub operation, we want to fast path both
415     // Smi and Number operations, so this path should not be marked as Deferred.
416     Label if_overflow(this,
417                       rhs_is_smi ? Label::kDeferred : Label::kNonDeferred);
418     var_result = TrySmiSub(CAST(lhs), CAST(rhs), &if_overflow);
419     var_type_feedback->Bind(SmiConstant(BinaryOperationFeedback::kSignedSmall));
420     Goto(&end);
421 
422     BIND(&if_overflow);
423     {
424       var_type_feedback->Bind(SmiConstant(BinaryOperationFeedback::kNumber));
425       Node* value = Float64Sub(SmiToFloat64(lhs), SmiToFloat64(rhs));
426       var_result = AllocateHeapNumberWithValue(value);
427       Goto(&end);
428     }
429 
430     BIND(&end);
431     return var_result.value();
432   };
433   auto floatFunction = [=](Node* lhs, Node* rhs) {
434     return Float64Sub(lhs, rhs);
435   };
436   return Generate_BinaryOperationWithFeedback(
437       context, lhs, rhs, slot_id, feedback_vector, smiFunction, floatFunction,
438       Operation::kSubtract, rhs_is_smi);
439 }
440 
Generate_MultiplyWithFeedback(Node * context,Node * lhs,Node * rhs,Node * slot_id,Node * feedback_vector,bool rhs_is_smi)441 Node* BinaryOpAssembler::Generate_MultiplyWithFeedback(Node* context, Node* lhs,
442                                                        Node* rhs, Node* slot_id,
443                                                        Node* feedback_vector,
444                                                        bool rhs_is_smi) {
445   auto smiFunction = [=](Node* lhs, Node* rhs, Variable* var_type_feedback) {
446     TNode<Number> result = SmiMul(CAST(lhs), CAST(rhs));
447     var_type_feedback->Bind(SelectSmiConstant(
448         TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall,
449         BinaryOperationFeedback::kNumber));
450     return result;
451   };
452   auto floatFunction = [=](Node* lhs, Node* rhs) {
453     return Float64Mul(lhs, rhs);
454   };
455   return Generate_BinaryOperationWithFeedback(
456       context, lhs, rhs, slot_id, feedback_vector, smiFunction, floatFunction,
457       Operation::kMultiply, rhs_is_smi);
458 }
459 
Generate_DivideWithFeedback(Node * context,Node * dividend,Node * divisor,Node * slot_id,Node * feedback_vector,bool rhs_is_smi)460 Node* BinaryOpAssembler::Generate_DivideWithFeedback(
461     Node* context, Node* dividend, Node* divisor, Node* slot_id,
462     Node* feedback_vector, bool rhs_is_smi) {
463   auto smiFunction = [=](Node* lhs, Node* rhs, Variable* var_type_feedback) {
464     VARIABLE(var_result, MachineRepresentation::kTagged);
465     // If rhs is known to be an Smi (for DivSmi) we want to fast path Smi
466     // operation. For the normal Div operation, we want to fast path both
467     // Smi and Number operations, so this path should not be marked as Deferred.
468     Label bailout(this, rhs_is_smi ? Label::kDeferred : Label::kNonDeferred),
469         end(this);
470     var_result.Bind(TrySmiDiv(CAST(lhs), CAST(rhs), &bailout));
471     var_type_feedback->Bind(SmiConstant(BinaryOperationFeedback::kSignedSmall));
472     Goto(&end);
473 
474     BIND(&bailout);
475     {
476       var_type_feedback->Bind(
477           SmiConstant(BinaryOperationFeedback::kSignedSmallInputs));
478       Node* value = Float64Div(SmiToFloat64(lhs), SmiToFloat64(rhs));
479       var_result.Bind(AllocateHeapNumberWithValue(value));
480       Goto(&end);
481     }
482 
483     BIND(&end);
484     return var_result.value();
485   };
486   auto floatFunction = [=](Node* lhs, Node* rhs) {
487     return Float64Div(lhs, rhs);
488   };
489   return Generate_BinaryOperationWithFeedback(
490       context, dividend, divisor, slot_id, feedback_vector, smiFunction,
491       floatFunction, Operation::kDivide, rhs_is_smi);
492 }
493 
Generate_ModulusWithFeedback(Node * context,Node * dividend,Node * divisor,Node * slot_id,Node * feedback_vector,bool rhs_is_smi)494 Node* BinaryOpAssembler::Generate_ModulusWithFeedback(
495     Node* context, Node* dividend, Node* divisor, Node* slot_id,
496     Node* feedback_vector, bool rhs_is_smi) {
497   auto smiFunction = [=](Node* lhs, Node* rhs, Variable* var_type_feedback) {
498     TNode<Number> result = SmiMod(CAST(lhs), CAST(rhs));
499     var_type_feedback->Bind(SelectSmiConstant(
500         TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall,
501         BinaryOperationFeedback::kNumber));
502     return result;
503   };
504   auto floatFunction = [=](Node* lhs, Node* rhs) {
505     return Float64Mod(lhs, rhs);
506   };
507   return Generate_BinaryOperationWithFeedback(
508       context, dividend, divisor, slot_id, feedback_vector, smiFunction,
509       floatFunction, Operation::kModulus, rhs_is_smi);
510 }
511 
Generate_ExponentiateWithFeedback(Node * context,Node * base,Node * exponent,Node * slot_id,Node * feedback_vector,bool rhs_is_smi)512 Node* BinaryOpAssembler::Generate_ExponentiateWithFeedback(
513     Node* context, Node* base, Node* exponent, Node* slot_id,
514     Node* feedback_vector, bool rhs_is_smi) {
515   // We currently don't optimize exponentiation based on feedback.
516   Node* dummy_feedback = SmiConstant(BinaryOperationFeedback::kAny);
517   UpdateFeedback(dummy_feedback, feedback_vector, slot_id);
518   return CallBuiltin(Builtins::kExponentiate, context, base, exponent);
519 }
520 
521 }  // namespace internal
522 }  // namespace v8
523