1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are
6 // met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution.
14 //
15 // - Neither the name of Sun Microsystems or the names of contributors may
16 // be used to endorse or promote products derived from this software without
17 // specific prior written permission.
18 //
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
20 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
26 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 
31 // The original source code covered by the above license has been
32 // modified significantly by Google Inc.
33 // Copyright 2012 the V8 project authors. All rights reserved.
34 
35 #include "src/assembler.h"
36 
37 #include <cmath>
38 #include "src/api.h"
39 #include "src/base/cpu.h"
40 #include "src/base/lazy-instance.h"
41 #include "src/base/platform/platform.h"
42 #include "src/builtins.h"
43 #include "src/codegen.h"
44 #include "src/counters.h"
45 #include "src/cpu-profiler.h"
46 #include "src/debug.h"
47 #include "src/deoptimizer.h"
48 #include "src/execution.h"
49 #include "src/ic/ic.h"
50 #include "src/ic/stub-cache.h"
51 #include "src/isolate-inl.h"
52 #include "src/jsregexp.h"
53 #include "src/regexp-macro-assembler.h"
54 #include "src/regexp-stack.h"
55 #include "src/runtime.h"
56 #include "src/serialize.h"
57 #include "src/token.h"
58 
59 #if V8_TARGET_ARCH_IA32
60 #include "src/ia32/assembler-ia32-inl.h"  // NOLINT
61 #elif V8_TARGET_ARCH_X64
62 #include "src/x64/assembler-x64-inl.h"  // NOLINT
63 #elif V8_TARGET_ARCH_ARM64
64 #include "src/arm64/assembler-arm64-inl.h"  // NOLINT
65 #elif V8_TARGET_ARCH_ARM
66 #include "src/arm/assembler-arm-inl.h"  // NOLINT
67 #elif V8_TARGET_ARCH_MIPS
68 #include "src/mips/assembler-mips-inl.h"  // NOLINT
69 #elif V8_TARGET_ARCH_MIPS64
70 #include "src/mips64/assembler-mips64-inl.h"  // NOLINT
71 #elif V8_TARGET_ARCH_X87
72 #include "src/x87/assembler-x87-inl.h"  // NOLINT
73 #else
74 #error "Unknown architecture."
75 #endif
76 
77 // Include native regexp-macro-assembler.
78 #ifndef V8_INTERPRETED_REGEXP
79 #if V8_TARGET_ARCH_IA32
80 #include "src/ia32/regexp-macro-assembler-ia32.h"  // NOLINT
81 #elif V8_TARGET_ARCH_X64
82 #include "src/x64/regexp-macro-assembler-x64.h"  // NOLINT
83 #elif V8_TARGET_ARCH_ARM64
84 #include "src/arm64/regexp-macro-assembler-arm64.h"  // NOLINT
85 #elif V8_TARGET_ARCH_ARM
86 #include "src/arm/regexp-macro-assembler-arm.h"  // NOLINT
87 #elif V8_TARGET_ARCH_MIPS
88 #include "src/mips/regexp-macro-assembler-mips.h"  // NOLINT
89 #elif V8_TARGET_ARCH_MIPS64
90 #include "src/mips64/regexp-macro-assembler-mips64.h"  // NOLINT
91 #elif V8_TARGET_ARCH_X87
92 #include "src/x87/regexp-macro-assembler-x87.h"  // NOLINT
93 #else  // Unknown architecture.
94 #error "Unknown architecture."
95 #endif  // Target architecture.
96 #endif  // V8_INTERPRETED_REGEXP
97 
98 namespace v8 {
99 namespace internal {
100 
101 // -----------------------------------------------------------------------------
102 // Common double constants.
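// These values are exposed to generated code through the address_of_*
// external references defined further down in this file.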
103 
104 struct DoubleConstant BASE_EMBEDDED {
105 double min_int;
106 double one_half;
107 double minus_one_half;
108 double negative_infinity;
109 double canonical_non_hole_nan;
110 double the_hole_nan;
111 double uint32_bias;
112 };
113 
114 static DoubleConstant double_constants;
115 
116 const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
117 
118 static bool math_exp_data_initialized = false;
119 static base::Mutex* math_exp_data_mutex = NULL;
120 static double* math_exp_constants_array = NULL;
121 static double* math_exp_log_table_array = NULL;
122 
123 // -----------------------------------------------------------------------------
124 // Implementation of AssemblerBase
125 
126 AssemblerBase::AssemblerBase(Isolate* isolate, void* buffer, int buffer_size)
127     : isolate_(isolate),
128       jit_cookie_(0),
129       enabled_cpu_features_(0),
130       emit_debug_code_(FLAG_debug_code),
131       predictable_code_size_(false),
132       // We may use the assembler without an isolate.
133       serializer_enabled_(isolate && isolate->serializer_enabled()) {
134   if (FLAG_mask_constants_with_cookie && isolate != NULL)  {
135     jit_cookie_ = isolate->random_number_generator()->NextInt();
136   }
137   own_buffer_ = buffer == NULL;
138   if (buffer_size == 0) buffer_size = kMinimalBufferSize;
139   DCHECK(buffer_size > 0);
140   if (own_buffer_) buffer = NewArray<byte>(buffer_size);
141   buffer_ = static_cast<byte*>(buffer);
142   buffer_size_ = buffer_size;
143 
144   pc_ = buffer_;
145 }
146 
147 
148 AssemblerBase::~AssemblerBase() {
149   if (own_buffer_) DeleteArray(buffer_);
150 }
151 
152 
153 // -----------------------------------------------------------------------------
154 // Implementation of PredictableCodeSizeScope
155 
156 PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler,
157                                                    int expected_size)
158     : assembler_(assembler),
159       expected_size_(expected_size),
160       start_offset_(assembler->pc_offset()),
161       old_value_(assembler->predictable_code_size()) {
162   assembler_->set_predictable_code_size(true);
163 }
164 
165 
166 PredictableCodeSizeScope::~PredictableCodeSizeScope() {
167   // TODO(svenpanne) Remove the 'if' when everything works.
168   if (expected_size_ >= 0) {
169     CHECK_EQ(expected_size_, assembler_->pc_offset() - start_offset_);
170   }
171   assembler_->set_predictable_code_size(old_value_);
172 }
173 
174 
175 // -----------------------------------------------------------------------------
176 // Implementation of CpuFeatureScope
177 
178 #ifdef DEBUG
179 CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f)
180     : assembler_(assembler) {
181   DCHECK(CpuFeatures::IsSupported(f));
182   old_enabled_ = assembler_->enabled_cpu_features();
183   uint64_t mask = static_cast<uint64_t>(1) << f;
184   // TODO(svenpanne) This special case below doesn't belong here!
185 #if V8_TARGET_ARCH_ARM
186   // ARMv7 is implied by VFP3.
187   if (f == VFP3) {
188     mask |= static_cast<uint64_t>(1) << ARMv7;
189   }
190 #endif
191   assembler_->set_enabled_cpu_features(old_enabled_ | mask);
192 }
193 
194 
195 CpuFeatureScope::~CpuFeatureScope() {
196   assembler_->set_enabled_cpu_features(old_enabled_);
197 }
198 #endif
199 
200 
201 bool CpuFeatures::initialized_ = false;
202 unsigned CpuFeatures::supported_ = 0;
203 unsigned CpuFeatures::cache_line_size_ = 0;
204 
205 
206 // -----------------------------------------------------------------------------
207 // Implementation of Label
208 
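// pos_ stores the label's position biased by one, with its sign recording
// whether the label is bound or merely linked (pos_ == 0 means unused).
// pos() strips the bias and the sign to recover the position in either case.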
209 int Label::pos() const {
210   if (pos_ < 0) return -pos_ - 1;
211   if (pos_ > 0) return  pos_ - 1;
212   UNREACHABLE();
213   return 0;
214 }
215 
216 
217 // -----------------------------------------------------------------------------
218 // Implementation of RelocInfoWriter and RelocIterator
219 //
220 // Relocation information is written backwards in memory, from high addresses
221 // towards low addresses, byte by byte.  Therefore, in the encodings listed
222 // below, the first byte listed is at the highest address, and successive
223 // bytes in the record are at progressively lower addresses.
224 //
225 // Encoding
226 //
227 // The most common modes are given single-byte encodings.  Also, it is
228 // easy to identify the type of reloc info and skip unwanted modes in
229 // an iteration.
230 //
231 // The encoding relies on the fact that there are fewer than 14
232 // different relocation modes using standard non-compact encoding.
233 //
234 // The first byte of a relocation record has a tag in its low 2 bits:
235 // Here are the record schemes, depending on the low tag and optional higher
236 // tags.
237 //
238 // Low tag:
239 //   00: embedded_object:      [6-bit pc delta] 00
240 //
241 //   01: code_target:          [6-bit pc delta] 01
242 //
243 //   10: short_data_record:    [6-bit pc delta] 10 followed by
244 //                             [6-bit data delta] [2-bit data type tag]
245 //
246 //   11: long_record           [2-bit high tag][4 bit middle_tag] 11
247 //                             followed by variable data depending on type.
248 //
249 //  2-bit data type tags, used in short_data_record and data_jump long_record:
250 //   code_target_with_id: 00
251 //   position:            01
252 //   statement_position:  10
253 //   comment:             11 (not used in short_data_record)
254 //
255 //  Long record format:
256 //    4-bit middle_tag:
257 //      0000 - 1100 : Short record for RelocInfo::Mode middle_tag + 2
258 //         (The middle_tag encodes rmode - RelocInfo::LAST_COMPACT_ENUM,
259 //          and is between 0000 and 1100)
260 //        The format is:
261 //                              00 [4 bit middle_tag] 11 followed by
262 //                              00 [6 bit pc delta]
263 //
264 //      1101: constant or veneer pool. Used only on ARM and ARM64 for now.
265 //        The format is:       [2-bit sub-type] 1101 11
266 //                             signed int (size of the pool).
267 //          The 2-bit sub-types are:
268 //            00: constant pool
269 //            01: veneer pool
270 //      1110: long_data_record
271 //        The format is:       [2-bit data_type_tag] 1110 11
272 //                             signed intptr_t, lowest byte written first
273 //                             (except data_type code_target_with_id, which
274 //                             is followed by a signed int, not intptr_t.)
275 //
276 //      1111: long_pc_jump
277 //        The format is:
278 //          pc-jump:             00 1111 11,
279 //                               00 [6 bits pc delta]
280 //        or
281 //          pc-jump (variable length):
282 //                               01 1111 11,
283 //                               [7 bits data] 0
284 //                                  ...
285 //                               [7 bits data] 1
286 //               (Bits 6..31 of pc delta, with leading zeroes
287 //                dropped, and last non-zero chunk tagged with 1.)
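//
// Worked example (derived from the tag constants below): an embedded_object
// record at pc delta 4 is the single byte 00010000 ([delta=000100][tag=00]).
// A pc delta of 70 does not fit in 6 bits, so it is preceded by a
// variable-length pc-jump; listed from the highest address downwards:
//   01111111   long_pc_jump header (top tag 01, extra tag 1111, low tag 11)
//   00000011   one chunk, 70 >> 6 == 1, shifted left and tagged as last
//   00011000   remaining delta 70 & 63 == 6 with the embedded_object tag 00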
288 
289 
290 #ifdef DEBUG
291 const int kMaxStandardNonCompactModes = 14;
292 #endif
293 
294 const int kTagBits = 2;
295 const int kTagMask = (1 << kTagBits) - 1;
296 const int kExtraTagBits = 4;
297 const int kLocatableTypeTagBits = 2;
298 const int kSmallDataBits = kBitsPerByte - kLocatableTypeTagBits;
299 
300 const int kEmbeddedObjectTag = 0;
301 const int kCodeTargetTag = 1;
302 const int kLocatableTag = 2;
303 const int kDefaultTag = 3;
304 
305 const int kPCJumpExtraTag = (1 << kExtraTagBits) - 1;
306 
307 const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
308 const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
309 const int RelocInfo::kMaxSmallPCDelta = kSmallPCDeltaMask;
310 
311 const int kVariableLengthPCJumpTopTag = 1;
312 const int kChunkBits = 7;
313 const int kChunkMask = (1 << kChunkBits) - 1;
314 const int kLastChunkTagBits = 1;
315 const int kLastChunkTagMask = 1;
316 const int kLastChunkTag = 1;
317 
318 
319 const int kDataJumpExtraTag = kPCJumpExtraTag - 1;
320 
321 const int kCodeWithIdTag = 0;
322 const int kNonstatementPositionTag = 1;
323 const int kStatementPositionTag = 2;
324 const int kCommentTag = 3;
325 
326 const int kPoolExtraTag = kPCJumpExtraTag - 2;
327 const int kConstPoolTag = 0;
328 const int kVeneerPoolTag = 1;
329 
330 
331 uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) {
332   // Return if the pc_delta can fit in kSmallPCDeltaBits bits.
333   // Otherwise write a variable length PC jump for the bits that do
334   // not fit in the kSmallPCDeltaBits bits.
335   if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
336   WriteExtraTag(kPCJumpExtraTag, kVariableLengthPCJumpTopTag);
337   uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
338   DCHECK(pc_jump > 0);
339   // Write kChunkBits size chunks of the pc_jump.
340   for (; pc_jump > 0; pc_jump = pc_jump >> kChunkBits) {
341     byte b = pc_jump & kChunkMask;
342     *--pos_ = b << kLastChunkTagBits;
343   }
344   // Tag the last chunk so it can be identified.
345   *pos_ = *pos_ | kLastChunkTag;
346   // Return the remaining kSmallPCDeltaBits of the pc_delta.
347   return pc_delta & kSmallPCDeltaMask;
348 }
349 
350 
351 void RelocInfoWriter::WriteTaggedPC(uint32_t pc_delta, int tag) {
352   // Write a byte of tagged pc-delta, possibly preceded by var. length pc-jump.
353   pc_delta = WriteVariableLengthPCJump(pc_delta);
354   *--pos_ = pc_delta << kTagBits | tag;
355 }
356 
357 
358 void RelocInfoWriter::WriteTaggedData(intptr_t data_delta, int tag) {
359   *--pos_ = static_cast<byte>(data_delta << kLocatableTypeTagBits | tag);
360 }
361 
362 
363 void RelocInfoWriter::WriteExtraTag(int extra_tag, int top_tag) {
364   *--pos_ = static_cast<int>(top_tag << (kTagBits + kExtraTagBits) |
365                              extra_tag << kTagBits |
366                              kDefaultTag);
367 }
368 
369 
370 void RelocInfoWriter::WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag) {
371   // Write two-byte tagged pc-delta, possibly preceded by var. length pc-jump.
372   pc_delta = WriteVariableLengthPCJump(pc_delta);
373   WriteExtraTag(extra_tag, 0);
374   *--pos_ = pc_delta;
375 }
376 
377 
378 void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) {
379   WriteExtraTag(kDataJumpExtraTag, top_tag);
380   for (int i = 0; i < kIntSize; i++) {
381     *--pos_ = static_cast<byte>(data_delta);
382     // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
383     data_delta = data_delta >> kBitsPerByte;
384   }
385 }
386 
387 
388 void RelocInfoWriter::WriteExtraTaggedPoolData(int data, int pool_type) {
389   WriteExtraTag(kPoolExtraTag, pool_type);
390   for (int i = 0; i < kIntSize; i++) {
391     *--pos_ = static_cast<byte>(data);
392     // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
393     data = data >> kBitsPerByte;
394   }
395 }
396 
397 
398 void RelocInfoWriter::WriteExtraTaggedData(intptr_t data_delta, int top_tag) {
399   WriteExtraTag(kDataJumpExtraTag, top_tag);
400   for (int i = 0; i < kIntptrSize; i++) {
401     *--pos_ = static_cast<byte>(data_delta);
402     // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
403     data_delta = data_delta >> kBitsPerByte;
404   }
405 }
406 
407 
408 void RelocInfoWriter::Write(const RelocInfo* rinfo) {
409 #ifdef DEBUG
410   byte* begin_pos = pos_;
411 #endif
412   DCHECK(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
413   DCHECK(rinfo->pc() - last_pc_ >= 0);
414   DCHECK(RelocInfo::LAST_STANDARD_NONCOMPACT_ENUM - RelocInfo::LAST_COMPACT_ENUM
415          <= kMaxStandardNonCompactModes);
416   // Use unsigned delta-encoding for pc.
417   uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
418   RelocInfo::Mode rmode = rinfo->rmode();
419 
420   // The two most common modes are given small tags, and usually fit in a byte.
421   if (rmode == RelocInfo::EMBEDDED_OBJECT) {
422     WriteTaggedPC(pc_delta, kEmbeddedObjectTag);
423   } else if (rmode == RelocInfo::CODE_TARGET) {
424     WriteTaggedPC(pc_delta, kCodeTargetTag);
425     DCHECK(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
426   } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
427     // Use signed delta-encoding for id.
428     DCHECK(static_cast<int>(rinfo->data()) == rinfo->data());
429     int id_delta = static_cast<int>(rinfo->data()) - last_id_;
430     // Check if delta is small enough to fit in a tagged byte.
431     if (is_intn(id_delta, kSmallDataBits)) {
432       WriteTaggedPC(pc_delta, kLocatableTag);
433       WriteTaggedData(id_delta, kCodeWithIdTag);
434     } else {
435       // Otherwise, use costly encoding.
436       WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
437       WriteExtraTaggedIntData(id_delta, kCodeWithIdTag);
438     }
439     last_id_ = static_cast<int>(rinfo->data());
440   } else if (RelocInfo::IsPosition(rmode)) {
441     // Use signed delta-encoding for position.
442     DCHECK(static_cast<int>(rinfo->data()) == rinfo->data());
443     int pos_delta = static_cast<int>(rinfo->data()) - last_position_;
444     int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag
445                                                       : kStatementPositionTag;
446     // Check if delta is small enough to fit in a tagged byte.
447     if (is_intn(pos_delta, kSmallDataBits)) {
448       WriteTaggedPC(pc_delta, kLocatableTag);
449       WriteTaggedData(pos_delta, pos_type_tag);
450     } else {
451       // Otherwise, use costly encoding.
452       WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
453       WriteExtraTaggedIntData(pos_delta, pos_type_tag);
454     }
455     last_position_ = static_cast<int>(rinfo->data());
456   } else if (RelocInfo::IsComment(rmode)) {
457     // Comments are normally not generated, so we use the costly encoding.
458     WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
459     WriteExtraTaggedData(rinfo->data(), kCommentTag);
460     DCHECK(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize);
461   } else if (RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode)) {
462       WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
463       WriteExtraTaggedPoolData(static_cast<int>(rinfo->data()),
464                                RelocInfo::IsConstPool(rmode) ? kConstPoolTag
465                                                              : kVeneerPoolTag);
466   } else {
467     DCHECK(rmode > RelocInfo::LAST_COMPACT_ENUM);
468     int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM;
469     // For all other modes we simply use the mode as the extra tag.
470     // None of these modes need a data component.
471     DCHECK(saved_mode < kPCJumpExtraTag && saved_mode < kDataJumpExtraTag);
472     WriteExtraTaggedPC(pc_delta, saved_mode);
473   }
474   last_pc_ = rinfo->pc();
475 #ifdef DEBUG
476   DCHECK(begin_pos - pos_ <= kMaxSize);
477 #endif
478 }
479 
480 
481 inline int RelocIterator::AdvanceGetTag() {
482   return *--pos_ & kTagMask;
483 }
484 
485 
486 inline int RelocIterator::GetExtraTag() {
487   return (*pos_ >> kTagBits) & ((1 << kExtraTagBits) - 1);
488 }
489 
490 
491 inline int RelocIterator::GetTopTag() {
492   return *pos_ >> (kTagBits + kExtraTagBits);
493 }
494 
495 
496 inline void RelocIterator::ReadTaggedPC() {
497   rinfo_.pc_ += *pos_ >> kTagBits;
498 }
499 
500 
501 inline void RelocIterator::AdvanceReadPC() {
502   rinfo_.pc_ += *--pos_;
503 }
504 
505 
506 void RelocIterator::AdvanceReadId() {
507   int x = 0;
508   for (int i = 0; i < kIntSize; i++) {
509     x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
510   }
511   last_id_ += x;
512   rinfo_.data_ = last_id_;
513 }
514 
515 
516 void RelocIterator::AdvanceReadPoolData() {
517   int x = 0;
518   for (int i = 0; i < kIntSize; i++) {
519     x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
520   }
521   rinfo_.data_ = x;
522 }
523 
524 
525 void RelocIterator::AdvanceReadPosition() {
526   int x = 0;
527   for (int i = 0; i < kIntSize; i++) {
528     x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
529   }
530   last_position_ += x;
531   rinfo_.data_ = last_position_;
532 }
533 
534 
535 void RelocIterator::AdvanceReadData() {
536   intptr_t x = 0;
537   for (int i = 0; i < kIntptrSize; i++) {
538     x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
539   }
540   rinfo_.data_ = x;
541 }
542 
543 
544 void RelocIterator::AdvanceReadVariableLengthPCJump() {
545   // Read the 32-kSmallPCDeltaBits most significant bits of the
546   // pc jump in kChunkBits bit chunks and shift them into place.
547   // Stop when the last chunk is encountered.
548   uint32_t pc_jump = 0;
549   for (int i = 0; i < kIntSize; i++) {
550     byte pc_jump_part = *--pos_;
551     pc_jump |= (pc_jump_part >> kLastChunkTagBits) << i * kChunkBits;
552     if ((pc_jump_part & kLastChunkTagMask) == 1) break;
553   }
554   // The least significant kSmallPCDeltaBits bits will be added
555   // later.
556   rinfo_.pc_ += pc_jump << kSmallPCDeltaBits;
557 }
558 
559 
560 inline int RelocIterator::GetLocatableTypeTag() {
561   return *pos_ & ((1 << kLocatableTypeTagBits) - 1);
562 }
563 
564 
565 inline void RelocIterator::ReadTaggedId() {
566   int8_t signed_b = *pos_;
567   // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
568   last_id_ += signed_b >> kLocatableTypeTagBits;
569   rinfo_.data_ = last_id_;
570 }
571 
572 
573 inline void RelocIterator::ReadTaggedPosition() {
574   int8_t signed_b = *pos_;
575   // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
576   last_position_ += signed_b >> kLocatableTypeTagBits;
577   rinfo_.data_ = last_position_;
578 }
579 
580 
581 static inline RelocInfo::Mode GetPositionModeFromTag(int tag) {
582   DCHECK(tag == kNonstatementPositionTag ||
583          tag == kStatementPositionTag);
584   return (tag == kNonstatementPositionTag) ?
585          RelocInfo::POSITION :
586          RelocInfo::STATEMENT_POSITION;
587 }
588 
589 
590 void RelocIterator::next() {
591   DCHECK(!done());
592   // Basically, do the opposite of RelocInfoWriter::Write.
593   // Reading of data is as far as possible avoided for unwanted modes,
594   // but we must always update the pc.
595   //
596   // We exit this loop by returning when we find a mode we want.
597   while (pos_ > end_) {
598     int tag = AdvanceGetTag();
599     if (tag == kEmbeddedObjectTag) {
600       ReadTaggedPC();
601       if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
602     } else if (tag == kCodeTargetTag) {
603       ReadTaggedPC();
604       if (SetMode(RelocInfo::CODE_TARGET)) return;
605     } else if (tag == kLocatableTag) {
606       ReadTaggedPC();
607       Advance();
608       int locatable_tag = GetLocatableTypeTag();
609       if (locatable_tag == kCodeWithIdTag) {
610         if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
611           ReadTaggedId();
612           return;
613         }
614       } else {
615         // Compact encoding is never used for comments,
616         // so it must be a position.
617         DCHECK(locatable_tag == kNonstatementPositionTag ||
618                locatable_tag == kStatementPositionTag);
619         if (mode_mask_ & RelocInfo::kPositionMask) {
620           ReadTaggedPosition();
621           if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
622         }
623       }
624     } else {
625       DCHECK(tag == kDefaultTag);
626       int extra_tag = GetExtraTag();
627       if (extra_tag == kPCJumpExtraTag) {
628         if (GetTopTag() == kVariableLengthPCJumpTopTag) {
629           AdvanceReadVariableLengthPCJump();
630         } else {
631           AdvanceReadPC();
632         }
633       } else if (extra_tag == kDataJumpExtraTag) {
634         int locatable_tag = GetTopTag();
635         if (locatable_tag == kCodeWithIdTag) {
636           if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
637             AdvanceReadId();
638             return;
639           }
640           Advance(kIntSize);
641         } else if (locatable_tag != kCommentTag) {
642           DCHECK(locatable_tag == kNonstatementPositionTag ||
643                  locatable_tag == kStatementPositionTag);
644           if (mode_mask_ & RelocInfo::kPositionMask) {
645             AdvanceReadPosition();
646             if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
647           } else {
648             Advance(kIntSize);
649           }
650         } else {
651           DCHECK(locatable_tag == kCommentTag);
652           if (SetMode(RelocInfo::COMMENT)) {
653             AdvanceReadData();
654             return;
655           }
656           Advance(kIntptrSize);
657         }
658       } else if (extra_tag == kPoolExtraTag) {
659         int pool_type = GetTopTag();
660         DCHECK(pool_type == kConstPoolTag || pool_type == kVeneerPoolTag);
661         RelocInfo::Mode rmode = (pool_type == kConstPoolTag) ?
662           RelocInfo::CONST_POOL : RelocInfo::VENEER_POOL;
663         if (SetMode(rmode)) {
664           AdvanceReadPoolData();
665           return;
666         }
667         Advance(kIntSize);
668       } else {
669         AdvanceReadPC();
670         int rmode = extra_tag + RelocInfo::LAST_COMPACT_ENUM;
671         if (SetMode(static_cast<RelocInfo::Mode>(rmode))) return;
672       }
673     }
674   }
675   if (code_age_sequence_ != NULL) {
676     byte* old_code_age_sequence = code_age_sequence_;
677     code_age_sequence_ = NULL;
678     if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
679       rinfo_.data_ = 0;
680       rinfo_.pc_ = old_code_age_sequence;
681       return;
682     }
683   }
684   done_ = true;
685 }
686 
687 
688 RelocIterator::RelocIterator(Code* code, int mode_mask) {
689   rinfo_.host_ = code;
690   rinfo_.pc_ = code->instruction_start();
691   rinfo_.data_ = 0;
692   // Relocation info is read backwards.
693   pos_ = code->relocation_start() + code->relocation_size();
694   end_ = code->relocation_start();
695   done_ = false;
696   mode_mask_ = mode_mask;
697   last_id_ = 0;
698   last_position_ = 0;
699   byte* sequence = code->FindCodeAgeSequence();
700   // We get the isolate from the map, because at serialization time
701   // the code pointer has been cloned and isn't really in heap space.
702   Isolate* isolate = code->map()->GetIsolate();
703   if (sequence != NULL && !Code::IsYoungSequence(isolate, sequence)) {
704     code_age_sequence_ = sequence;
705   } else {
706     code_age_sequence_ = NULL;
707   }
708   if (mode_mask_ == 0) pos_ = end_;
709   next();
710 }
711 
712 
713 RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask) {
714   rinfo_.pc_ = desc.buffer;
715   rinfo_.data_ = 0;
716   // Relocation info is read backwards.
717   pos_ = desc.buffer + desc.buffer_size;
718   end_ = pos_ - desc.reloc_size;
719   done_ = false;
720   mode_mask_ = mode_mask;
721   last_id_ = 0;
722   last_position_ = 0;
723   code_age_sequence_ = NULL;
724   if (mode_mask_ == 0) pos_ = end_;
725   next();
726 }
727 
728 
729 // -----------------------------------------------------------------------------
730 // Implementation of RelocInfo
731 
732 
733 #ifdef DEBUG
734 bool RelocInfo::RequiresRelocation(const CodeDesc& desc) {
735   // Ensure there are no code targets or embedded objects present in the
736   // deoptimization entries; they would require relocation after code
737   // generation.
738   int mode_mask = RelocInfo::kCodeTargetMask |
739                   RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
740                   RelocInfo::ModeMask(RelocInfo::CELL) |
741                   RelocInfo::kApplyMask;
742   RelocIterator it(desc, mode_mask);
743   return !it.done();
744 }
745 #endif
746 
747 
748 #ifdef ENABLE_DISASSEMBLER
749 const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
750   switch (rmode) {
751     case RelocInfo::NONE32:
752       return "no reloc 32";
753     case RelocInfo::NONE64:
754       return "no reloc 64";
755     case RelocInfo::EMBEDDED_OBJECT:
756       return "embedded object";
757     case RelocInfo::CONSTRUCT_CALL:
758       return "code target (js construct call)";
759     case RelocInfo::DEBUG_BREAK:
760       return "debug break";
761     case RelocInfo::CODE_TARGET:
762       return "code target";
763     case RelocInfo::CODE_TARGET_WITH_ID:
764       return "code target with id";
765     case RelocInfo::CELL:
766       return "property cell";
767     case RelocInfo::RUNTIME_ENTRY:
768       return "runtime entry";
769     case RelocInfo::JS_RETURN:
770       return "js return";
771     case RelocInfo::COMMENT:
772       return "comment";
773     case RelocInfo::POSITION:
774       return "position";
775     case RelocInfo::STATEMENT_POSITION:
776       return "statement position";
777     case RelocInfo::EXTERNAL_REFERENCE:
778       return "external reference";
779     case RelocInfo::INTERNAL_REFERENCE:
780       return "internal reference";
781     case RelocInfo::CONST_POOL:
782       return "constant pool";
783     case RelocInfo::VENEER_POOL:
784       return "veneer pool";
785     case RelocInfo::DEBUG_BREAK_SLOT:
786       return "debug break slot";
787     case RelocInfo::CODE_AGE_SEQUENCE:
788       return "code_age_sequence";
789     case RelocInfo::NUMBER_OF_MODES:
790       UNREACHABLE();
791       return "number_of_modes";
792   }
793   return "unknown relocation type";
794 }
795 
796 
797 void RelocInfo::Print(Isolate* isolate, OStream& os) {  // NOLINT
798   os << pc_ << "  " << RelocModeName(rmode_);
799   if (IsComment(rmode_)) {
800     os << "  (" << reinterpret_cast<char*>(data_) << ")";
801   } else if (rmode_ == EMBEDDED_OBJECT) {
802     os << "  (" << Brief(target_object()) << ")";
803   } else if (rmode_ == EXTERNAL_REFERENCE) {
804     ExternalReferenceEncoder ref_encoder(isolate);
805     os << " (" << ref_encoder.NameOfAddress(target_reference()) << ")  ("
806        << target_reference() << ")";
807   } else if (IsCodeTarget(rmode_)) {
808     Code* code = Code::GetCodeFromTargetAddress(target_address());
809     os << " (" << Code::Kind2String(code->kind()) << ")  (" << target_address()
810        << ")";
811     if (rmode_ == CODE_TARGET_WITH_ID) {
812       os << " (id=" << static_cast<int>(data_) << ")";
813     }
814   } else if (IsPosition(rmode_)) {
815     os << "  (" << data() << ")";
816   } else if (IsRuntimeEntry(rmode_) &&
817              isolate->deoptimizer_data() != NULL) {
818     // Deoptimization bailouts are stored as runtime entries.
819     int id = Deoptimizer::GetDeoptimizationId(
820         isolate, target_address(), Deoptimizer::EAGER);
821     if (id != Deoptimizer::kNotDeoptimizationEntry) {
822       os << "  (deoptimization bailout " << id << ")";
823     }
824   }
825 
826   os << "\n";
827 }
828 #endif  // ENABLE_DISASSEMBLER
829 
830 
831 #ifdef VERIFY_HEAP
832 void RelocInfo::Verify(Isolate* isolate) {
833   switch (rmode_) {
834     case EMBEDDED_OBJECT:
835       Object::VerifyPointer(target_object());
836       break;
837     case CELL:
838       Object::VerifyPointer(target_cell());
839       break;
840     case DEBUG_BREAK:
841     case CONSTRUCT_CALL:
842     case CODE_TARGET_WITH_ID:
843     case CODE_TARGET: {
844       // convert inline target address to code object
845       Address addr = target_address();
846       CHECK(addr != NULL);
847       // Check that we can find the right code object.
848       Code* code = Code::GetCodeFromTargetAddress(addr);
849       Object* found = isolate->FindCodeObject(addr);
850       CHECK(found->IsCode());
851       CHECK(code->address() == HeapObject::cast(found)->address());
852       break;
853     }
854     case RUNTIME_ENTRY:
855     case JS_RETURN:
856     case COMMENT:
857     case POSITION:
858     case STATEMENT_POSITION:
859     case EXTERNAL_REFERENCE:
860     case INTERNAL_REFERENCE:
861     case CONST_POOL:
862     case VENEER_POOL:
863     case DEBUG_BREAK_SLOT:
864     case NONE32:
865     case NONE64:
866       break;
867     case NUMBER_OF_MODES:
868       UNREACHABLE();
869       break;
870     case CODE_AGE_SEQUENCE:
871       DCHECK(Code::IsYoungSequence(isolate, pc_) || code_age_stub()->IsCode());
872       break;
873   }
874 }
875 #endif  // VERIFY_HEAP
876 
877 
878 // -----------------------------------------------------------------------------
879 // Implementation of ExternalReference
880 
881 void ExternalReference::SetUp() {
882   double_constants.min_int = kMinInt;
883   double_constants.one_half = 0.5;
884   double_constants.minus_one_half = -0.5;
885   double_constants.canonical_non_hole_nan = base::OS::nan_value();
886   double_constants.the_hole_nan = bit_cast<double>(kHoleNanInt64);
887   double_constants.negative_infinity = -V8_INFINITY;
888   double_constants.uint32_bias =
889     static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
890 
891   math_exp_data_mutex = new base::Mutex();
892 }
893 
894 
895 void ExternalReference::InitializeMathExpData() {
896   // Early return?
897   if (math_exp_data_initialized) return;
898 
899   base::LockGuard<base::Mutex> lock_guard(math_exp_data_mutex);
900   if (!math_exp_data_initialized) {
901     // If this is changed, generated code must be adapted too.
902     const int kTableSizeBits = 11;
903     const int kTableSize = 1 << kTableSizeBits;
904     const double kTableSizeDouble = static_cast<double>(kTableSize);
905 
906     math_exp_constants_array = new double[9];
907     // Input values smaller than this always return 0.
908     math_exp_constants_array[0] = -708.39641853226408;
909     // Input values larger than this always return +Infinity.
910     math_exp_constants_array[1] = 709.78271289338397;
911     math_exp_constants_array[2] = V8_INFINITY;
912     // The rest is black magic. Do not attempt to understand it. It is
913     // loosely based on the "expd" function published at:
914     // http://herumi.blogspot.com/2011/08/fast-double-precision-exponential.html
915     const double constant3 = (1 << kTableSizeBits) / std::log(2.0);
916     math_exp_constants_array[3] = constant3;
917     math_exp_constants_array[4] =
918         static_cast<double>(static_cast<int64_t>(3) << 51);
919     math_exp_constants_array[5] = 1 / constant3;
920     math_exp_constants_array[6] = 3.0000000027955394;
921     math_exp_constants_array[7] = 0.16666666685227835;
922     math_exp_constants_array[8] = 1;
923 
924     math_exp_log_table_array = new double[kTableSize];
925     for (int i = 0; i < kTableSize; i++) {
926       double value = std::pow(2, i / kTableSizeDouble);
927       uint64_t bits = bit_cast<uint64_t, double>(value);
928       bits &= (static_cast<uint64_t>(1) << 52) - 1;
929       double mantissa = bit_cast<double, uint64_t>(bits);
930       math_exp_log_table_array[i] = mantissa;
931     }
932 
933     math_exp_data_initialized = true;
934   }
935 }
936 
937 
938 void ExternalReference::TearDownMathExpData() {
939   delete[] math_exp_constants_array;
940   math_exp_constants_array = NULL;
941   delete[] math_exp_log_table_array;
942   math_exp_log_table_array = NULL;
943   delete math_exp_data_mutex;
944   math_exp_data_mutex = NULL;
945 }
946 
947 
948 ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate)
949   : address_(Redirect(isolate, Builtins::c_function_address(id))) {}
950 
951 
952 ExternalReference::ExternalReference(
953     ApiFunction* fun,
954     Type type = ExternalReference::BUILTIN_CALL,
955     Isolate* isolate = NULL)
956   : address_(Redirect(isolate, fun->address(), type)) {}
957 
958 
959 ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate)
960   : address_(isolate->builtins()->builtin_address(name)) {}
961 
962 
963 ExternalReference::ExternalReference(Runtime::FunctionId id,
964                                      Isolate* isolate)
965   : address_(Redirect(isolate, Runtime::FunctionForId(id)->entry)) {}
966 
967 
968 ExternalReference::ExternalReference(const Runtime::Function* f,
969                                      Isolate* isolate)
970   : address_(Redirect(isolate, f->entry)) {}
971 
972 
973 ExternalReference ExternalReference::isolate_address(Isolate* isolate) {
974   return ExternalReference(isolate);
975 }
976 
977 
978 ExternalReference::ExternalReference(const IC_Utility& ic_utility,
979                                      Isolate* isolate)
980   : address_(Redirect(isolate, ic_utility.address())) {}
981 
982 
983 ExternalReference::ExternalReference(StatsCounter* counter)
984   : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}
985 
986 
987 ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate)
988   : address_(isolate->get_address_from_id(id)) {}
989 
990 
991 ExternalReference::ExternalReference(const SCTableReference& table_ref)
992   : address_(table_ref.address()) {}
993 
994 
995 ExternalReference ExternalReference::
996     incremental_marking_record_write_function(Isolate* isolate) {
997   return ExternalReference(Redirect(
998       isolate,
999       FUNCTION_ADDR(IncrementalMarking::RecordWriteFromCode)));
1000 }
1001 
1002 
1003 ExternalReference ExternalReference::
1004     store_buffer_overflow_function(Isolate* isolate) {
1005   return ExternalReference(Redirect(
1006       isolate,
1007       FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow)));
1008 }
1009 
1010 
1011 ExternalReference ExternalReference::flush_icache_function(Isolate* isolate) {
1012   return ExternalReference(
1013       Redirect(isolate, FUNCTION_ADDR(CpuFeatures::FlushICache)));
1014 }
1015 
1016 
1017 ExternalReference ExternalReference::delete_handle_scope_extensions(
1018     Isolate* isolate) {
1019   return ExternalReference(Redirect(
1020       isolate,
1021       FUNCTION_ADDR(HandleScope::DeleteExtensions)));
1022 }
1023 
1024 
1025 ExternalReference ExternalReference::get_date_field_function(
1026     Isolate* isolate) {
1027   return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
1028 }
1029 
1030 
1031 ExternalReference ExternalReference::get_make_code_young_function(
1032     Isolate* isolate) {
1033   return ExternalReference(Redirect(
1034       isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
1035 }
1036 
1037 
1038 ExternalReference ExternalReference::get_mark_code_as_executed_function(
1039     Isolate* isolate) {
1040   return ExternalReference(Redirect(
1041       isolate, FUNCTION_ADDR(Code::MarkCodeAsExecuted)));
1042 }
1043 
1044 
1045 ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
1046   return ExternalReference(isolate->date_cache()->stamp_address());
1047 }
1048 
1049 
1050 ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
1051   return ExternalReference(isolate->stress_deopt_count_address());
1052 }
1053 
1054 
1055 ExternalReference ExternalReference::new_deoptimizer_function(
1056     Isolate* isolate) {
1057   return ExternalReference(
1058       Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New)));
1059 }
1060 
1061 
1062 ExternalReference ExternalReference::compute_output_frames_function(
1063     Isolate* isolate) {
1064   return ExternalReference(
1065       Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
1066 }
1067 
1068 
1069 ExternalReference ExternalReference::log_enter_external_function(
1070     Isolate* isolate) {
1071   return ExternalReference(
1072       Redirect(isolate, FUNCTION_ADDR(Logger::EnterExternal)));
1073 }
1074 
1075 
1076 ExternalReference ExternalReference::log_leave_external_function(
1077     Isolate* isolate) {
1078   return ExternalReference(
1079       Redirect(isolate, FUNCTION_ADDR(Logger::LeaveExternal)));
1080 }
1081 
1082 
1083 ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) {
1084   return ExternalReference(isolate->keyed_lookup_cache()->keys_address());
1085 }
1086 
1087 
1088 ExternalReference ExternalReference::keyed_lookup_cache_field_offsets(
1089     Isolate* isolate) {
1090   return ExternalReference(
1091       isolate->keyed_lookup_cache()->field_offsets_address());
1092 }
1093 
1094 
1095 ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
1096   return ExternalReference(isolate->heap()->roots_array_start());
1097 }
1098 
1099 
1100 ExternalReference ExternalReference::allocation_sites_list_address(
1101     Isolate* isolate) {
1102   return ExternalReference(isolate->heap()->allocation_sites_list_address());
1103 }
1104 
1105 
1106 ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) {
1107   return ExternalReference(isolate->stack_guard()->address_of_jslimit());
1108 }
1109 
1110 
1111 ExternalReference ExternalReference::address_of_real_stack_limit(
1112     Isolate* isolate) {
1113   return ExternalReference(isolate->stack_guard()->address_of_real_jslimit());
1114 }
1115 
1116 
1117 ExternalReference ExternalReference::address_of_regexp_stack_limit(
1118     Isolate* isolate) {
1119   return ExternalReference(isolate->regexp_stack()->limit_address());
1120 }
1121 
1122 
1123 ExternalReference ExternalReference::new_space_start(Isolate* isolate) {
1124   return ExternalReference(isolate->heap()->NewSpaceStart());
1125 }
1126 
1127 
1128 ExternalReference ExternalReference::store_buffer_top(Isolate* isolate) {
1129   return ExternalReference(isolate->heap()->store_buffer()->TopAddress());
1130 }
1131 
1132 
1133 ExternalReference ExternalReference::new_space_mask(Isolate* isolate) {
1134   return ExternalReference(reinterpret_cast<Address>(
1135       isolate->heap()->NewSpaceMask()));
1136 }
1137 
1138 
1139 ExternalReference ExternalReference::new_space_allocation_top_address(
1140     Isolate* isolate) {
1141   return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
1142 }
1143 
1144 
1145 ExternalReference ExternalReference::new_space_allocation_limit_address(
1146     Isolate* isolate) {
1147   return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress());
1148 }
1149 
1150 
1151 ExternalReference ExternalReference::old_pointer_space_allocation_top_address(
1152     Isolate* isolate) {
1153   return ExternalReference(
1154       isolate->heap()->OldPointerSpaceAllocationTopAddress());
1155 }
1156 
1157 
1158 ExternalReference ExternalReference::old_pointer_space_allocation_limit_address(
1159     Isolate* isolate) {
1160   return ExternalReference(
1161       isolate->heap()->OldPointerSpaceAllocationLimitAddress());
1162 }
1163 
1164 
1165 ExternalReference ExternalReference::old_data_space_allocation_top_address(
1166     Isolate* isolate) {
1167   return ExternalReference(
1168       isolate->heap()->OldDataSpaceAllocationTopAddress());
1169 }
1170 
1171 
1172 ExternalReference ExternalReference::old_data_space_allocation_limit_address(
1173     Isolate* isolate) {
1174   return ExternalReference(
1175       isolate->heap()->OldDataSpaceAllocationLimitAddress());
1176 }
1177 
1178 
1179 ExternalReference ExternalReference::handle_scope_level_address(
1180     Isolate* isolate) {
1181   return ExternalReference(HandleScope::current_level_address(isolate));
1182 }
1183 
1184 
1185 ExternalReference ExternalReference::handle_scope_next_address(
1186     Isolate* isolate) {
1187   return ExternalReference(HandleScope::current_next_address(isolate));
1188 }
1189 
1190 
1191 ExternalReference ExternalReference::handle_scope_limit_address(
1192     Isolate* isolate) {
1193   return ExternalReference(HandleScope::current_limit_address(isolate));
1194 }
1195 
1196 
1197 ExternalReference ExternalReference::scheduled_exception_address(
1198     Isolate* isolate) {
1199   return ExternalReference(isolate->scheduled_exception_address());
1200 }
1201 
1202 
1203 ExternalReference ExternalReference::address_of_pending_message_obj(
1204     Isolate* isolate) {
1205   return ExternalReference(isolate->pending_message_obj_address());
1206 }
1207 
1208 
1209 ExternalReference ExternalReference::address_of_has_pending_message(
1210     Isolate* isolate) {
1211   return ExternalReference(isolate->has_pending_message_address());
1212 }
1213 
1214 
1215 ExternalReference ExternalReference::address_of_pending_message_script(
1216     Isolate* isolate) {
1217   return ExternalReference(isolate->pending_message_script_address());
1218 }
1219 
1220 
1221 ExternalReference ExternalReference::address_of_min_int() {
1222   return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
1223 }
1224 
1225 
1226 ExternalReference ExternalReference::address_of_one_half() {
1227   return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
1228 }
1229 
1230 
1231 ExternalReference ExternalReference::address_of_minus_one_half() {
1232   return ExternalReference(
1233       reinterpret_cast<void*>(&double_constants.minus_one_half));
1234 }
1235 
1236 
1237 ExternalReference ExternalReference::address_of_negative_infinity() {
1238   return ExternalReference(
1239       reinterpret_cast<void*>(&double_constants.negative_infinity));
1240 }
1241 
1242 
1243 ExternalReference ExternalReference::address_of_canonical_non_hole_nan() {
1244   return ExternalReference(
1245       reinterpret_cast<void*>(&double_constants.canonical_non_hole_nan));
1246 }
1247 
1248 
1249 ExternalReference ExternalReference::address_of_the_hole_nan() {
1250   return ExternalReference(
1251       reinterpret_cast<void*>(&double_constants.the_hole_nan));
1252 }
1253 
1254 
1255 ExternalReference ExternalReference::address_of_uint32_bias() {
1256   return ExternalReference(
1257       reinterpret_cast<void*>(&double_constants.uint32_bias));
1258 }
1259 
1260 
1261 ExternalReference ExternalReference::is_profiling_address(Isolate* isolate) {
1262   return ExternalReference(isolate->cpu_profiler()->is_profiling_address());
1263 }
1264 
1265 
1266 ExternalReference ExternalReference::invoke_function_callback(
1267     Isolate* isolate) {
1268   Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
1269   ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
1270   ApiFunction thunk_fun(thunk_address);
1271   return ExternalReference(&thunk_fun, thunk_type, isolate);
1272 }
1273 
1274 
1275 ExternalReference ExternalReference::invoke_accessor_getter_callback(
1276     Isolate* isolate) {
1277   Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
1278   ExternalReference::Type thunk_type =
1279       ExternalReference::PROFILING_GETTER_CALL;
1280   ApiFunction thunk_fun(thunk_address);
1281   return ExternalReference(&thunk_fun, thunk_type, isolate);
1282 }
1283 
1284 
1285 #ifndef V8_INTERPRETED_REGEXP
1286 
1287 ExternalReference ExternalReference::re_check_stack_guard_state(
1288     Isolate* isolate) {
1289   Address function;
1290 #if V8_TARGET_ARCH_X64
1291   function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState);
1292 #elif V8_TARGET_ARCH_IA32
1293   function = FUNCTION_ADDR(RegExpMacroAssemblerIA32::CheckStackGuardState);
1294 #elif V8_TARGET_ARCH_ARM64
1295   function = FUNCTION_ADDR(RegExpMacroAssemblerARM64::CheckStackGuardState);
1296 #elif V8_TARGET_ARCH_ARM
1297   function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState);
1298 #elif V8_TARGET_ARCH_MIPS
1299   function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1300 #elif V8_TARGET_ARCH_MIPS64
1301   function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1302 #elif V8_TARGET_ARCH_X87
1303   function = FUNCTION_ADDR(RegExpMacroAssemblerX87::CheckStackGuardState);
1304 #else
1305   UNREACHABLE();
1306 #endif
1307   return ExternalReference(Redirect(isolate, function));
1308 }
1309 
1310 
1311 ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) {
1312   return ExternalReference(
1313       Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
1314 }
1315 
1316 ExternalReference ExternalReference::re_case_insensitive_compare_uc16(
1317     Isolate* isolate) {
1318   return ExternalReference(Redirect(
1319       isolate,
1320       FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
1321 }
1322 
1323 
1324 ExternalReference ExternalReference::re_word_character_map() {
1325   return ExternalReference(
1326       NativeRegExpMacroAssembler::word_character_map_address());
1327 }
1328 
1329 ExternalReference ExternalReference::address_of_static_offsets_vector(
1330     Isolate* isolate) {
1331   return ExternalReference(
1332       reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector()));
1333 }
1334 
1335 ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
1336     Isolate* isolate) {
1337   return ExternalReference(
1338       isolate->regexp_stack()->memory_address());
1339 }
1340 
1341 ExternalReference ExternalReference::address_of_regexp_stack_memory_size(
1342     Isolate* isolate) {
1343   return ExternalReference(isolate->regexp_stack()->memory_size_address());
1344 }
1345 
1346 #endif  // V8_INTERPRETED_REGEXP
1347 
1348 
1349 ExternalReference ExternalReference::math_log_double_function(
1350     Isolate* isolate) {
1351   typedef double (*d2d)(double x);
1352   return ExternalReference(Redirect(isolate,
1353                                     FUNCTION_ADDR(static_cast<d2d>(std::log)),
1354                                     BUILTIN_FP_CALL));
1355 }
1356 
1357 
1358 ExternalReference ExternalReference::math_exp_constants(int constant_index) {
1359   DCHECK(math_exp_data_initialized);
1360   return ExternalReference(
1361       reinterpret_cast<void*>(math_exp_constants_array + constant_index));
1362 }
1363 
1364 
1365 ExternalReference ExternalReference::math_exp_log_table() {
1366   DCHECK(math_exp_data_initialized);
1367   return ExternalReference(reinterpret_cast<void*>(math_exp_log_table_array));
1368 }
1369 
1370 
1371 ExternalReference ExternalReference::page_flags(Page* page) {
1372   return ExternalReference(reinterpret_cast<Address>(page) +
1373                            MemoryChunk::kFlagsOffset);
1374 }
1375 
1376 
1377 ExternalReference ExternalReference::ForDeoptEntry(Address entry) {
1378   return ExternalReference(entry);
1379 }
1380 
1381 
1382 ExternalReference ExternalReference::cpu_features() {
1383   DCHECK(CpuFeatures::initialized_);
1384   return ExternalReference(&CpuFeatures::supported_);
1385 }
1386 
1387 
1388 ExternalReference ExternalReference::debug_is_active_address(
1389     Isolate* isolate) {
1390   return ExternalReference(isolate->debug()->is_active_address());
1391 }
1392 
1393 
1394 ExternalReference ExternalReference::debug_after_break_target_address(
1395     Isolate* isolate) {
1396   return ExternalReference(isolate->debug()->after_break_target_address());
1397 }
1398 
1399 
1400 ExternalReference
1401     ExternalReference::debug_restarter_frame_function_pointer_address(
1402         Isolate* isolate) {
1403   return ExternalReference(
1404       isolate->debug()->restarter_frame_function_pointer_address());
1405 }
1406 
1407 
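// Computes x^y for the runtime, dispatching on the exponent: integral y uses
// power_double_int below, y == +/-0.5 uses fast_sqrt (with infinite x
// special-cased, since sqrt would produce NaN for -Infinity), and all other
// exponents fall through to power_double_double.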
1408 double power_helper(double x, double y) {
1409   int y_int = static_cast<int>(y);
1410   if (y == y_int) {
1411     return power_double_int(x, y_int);  // Returns 1 if exponent is 0.
1412   }
1413   if (y == 0.5) {
1414     return (std::isinf(x)) ? V8_INFINITY
1415                            : fast_sqrt(x + 0.0);  // Convert -0 to +0.
1416   }
1417   if (y == -0.5) {
1418     return (std::isinf(x)) ? 0 : 1.0 / fast_sqrt(x + 0.0);  // Convert -0 to +0.
1419   }
1420   return power_double_double(x, y);
1421 }
1422 
1423 
1424 // Helper function to compute x^y, where y is known to be an
1425 // integer. Uses binary decomposition to limit the number of
1426 // multiplications; see the discussion in "Hacker's Delight" by Henry
1427 // S. Warren, Jr., figure 11-6, page 213.
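// For example, y = 11 (binary 1011) yields p = x * x^2 * x^8 = x^11 after two
// loop iterations, since each iteration consumes two bits of the exponent.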
1428 double power_double_int(double x, int y) {
1429   double m = (y < 0) ? 1 / x : x;
1430   unsigned n = (y < 0) ? -y : y;
1431   double p = 1;
1432   while (n != 0) {
1433     if ((n & 1) != 0) p *= m;
1434     m *= m;
1435     if ((n & 2) != 0) p *= m;
1436     m *= m;
1437     n >>= 2;
1438   }
1439   return p;
1440 }
1441 
1442 
1443 double power_double_double(double x, double y) {
1444 #if defined(__MINGW64_VERSION_MAJOR) && \
1445     (!defined(__MINGW64_VERSION_RC) || __MINGW64_VERSION_RC < 1)
1446   // MinGW64 has a custom implementation for pow.  This handles certain
1447   // special cases that are different.
1448   if ((x == 0.0 || std::isinf(x)) && std::isfinite(y)) {
1449     double f;
1450     if (std::modf(y, &f) != 0.0) {
1451       return ((x == 0.0) ^ (y > 0)) ? V8_INFINITY : 0;
1452     }
1453   }
1454 
1455   if (x == 2.0) {
1456     int y_int = static_cast<int>(y);
1457     if (y == y_int) {
1458       return std::ldexp(1.0, y_int);
1459     }
1460   }
1461 #endif
1462 
1463   // The checks for special cases can be dropped in ia32 because they have
1464   // already been done in generated code before bailing out here.
1465   if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
1466     return base::OS::nan_value();
1467   }
1468   return std::pow(x, y);
1469 }
1470 
1471 
1472 ExternalReference ExternalReference::power_double_double_function(
1473     Isolate* isolate) {
1474   return ExternalReference(Redirect(isolate,
1475                                     FUNCTION_ADDR(power_double_double),
1476                                     BUILTIN_FP_FP_CALL));
1477 }
1478 
1479 
1480 ExternalReference ExternalReference::power_double_int_function(
1481     Isolate* isolate) {
1482   return ExternalReference(Redirect(isolate,
1483                                     FUNCTION_ADDR(power_double_int),
1484                                     BUILTIN_FP_INT_CALL));
1485 }
1486 
1487 
1488 bool EvalComparison(Token::Value op, double op1, double op2) {
1489   DCHECK(Token::IsCompareOp(op));
1490   switch (op) {
1491     case Token::EQ:
1492     case Token::EQ_STRICT: return (op1 == op2);
1493     case Token::NE: return (op1 != op2);
1494     case Token::LT: return (op1 < op2);
1495     case Token::GT: return (op1 > op2);
1496     case Token::LTE: return (op1 <= op2);
1497     case Token::GTE: return (op1 >= op2);
1498     default:
1499       UNREACHABLE();
1500       return false;
1501   }
1502 }
1503 
1504 
1505 ExternalReference ExternalReference::mod_two_doubles_operation(
1506     Isolate* isolate) {
1507   return ExternalReference(Redirect(isolate,
1508                                     FUNCTION_ADDR(modulo),
1509                                     BUILTIN_FP_FP_CALL));
1510 }
1511 
1512 
1513 ExternalReference ExternalReference::debug_break(Isolate* isolate) {
1514   return ExternalReference(Redirect(isolate, FUNCTION_ADDR(Debug_Break)));
1515 }
1516 
1517 
1518 ExternalReference ExternalReference::debug_step_in_fp_address(
1519     Isolate* isolate) {
1520   return ExternalReference(isolate->debug()->step_in_fp_addr());
1521 }
1522 
1523 
1524 void PositionsRecorder::RecordPosition(int pos) {
1525   DCHECK(pos != RelocInfo::kNoPosition);
1526   DCHECK(pos >= 0);
1527   state_.current_position = pos;
1528   LOG_CODE_EVENT(assembler_->isolate(),
1529                  CodeLinePosInfoAddPositionEvent(jit_handler_data_,
1530                                                  assembler_->pc_offset(),
1531                                                  pos));
1532 }
1533 
1534 
1535 void PositionsRecorder::RecordStatementPosition(int pos) {
1536   DCHECK(pos != RelocInfo::kNoPosition);
1537   DCHECK(pos >= 0);
1538   state_.current_statement_position = pos;
1539   LOG_CODE_EVENT(assembler_->isolate(),
1540                  CodeLinePosInfoAddStatementPositionEvent(
1541                      jit_handler_data_,
1542                      assembler_->pc_offset(),
1543                      pos));
1544 }
1545 
1546 
1547 bool PositionsRecorder::WriteRecordedPositions() {
1548   bool written = false;
1549 
1550   // Write the statement position if it is different from what was written last
1551   // time.
1552   if (state_.current_statement_position != state_.written_statement_position) {
1553     EnsureSpace ensure_space(assembler_);
1554     assembler_->RecordRelocInfo(RelocInfo::STATEMENT_POSITION,
1555                                 state_.current_statement_position);
1556     state_.written_statement_position = state_.current_statement_position;
1557     written = true;
1558   }
1559 
1560   // Write the position if it is different from what was written last time and
1561   // also different from the written statement position.
1562   if (state_.current_position != state_.written_position &&
1563       state_.current_position != state_.written_statement_position) {
1564     EnsureSpace ensure_space(assembler_);
1565     assembler_->RecordRelocInfo(RelocInfo::POSITION, state_.current_position);
1566     state_.written_position = state_.current_position;
1567     written = true;
1568   }
1569 
1570   // Return whether something was written.
1571   return written;
1572 }
1573 
1574 } }  // namespace v8::internal
1575