1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are
6 // met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution.
14 //
15 // - Neither the name of Sun Microsystems or the names of contributors may
16 // be used to endorse or promote products derived from this software without
17 // specific prior written permission.
18 //
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
20 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
26 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 
31 // The original source code covered by the above license has been
32 // modified significantly by Google Inc.
33 // Copyright 2012 the V8 project authors. All rights reserved.
34 
35 #include "src/assembler.h"
36 
37 #include <math.h>
38 #include <cmath>
39 #include "src/api.h"
40 #include "src/base/cpu.h"
41 #include "src/base/functional.h"
42 #include "src/base/ieee754.h"
43 #include "src/base/lazy-instance.h"
44 #include "src/base/platform/platform.h"
45 #include "src/base/utils/random-number-generator.h"
46 #include "src/builtins.h"
47 #include "src/codegen.h"
48 #include "src/counters.h"
49 #include "src/debug/debug.h"
50 #include "src/deoptimizer.h"
51 #include "src/disassembler.h"
52 #include "src/execution.h"
53 #include "src/ic/ic.h"
54 #include "src/ic/stub-cache.h"
55 #include "src/interpreter/interpreter.h"
56 #include "src/ostreams.h"
57 #include "src/regexp/jsregexp.h"
58 #include "src/regexp/regexp-macro-assembler.h"
59 #include "src/regexp/regexp-stack.h"
60 #include "src/register-configuration.h"
61 #include "src/runtime/runtime.h"
62 #include "src/simulator.h"  // For flushing instruction cache.
63 #include "src/snapshot/serializer-common.h"
64 #include "src/wasm/wasm-external-refs.h"
65 
66 #if V8_TARGET_ARCH_IA32
67 #include "src/ia32/assembler-ia32-inl.h"  // NOLINT
68 #elif V8_TARGET_ARCH_X64
69 #include "src/x64/assembler-x64-inl.h"  // NOLINT
70 #elif V8_TARGET_ARCH_ARM64
71 #include "src/arm64/assembler-arm64-inl.h"  // NOLINT
72 #elif V8_TARGET_ARCH_ARM
73 #include "src/arm/assembler-arm-inl.h"  // NOLINT
74 #elif V8_TARGET_ARCH_PPC
75 #include "src/ppc/assembler-ppc-inl.h"  // NOLINT
76 #elif V8_TARGET_ARCH_MIPS
77 #include "src/mips/assembler-mips-inl.h"  // NOLINT
78 #elif V8_TARGET_ARCH_MIPS64
79 #include "src/mips64/assembler-mips64-inl.h"  // NOLINT
80 #elif V8_TARGET_ARCH_S390
81 #include "src/s390/assembler-s390-inl.h"  // NOLINT
82 #elif V8_TARGET_ARCH_X87
83 #include "src/x87/assembler-x87-inl.h"  // NOLINT
84 #else
85 #error "Unknown architecture."
86 #endif
87 
88 // Include native regexp-macro-assembler.
89 #ifndef V8_INTERPRETED_REGEXP
90 #if V8_TARGET_ARCH_IA32
91 #include "src/regexp/ia32/regexp-macro-assembler-ia32.h"  // NOLINT
92 #elif V8_TARGET_ARCH_X64
93 #include "src/regexp/x64/regexp-macro-assembler-x64.h"  // NOLINT
94 #elif V8_TARGET_ARCH_ARM64
95 #include "src/regexp/arm64/regexp-macro-assembler-arm64.h"  // NOLINT
96 #elif V8_TARGET_ARCH_ARM
97 #include "src/regexp/arm/regexp-macro-assembler-arm.h"  // NOLINT
98 #elif V8_TARGET_ARCH_PPC
99 #include "src/regexp/ppc/regexp-macro-assembler-ppc.h"  // NOLINT
100 #elif V8_TARGET_ARCH_MIPS
101 #include "src/regexp/mips/regexp-macro-assembler-mips.h"  // NOLINT
102 #elif V8_TARGET_ARCH_MIPS64
103 #include "src/regexp/mips64/regexp-macro-assembler-mips64.h"  // NOLINT
104 #elif V8_TARGET_ARCH_S390
105 #include "src/regexp/s390/regexp-macro-assembler-s390.h"  // NOLINT
106 #elif V8_TARGET_ARCH_X87
107 #include "src/regexp/x87/regexp-macro-assembler-x87.h"  // NOLINT
108 #else  // Unknown architecture.
109 #error "Unknown architecture."
110 #endif  // Target architecture.
111 #endif  // V8_INTERPRETED_REGEXP
112 
113 namespace v8 {
114 namespace internal {
115 
116 // -----------------------------------------------------------------------------
117 // Common double constants.
118 
119 struct DoubleConstant BASE_EMBEDDED {
120   double min_int;
121   double one_half;
122   double minus_one_half;
123   double negative_infinity;
124   double the_hole_nan;
125   double uint32_bias;
126 };
127 
128 static DoubleConstant double_constants;
129 
130 const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
131 
132 // -----------------------------------------------------------------------------
133 // Implementation of AssemblerBase
134 
135 AssemblerBase::AssemblerBase(Isolate* isolate, void* buffer, int buffer_size)
136     : isolate_(isolate),
137       jit_cookie_(0),
138       enabled_cpu_features_(0),
139       emit_debug_code_(FLAG_debug_code),
140       predictable_code_size_(false),
141       // We may use the assembler without an isolate.
142       serializer_enabled_(isolate && isolate->serializer_enabled()),
143       constant_pool_available_(false) {
144   DCHECK_NOT_NULL(isolate);
145   if (FLAG_mask_constants_with_cookie) {
146     jit_cookie_ = isolate->random_number_generator()->NextInt();
147   }
148   own_buffer_ = buffer == NULL;
149   if (buffer_size == 0) buffer_size = kMinimalBufferSize;
150   DCHECK(buffer_size > 0);
151   if (own_buffer_) buffer = NewArray<byte>(buffer_size);
152   buffer_ = static_cast<byte*>(buffer);
153   buffer_size_ = buffer_size;
154 
155   pc_ = buffer_;
156 }
157 
158 
159 AssemblerBase::~AssemblerBase() {
160   if (own_buffer_) DeleteArray(buffer_);
161 }
162 
163 
164 void AssemblerBase::FlushICache(Isolate* isolate, void* start, size_t size) {
165   if (size == 0) return;
166 
167 #if defined(USE_SIMULATOR)
168   Simulator::FlushICache(isolate->simulator_i_cache(), start, size);
169 #else
170   CpuFeatures::FlushICache(start, size);
171 #endif  // USE_SIMULATOR
172 }
173 
174 
175 void AssemblerBase::Print() {
176   OFStream os(stdout);
177   v8::internal::Disassembler::Decode(isolate(), &os, buffer_, pc_, nullptr);
178 }
179 
180 
181 // -----------------------------------------------------------------------------
182 // Implementation of PredictableCodeSizeScope
183 
184 PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler)
185     : PredictableCodeSizeScope(assembler, -1) {}
186 
187 
188 PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler,
189                                                    int expected_size)
190     : assembler_(assembler),
191       expected_size_(expected_size),
192       start_offset_(assembler->pc_offset()),
193       old_value_(assembler->predictable_code_size()) {
194   assembler_->set_predictable_code_size(true);
195 }
196 
197 
198 PredictableCodeSizeScope::~PredictableCodeSizeScope() {
199   // TODO(svenpanne) Remove the 'if' when everything works.
200   if (expected_size_ >= 0) {
201     CHECK_EQ(expected_size_, assembler_->pc_offset() - start_offset_);
202   }
203   assembler_->set_predictable_code_size(old_value_);
204 }
205 
206 
207 // -----------------------------------------------------------------------------
208 // Implementation of CpuFeatureScope
209 
210 #ifdef DEBUG
211 CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f)
212     : assembler_(assembler) {
213   DCHECK(CpuFeatures::IsSupported(f));
214   old_enabled_ = assembler_->enabled_cpu_features();
215   uint64_t mask = static_cast<uint64_t>(1) << f;
216   // TODO(svenpanne) This special case below doesn't belong here!
217 #if V8_TARGET_ARCH_ARM
218   // ARMv7 is implied by VFP3.
219   if (f == VFP3) {
220     mask |= static_cast<uint64_t>(1) << ARMv7;
221   }
222 #endif
223   assembler_->set_enabled_cpu_features(old_enabled_ | mask);
224 }
225 
226 
227 CpuFeatureScope::~CpuFeatureScope() {
228   assembler_->set_enabled_cpu_features(old_enabled_);
229 }
230 #endif
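// A minimal usage sketch (illustrative, not part of this file; `masm` is a
// hypothetical assembler and SSE4_1 stands in for any supported CpuFeature):
//
//   if (CpuFeatures::IsSupported(SSE4_1)) {
//     CpuFeatureScope scope(&masm, SSE4_1);
//     // Instructions guarded by SSE4_1 may be emitted here; the previous
//     // feature set is restored when the scope is destroyed.
//   }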
231 
232 
233 bool CpuFeatures::initialized_ = false;
234 unsigned CpuFeatures::supported_ = 0;
235 unsigned CpuFeatures::icache_line_size_ = 0;
236 unsigned CpuFeatures::dcache_line_size_ = 0;
237 
238 // -----------------------------------------------------------------------------
239 // Implementation of Label
240 
241 int Label::pos() const {
242   if (pos_ < 0) return -pos_ - 1;
243   if (pos_ > 0) return  pos_ - 1;
244   UNREACHABLE();
245   return 0;
246 }
247 
248 
249 // -----------------------------------------------------------------------------
250 // Implementation of RelocInfoWriter and RelocIterator
251 //
252 // Relocation information is written backwards in memory, from high addresses
253 // towards low addresses, byte by byte.  Therefore, in the encodings listed
254 // below, the first byte listed is at the highest address, and successive
255 // bytes in the record are at progressively lower addresses.
256 //
257 // Encoding
258 //
259 // The most common modes are given single-byte encodings.  Also, it is
260 // easy to identify the type of reloc info and skip unwanted modes in
261 // an iteration.
262 //
263 // The encoding relies on the fact that there are fewer than 14
264 // different relocation modes using standard non-compact encoding.
265 //
266 // The first byte of a relocation record has a tag in its low 2 bits:
267 // Here are the record schemes, depending on the low tag and optional higher
268 // tags.
269 //
270 // Low tag:
271 //   00: embedded_object:      [6-bit pc delta] 00
272 //
273 //   01: code_target:          [6-bit pc delta] 01
274 //
275 //   10: short_data_record:    [6-bit pc delta] 10 followed by
276 //                             [6-bit data delta] [2-bit data type tag]
277 //
278 //   11: long_record           [6 bit reloc mode] 11
279 //                             followed by pc delta
280 //                             followed by optional data depending on type.
281 //
282 //  2-bit data type tags, used in short_data_record and data_jump long_record:
283 //   code_target_with_id: 00
284 //   position:            01
285 //   statement_position:  10
286 //   deopt_reason:        11
287 //
288 //  If a pc delta exceeds 6 bits, it is split into a remainder that fits into
289 //  6 bits and a part that does not. The latter is encoded as a long record
290 //  with PC_JUMP as pseudo reloc info mode. The former is encoded as part of
291 //  the following record in the usual way. The long pc jump record has variable
292 //  length:
293 //               pc-jump:        [PC_JUMP] 11
294 //                               [7 bits data] 0
295 //                                  ...
296 //                               [7 bits data] 1
297 //               (Bits 6..31 of pc delta, with leading zeroes
298 //                dropped, and last non-zero chunk tagged with 1.)
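//
// Worked example (illustrative): an EMBEDDED_OBJECT record with a pc delta
// of 70 does not fit the 6-bit short form (70 > 63), so the writer first
// emits a PC_JUMP long record carrying bits 6..31 of the delta
// (70 >> 6 == 1, a single chunk, tagged as the last one) and then the short
// tagged byte with the remaining low 6 bits (70 & 63 == 6):
//               [PC_JUMP] 11     (long record mode byte)
//               [0000001] 1      (pc-jump chunk, last-chunk tag set)
//               [000110] 00      (6-bit pc delta remainder, embedded_object tag)
// The reader reverses this: AdvanceReadLongPCJump() accumulates the chunks
// and shifts them left by kSmallPCDeltaBits, and the following short tagged
// byte contributes the low bits together with the mode tag.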
299 
300 const int kTagBits = 2;
301 const int kTagMask = (1 << kTagBits) - 1;
302 const int kLongTagBits = 6;
303 const int kShortDataTypeTagBits = 2;
304 const int kShortDataBits = kBitsPerByte - kShortDataTypeTagBits;
305 
306 const int kEmbeddedObjectTag = 0;
307 const int kCodeTargetTag = 1;
308 const int kLocatableTag = 2;
309 const int kDefaultTag = 3;
310 
311 const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
312 const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
313 const int RelocInfo::kMaxSmallPCDelta = kSmallPCDeltaMask;
314 
315 const int kChunkBits = 7;
316 const int kChunkMask = (1 << kChunkBits) - 1;
317 const int kLastChunkTagBits = 1;
318 const int kLastChunkTagMask = 1;
319 const int kLastChunkTag = 1;
320 
321 const int kCodeWithIdTag = 0;
322 const int kNonstatementPositionTag = 1;
323 const int kStatementPositionTag = 2;
324 const int kDeoptReasonTag = 3;
325 
326 void RelocInfo::update_wasm_memory_reference(
327     Address old_base, Address new_base, uint32_t old_size, uint32_t new_size,
328     ICacheFlushMode icache_flush_mode) {
329   DCHECK(IsWasmMemoryReference(rmode_) || IsWasmMemorySizeReference(rmode_));
330   if (IsWasmMemoryReference(rmode_)) {
331     Address updated_reference;
332     DCHECK(old_size == 0 || Memory::IsAddressInRange(
333                                 old_base, wasm_memory_reference(), old_size));
334     updated_reference = new_base + (wasm_memory_reference() - old_base);
335     DCHECK(new_size == 0 ||
336            Memory::IsAddressInRange(new_base, updated_reference, new_size));
337     unchecked_update_wasm_memory_reference(updated_reference,
338                                            icache_flush_mode);
339   } else if (IsWasmMemorySizeReference(rmode_)) {
340     uint32_t updated_size_reference;
341     DCHECK(old_size == 0 || wasm_memory_size_reference() <= old_size);
342     updated_size_reference =
343         new_size + (wasm_memory_size_reference() - old_size);
344     DCHECK(updated_size_reference <= new_size);
345     unchecked_update_wasm_memory_size(updated_size_reference,
346                                       icache_flush_mode);
347   } else {
348     UNREACHABLE();
349   }
350   if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
351     Assembler::FlushICache(isolate_, pc_, sizeof(int64_t));
352   }
353 }
354 
355 void RelocInfo::update_wasm_global_reference(
356     Address old_base, Address new_base, ICacheFlushMode icache_flush_mode) {
357   DCHECK(IsWasmGlobalReference(rmode_));
358   Address updated_reference;
359   DCHECK(reinterpret_cast<uintptr_t>(old_base) <=
360          reinterpret_cast<uintptr_t>(wasm_global_reference()));
361   updated_reference = new_base + (wasm_global_reference() - old_base);
362   DCHECK(reinterpret_cast<uintptr_t>(new_base) <=
363          reinterpret_cast<uintptr_t>(updated_reference));
364   unchecked_update_wasm_memory_reference(updated_reference, icache_flush_mode);
365   if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
366     Assembler::FlushICache(isolate_, pc_, sizeof(int32_t));
367   }
368 }
369 
370 uint32_t RelocInfoWriter::WriteLongPCJump(uint32_t pc_delta) {
371   // Return if the pc_delta can fit in kSmallPCDeltaBits bits.
372   // Otherwise write a variable length PC jump for the bits that do
373   // not fit in the kSmallPCDeltaBits bits.
374   if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
375   WriteMode(RelocInfo::PC_JUMP);
376   uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
377   DCHECK(pc_jump > 0);
378   // Write kChunkBits size chunks of the pc_jump.
379   for (; pc_jump > 0; pc_jump = pc_jump >> kChunkBits) {
380     byte b = pc_jump & kChunkMask;
381     *--pos_ = b << kLastChunkTagBits;
382   }
383   // Tag the last chunk so it can be identified.
384   *pos_ = *pos_ | kLastChunkTag;
385   // Return the remaining kSmallPCDeltaBits of the pc_delta.
386   return pc_delta & kSmallPCDeltaMask;
387 }
388 
389 
390 void RelocInfoWriter::WriteShortTaggedPC(uint32_t pc_delta, int tag) {
391   // Write a byte of tagged pc-delta, possibly preceded by an explicit pc-jump.
392   pc_delta = WriteLongPCJump(pc_delta);
393   *--pos_ = pc_delta << kTagBits | tag;
394 }
395 
396 
397 void RelocInfoWriter::WriteShortTaggedData(intptr_t data_delta, int tag) {
398   *--pos_ = static_cast<byte>(data_delta << kShortDataTypeTagBits | tag);
399 }
400 
401 
402 void RelocInfoWriter::WriteMode(RelocInfo::Mode rmode) {
403   STATIC_ASSERT(RelocInfo::NUMBER_OF_MODES <= (1 << kLongTagBits));
404   *--pos_ = static_cast<int>((rmode << kTagBits) | kDefaultTag);
405 }
406 
407 
408 void RelocInfoWriter::WriteModeAndPC(uint32_t pc_delta, RelocInfo::Mode rmode) {
409   // Write two-byte tagged pc-delta, possibly preceded by var. length pc-jump.
410   pc_delta = WriteLongPCJump(pc_delta);
411   WriteMode(rmode);
412   *--pos_ = pc_delta;
413 }
414 
415 
416 void RelocInfoWriter::WriteIntData(int number) {
417   for (int i = 0; i < kIntSize; i++) {
418     *--pos_ = static_cast<byte>(number);
419     // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
420     number = number >> kBitsPerByte;
421   }
422 }
423 
424 
425 void RelocInfoWriter::WriteData(intptr_t data_delta) {
426   for (int i = 0; i < kIntptrSize; i++) {
427     *--pos_ = static_cast<byte>(data_delta);
428     // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
429     data_delta = data_delta >> kBitsPerByte;
430   }
431 }
432 
433 
434 void RelocInfoWriter::WritePosition(int pc_delta, int pos_delta,
435                                     RelocInfo::Mode rmode) {
436   int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag
437                                                     : kStatementPositionTag;
438   // Check if delta is small enough to fit in a tagged byte.
439   if (is_intn(pos_delta, kShortDataBits)) {
440     WriteShortTaggedPC(pc_delta, kLocatableTag);
441     WriteShortTaggedData(pos_delta, pos_type_tag);
442   } else {
443     // Otherwise, use costly encoding.
444     WriteModeAndPC(pc_delta, rmode);
445     WriteIntData(pos_delta);
446   }
447 }
448 
449 
450 void RelocInfoWriter::FlushPosition() {
451   if (!next_position_candidate_flushed_) {
452     WritePosition(next_position_candidate_pc_delta_,
453                   next_position_candidate_pos_delta_, RelocInfo::POSITION);
454     next_position_candidate_pos_delta_ = 0;
455     next_position_candidate_pc_delta_ = 0;
456     next_position_candidate_flushed_ = true;
457   }
458 }
459 
460 
461 void RelocInfoWriter::Write(const RelocInfo* rinfo) {
462   RelocInfo::Mode rmode = rinfo->rmode();
463   if (rmode != RelocInfo::POSITION) {
464     FlushPosition();
465   }
466 #ifdef DEBUG
467   byte* begin_pos = pos_;
468 #endif
469   DCHECK(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
470   DCHECK(rinfo->pc() - last_pc_ >= 0);
471   // Use unsigned delta-encoding for pc.
472   uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
473 
474   // The two most common modes are given small tags, and usually fit in a byte.
475   if (rmode == RelocInfo::EMBEDDED_OBJECT) {
476     WriteShortTaggedPC(pc_delta, kEmbeddedObjectTag);
477   } else if (rmode == RelocInfo::CODE_TARGET) {
478     WriteShortTaggedPC(pc_delta, kCodeTargetTag);
479     DCHECK(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
480   } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
481     // Use signed delta-encoding for id.
482     DCHECK_EQ(static_cast<int>(rinfo->data()), rinfo->data());
483     int id_delta = static_cast<int>(rinfo->data()) - last_id_;
484     // Check if delta is small enough to fit in a tagged byte.
485     if (is_intn(id_delta, kShortDataBits)) {
486       WriteShortTaggedPC(pc_delta, kLocatableTag);
487       WriteShortTaggedData(id_delta, kCodeWithIdTag);
488     } else {
489       // Otherwise, use costly encoding.
490       WriteModeAndPC(pc_delta, rmode);
491       WriteIntData(id_delta);
492     }
493     last_id_ = static_cast<int>(rinfo->data());
494   } else if (rmode == RelocInfo::DEOPT_REASON) {
495     DCHECK(rinfo->data() < (1 << kShortDataBits));
496     WriteShortTaggedPC(pc_delta, kLocatableTag);
497     WriteShortTaggedData(rinfo->data(), kDeoptReasonTag);
498   } else if (RelocInfo::IsPosition(rmode)) {
499     // Use signed delta-encoding for position.
500     DCHECK_EQ(static_cast<int>(rinfo->data()), rinfo->data());
501     int pos_delta = static_cast<int>(rinfo->data()) - last_position_;
502     if (rmode == RelocInfo::STATEMENT_POSITION) {
503       WritePosition(pc_delta, pos_delta, rmode);
504     } else {
505       DCHECK_EQ(rmode, RelocInfo::POSITION);
506       if (pc_delta != 0 || last_mode_ != RelocInfo::POSITION) {
507         FlushPosition();
508         next_position_candidate_pc_delta_ = pc_delta;
509         next_position_candidate_pos_delta_ = pos_delta;
510       } else {
511         next_position_candidate_pos_delta_ += pos_delta;
512       }
513       next_position_candidate_flushed_ = false;
514     }
515     last_position_ = static_cast<int>(rinfo->data());
516   } else {
517     WriteModeAndPC(pc_delta, rmode);
518     if (RelocInfo::IsComment(rmode)) {
519       WriteData(rinfo->data());
520     } else if (RelocInfo::IsConstPool(rmode) ||
521                RelocInfo::IsVeneerPool(rmode) ||
522                RelocInfo::IsDeoptId(rmode)) {
523       WriteIntData(static_cast<int>(rinfo->data()));
524     }
525   }
526   last_pc_ = rinfo->pc();
527   last_mode_ = rmode;
528 #ifdef DEBUG
529   DCHECK(begin_pos - pos_ <= kMaxSize);
530 #endif
531 }
532 
533 
534 inline int RelocIterator::AdvanceGetTag() {
535   return *--pos_ & kTagMask;
536 }
537 
538 
539 inline RelocInfo::Mode RelocIterator::GetMode() {
540   return static_cast<RelocInfo::Mode>((*pos_ >> kTagBits) &
541                                       ((1 << kLongTagBits) - 1));
542 }
543 
544 
545 inline void RelocIterator::ReadShortTaggedPC() {
546   rinfo_.pc_ += *pos_ >> kTagBits;
547 }
548 
549 
550 inline void RelocIterator::AdvanceReadPC() {
551   rinfo_.pc_ += *--pos_;
552 }
553 
554 
555 void RelocIterator::AdvanceReadId() {
556   int x = 0;
557   for (int i = 0; i < kIntSize; i++) {
558     x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
559   }
560   last_id_ += x;
561   rinfo_.data_ = last_id_;
562 }
563 
564 
565 void RelocIterator::AdvanceReadInt() {
566   int x = 0;
567   for (int i = 0; i < kIntSize; i++) {
568     x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
569   }
570   rinfo_.data_ = x;
571 }
572 
573 
574 void RelocIterator::AdvanceReadPosition() {
575   int x = 0;
576   for (int i = 0; i < kIntSize; i++) {
577     x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
578   }
579   last_position_ += x;
580   rinfo_.data_ = last_position_;
581 }
582 
583 
584 void RelocIterator::AdvanceReadData() {
585   intptr_t x = 0;
586   for (int i = 0; i < kIntptrSize; i++) {
587     x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
588   }
589   rinfo_.data_ = x;
590 }
591 
592 
593 void RelocIterator::AdvanceReadLongPCJump() {
594   // Read the 32-kSmallPCDeltaBits most significant bits of the
595   // pc jump in kChunkBits bit chunks and shift them into place.
596   // Stop when the last chunk is encountered.
597   uint32_t pc_jump = 0;
598   for (int i = 0; i < kIntSize; i++) {
599     byte pc_jump_part = *--pos_;
600     pc_jump |= (pc_jump_part >> kLastChunkTagBits) << i * kChunkBits;
601     if ((pc_jump_part & kLastChunkTagMask) == 1) break;
602   }
603   // The least significant kSmallPCDeltaBits bits will be added
604   // later.
605   rinfo_.pc_ += pc_jump << kSmallPCDeltaBits;
606 }
607 
608 
609 inline int RelocIterator::GetShortDataTypeTag() {
610   return *pos_ & ((1 << kShortDataTypeTagBits) - 1);
611 }
612 
613 
614 inline void RelocIterator::ReadShortTaggedId() {
615   int8_t signed_b = *pos_;
616   // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
617   last_id_ += signed_b >> kShortDataTypeTagBits;
618   rinfo_.data_ = last_id_;
619 }
620 
621 
622 inline void RelocIterator::ReadShortTaggedPosition() {
623   int8_t signed_b = *pos_;
624   // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
625   last_position_ += signed_b >> kShortDataTypeTagBits;
626   rinfo_.data_ = last_position_;
627 }
628 
629 
630 inline void RelocIterator::ReadShortTaggedData() {
631   uint8_t unsigned_b = *pos_;
632   rinfo_.data_ = unsigned_b >> kTagBits;
633 }
634 
635 
636 static inline RelocInfo::Mode GetPositionModeFromTag(int tag) {
637   DCHECK(tag == kNonstatementPositionTag ||
638          tag == kStatementPositionTag);
639   return (tag == kNonstatementPositionTag) ?
640          RelocInfo::POSITION :
641          RelocInfo::STATEMENT_POSITION;
642 }
643 
644 
645 void RelocIterator::next() {
646   DCHECK(!done());
647   // Basically, do the opposite of RelocInfoWriter::Write.
648   // Reading of data is avoided as far as possible for unwanted modes,
649   // but we must always update the pc.
650   //
651   // We exit this loop by returning when we find a mode we want.
652   while (pos_ > end_) {
653     int tag = AdvanceGetTag();
654     if (tag == kEmbeddedObjectTag) {
655       ReadShortTaggedPC();
656       if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
657     } else if (tag == kCodeTargetTag) {
658       ReadShortTaggedPC();
659       if (SetMode(RelocInfo::CODE_TARGET)) return;
660     } else if (tag == kLocatableTag) {
661       ReadShortTaggedPC();
662       Advance();
663       int data_type_tag = GetShortDataTypeTag();
664       if (data_type_tag == kCodeWithIdTag) {
665         if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
666           ReadShortTaggedId();
667           return;
668         }
669       } else if (data_type_tag == kDeoptReasonTag) {
670         if (SetMode(RelocInfo::DEOPT_REASON)) {
671           ReadShortTaggedData();
672           return;
673         }
674       } else {
675         DCHECK(data_type_tag == kNonstatementPositionTag ||
676                data_type_tag == kStatementPositionTag);
677         if (mode_mask_ & RelocInfo::kPositionMask) {
678           // Always update the position if we are interested in either
679           // statement positions or non-statement positions.
680           ReadShortTaggedPosition();
681           if (SetMode(GetPositionModeFromTag(data_type_tag))) return;
682         }
683       }
684     } else {
685       DCHECK(tag == kDefaultTag);
686       RelocInfo::Mode rmode = GetMode();
687       if (rmode == RelocInfo::PC_JUMP) {
688         AdvanceReadLongPCJump();
689       } else {
690         AdvanceReadPC();
691         if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
692           if (SetMode(rmode)) {
693             AdvanceReadId();
694             return;
695           }
696           Advance(kIntSize);
697         } else if (RelocInfo::IsComment(rmode)) {
698           if (SetMode(rmode)) {
699             AdvanceReadData();
700             return;
701           }
702           Advance(kIntptrSize);
703         } else if (RelocInfo::IsPosition(rmode)) {
704           if (mode_mask_ & RelocInfo::kPositionMask) {
705             // Always update the position if we are interested in either
706             // statement positions or non-statement positions.
707             AdvanceReadPosition();
708             if (SetMode(rmode)) return;
709           } else {
710             Advance(kIntSize);
711           }
712         } else if (RelocInfo::IsConstPool(rmode) ||
713                    RelocInfo::IsVeneerPool(rmode) ||
714                    RelocInfo::IsDeoptId(rmode)) {
715           if (SetMode(rmode)) {
716             AdvanceReadInt();
717             return;
718           }
719           Advance(kIntSize);
720         } else if (SetMode(static_cast<RelocInfo::Mode>(rmode))) {
721           return;
722         }
723       }
724     }
725   }
726   if (code_age_sequence_ != NULL) {
727     byte* old_code_age_sequence = code_age_sequence_;
728     code_age_sequence_ = NULL;
729     if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
730       rinfo_.data_ = 0;
731       rinfo_.pc_ = old_code_age_sequence;
732       return;
733     }
734   }
735   done_ = true;
736 }
737 
738 
739 RelocIterator::RelocIterator(Code* code, int mode_mask)
740     : rinfo_(code->map()->GetIsolate()) {
741   rinfo_.host_ = code;
742   rinfo_.pc_ = code->instruction_start();
743   rinfo_.data_ = 0;
744   // Relocation info is read backwards.
745   pos_ = code->relocation_start() + code->relocation_size();
746   end_ = code->relocation_start();
747   done_ = false;
748   mode_mask_ = mode_mask;
749   last_id_ = 0;
750   last_position_ = 0;
751   byte* sequence = code->FindCodeAgeSequence();
752   // We get the isolate from the map, because at serialization time
753   // the code pointer has been cloned and isn't really in heap space.
754   Isolate* isolate = code->map()->GetIsolate();
755   if (sequence != NULL && !Code::IsYoungSequence(isolate, sequence)) {
756     code_age_sequence_ = sequence;
757   } else {
758     code_age_sequence_ = NULL;
759   }
760   if (mode_mask_ == 0) pos_ = end_;
761   next();
762 }
763 
764 
765 RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask)
766     : rinfo_(desc.origin->isolate()) {
767   rinfo_.pc_ = desc.buffer;
768   rinfo_.data_ = 0;
769   // Relocation info is read backwards.
770   pos_ = desc.buffer + desc.buffer_size;
771   end_ = pos_ - desc.reloc_size;
772   done_ = false;
773   mode_mask_ = mode_mask;
774   last_id_ = 0;
775   last_position_ = 0;
776   code_age_sequence_ = NULL;
777   if (mode_mask_ == 0) pos_ = end_;
778   next();
779 }
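// A minimal usage sketch (illustrative, not part of this file; assumes a
// Code* `code` obtained elsewhere): callers typically walk only the modes
// they care about by passing a mode mask, e.g.
//
//   int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
//              RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
//   for (RelocIterator it(code, mask); !it.done(); it.next()) {
//     RelocInfo* info = it.rinfo();
//     // Inspect or patch the target referenced by *info.
//   }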
780 
781 
782 // -----------------------------------------------------------------------------
783 // Implementation of RelocInfo
784 
785 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
786   return DebugCodegen::DebugBreakSlotIsPatched(pc_);
787 }
788 
789 #ifdef DEBUG
790 bool RelocInfo::RequiresRelocation(const CodeDesc& desc) {
791   // Ensure there are no code targets or embedded objects present in the
792   // deoptimization entries; they would require relocation after code
793   // generation.
794   int mode_mask = RelocInfo::kCodeTargetMask |
795                   RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
796                   RelocInfo::ModeMask(RelocInfo::CELL) |
797                   RelocInfo::kApplyMask;
798   RelocIterator it(desc, mode_mask);
799   return !it.done();
800 }
801 #endif
802 
803 
804 #ifdef ENABLE_DISASSEMBLER
805 const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
806   switch (rmode) {
807     case NONE32:
808       return "no reloc 32";
809     case NONE64:
810       return "no reloc 64";
811     case EMBEDDED_OBJECT:
812       return "embedded object";
813     case DEBUGGER_STATEMENT:
814       return "debugger statement";
815     case CODE_TARGET:
816       return "code target";
817     case CODE_TARGET_WITH_ID:
818       return "code target with id";
819     case CELL:
820       return "property cell";
821     case RUNTIME_ENTRY:
822       return "runtime entry";
823     case COMMENT:
824       return "comment";
825     case POSITION:
826       return "position";
827     case STATEMENT_POSITION:
828       return "statement position";
829     case EXTERNAL_REFERENCE:
830       return "external reference";
831     case INTERNAL_REFERENCE:
832       return "internal reference";
833     case INTERNAL_REFERENCE_ENCODED:
834       return "encoded internal reference";
835     case DEOPT_REASON:
836       return "deopt reason";
837     case DEOPT_ID:
838       return "deopt index";
839     case CONST_POOL:
840       return "constant pool";
841     case VENEER_POOL:
842       return "veneer pool";
843     case DEBUG_BREAK_SLOT_AT_POSITION:
844       return "debug break slot at position";
845     case DEBUG_BREAK_SLOT_AT_RETURN:
846       return "debug break slot at return";
847     case DEBUG_BREAK_SLOT_AT_CALL:
848       return "debug break slot at call";
849     case DEBUG_BREAK_SLOT_AT_TAIL_CALL:
850       return "debug break slot at tail call";
851     case CODE_AGE_SEQUENCE:
852       return "code age sequence";
853     case GENERATOR_CONTINUATION:
854       return "generator continuation";
855     case WASM_MEMORY_REFERENCE:
856       return "wasm memory reference";
857     case WASM_MEMORY_SIZE_REFERENCE:
858       return "wasm memory size reference";
859     case WASM_GLOBAL_REFERENCE:
860       return "wasm global value reference";
861     case NUMBER_OF_MODES:
862     case PC_JUMP:
863       UNREACHABLE();
864       return "number_of_modes";
865   }
866   return "unknown relocation type";
867 }
868 
869 
870 void RelocInfo::Print(Isolate* isolate, std::ostream& os) {  // NOLINT
871   os << static_cast<const void*>(pc_) << "  " << RelocModeName(rmode_);
872   if (IsComment(rmode_)) {
873     os << "  (" << reinterpret_cast<char*>(data_) << ")";
874   } else if (rmode_ == DEOPT_REASON) {
875     os << "  (" << Deoptimizer::GetDeoptReason(
876                        static_cast<Deoptimizer::DeoptReason>(data_)) << ")";
877   } else if (rmode_ == EMBEDDED_OBJECT) {
878     os << "  (" << Brief(target_object()) << ")";
879   } else if (rmode_ == EXTERNAL_REFERENCE) {
880     ExternalReferenceEncoder ref_encoder(isolate);
881     os << " ("
882        << ref_encoder.NameOfAddress(isolate, target_external_reference())
883        << ")  (" << static_cast<const void*>(target_external_reference())
884        << ")";
885   } else if (IsCodeTarget(rmode_)) {
886     Code* code = Code::GetCodeFromTargetAddress(target_address());
887     os << " (" << Code::Kind2String(code->kind()) << ")  ("
888        << static_cast<const void*>(target_address()) << ")";
889     if (rmode_ == CODE_TARGET_WITH_ID) {
890       os << " (id=" << static_cast<int>(data_) << ")";
891     }
892   } else if (IsPosition(rmode_)) {
893     os << "  (" << data() << ")";
894   } else if (IsRuntimeEntry(rmode_) &&
895              isolate->deoptimizer_data() != NULL) {
896     // Deoptimization bailouts are stored as runtime entries.
897     int id = Deoptimizer::GetDeoptimizationId(
898         isolate, target_address(), Deoptimizer::EAGER);
899     if (id != Deoptimizer::kNotDeoptimizationEntry) {
900       os << "  (deoptimization bailout " << id << ")";
901     }
902   } else if (IsConstPool(rmode_)) {
903     os << " (size " << static_cast<int>(data_) << ")";
904   }
905 
906   os << "\n";
907 }
908 #endif  // ENABLE_DISASSEMBLER
909 
910 
911 #ifdef VERIFY_HEAP
912 void RelocInfo::Verify(Isolate* isolate) {
913   switch (rmode_) {
914     case EMBEDDED_OBJECT:
915       Object::VerifyPointer(target_object());
916       break;
917     case CELL:
918       Object::VerifyPointer(target_cell());
919       break;
920     case DEBUGGER_STATEMENT:
921     case CODE_TARGET_WITH_ID:
922     case CODE_TARGET: {
923       // convert inline target address to code object
924       Address addr = target_address();
925       CHECK(addr != NULL);
926       // Check that we can find the right code object.
927       Code* code = Code::GetCodeFromTargetAddress(addr);
928       Object* found = isolate->FindCodeObject(addr);
929       CHECK(found->IsCode());
930       CHECK(code->address() == HeapObject::cast(found)->address());
931       break;
932     }
933     case INTERNAL_REFERENCE:
934     case INTERNAL_REFERENCE_ENCODED: {
935       Address target = target_internal_reference();
936       Address pc = target_internal_reference_address();
937       Code* code = Code::cast(isolate->FindCodeObject(pc));
938       CHECK(target >= code->instruction_start());
939       CHECK(target <= code->instruction_end());
940       break;
941     }
942     case RUNTIME_ENTRY:
943     case COMMENT:
944     case POSITION:
945     case STATEMENT_POSITION:
946     case EXTERNAL_REFERENCE:
947     case DEOPT_REASON:
948     case DEOPT_ID:
949     case CONST_POOL:
950     case VENEER_POOL:
951     case DEBUG_BREAK_SLOT_AT_POSITION:
952     case DEBUG_BREAK_SLOT_AT_RETURN:
953     case DEBUG_BREAK_SLOT_AT_CALL:
954     case DEBUG_BREAK_SLOT_AT_TAIL_CALL:
955     case GENERATOR_CONTINUATION:
956     case WASM_MEMORY_REFERENCE:
957     case WASM_MEMORY_SIZE_REFERENCE:
958     case WASM_GLOBAL_REFERENCE:
959     case NONE32:
960     case NONE64:
961       break;
962     case NUMBER_OF_MODES:
963     case PC_JUMP:
964       UNREACHABLE();
965       break;
966     case CODE_AGE_SEQUENCE:
967       DCHECK(Code::IsYoungSequence(isolate, pc_) || code_age_stub()->IsCode());
968       break;
969   }
970 }
971 #endif  // VERIFY_HEAP
972 
973 
974 // Implementation of ExternalReference
975 
976 static ExternalReference::Type BuiltinCallTypeForResultSize(int result_size) {
977   switch (result_size) {
978     case 1:
979       return ExternalReference::BUILTIN_CALL;
980     case 2:
981       return ExternalReference::BUILTIN_CALL_PAIR;
982     case 3:
983       return ExternalReference::BUILTIN_CALL_TRIPLE;
984   }
985   UNREACHABLE();
986   return ExternalReference::BUILTIN_CALL;
987 }
988 
989 
990 void ExternalReference::SetUp() {
991   double_constants.min_int = kMinInt;
992   double_constants.one_half = 0.5;
993   double_constants.minus_one_half = -0.5;
994   double_constants.the_hole_nan = bit_cast<double>(kHoleNanInt64);
995   double_constants.negative_infinity = -V8_INFINITY;
996   double_constants.uint32_bias =
997     static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
998 }
999 
1000 
1001 ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate)
1002   : address_(Redirect(isolate, Builtins::c_function_address(id))) {}
1003 
1004 
1005 ExternalReference::ExternalReference(
1006     ApiFunction* fun,
1007     Type type = ExternalReference::BUILTIN_CALL,
1008     Isolate* isolate = NULL)
1009   : address_(Redirect(isolate, fun->address(), type)) {}
1010 
1011 
1012 ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate)
1013   : address_(isolate->builtins()->builtin_address(name)) {}
1014 
1015 
1016 ExternalReference::ExternalReference(Runtime::FunctionId id, Isolate* isolate)
1017     : ExternalReference(Runtime::FunctionForId(id), isolate) {}
1018 
1019 
1020 ExternalReference::ExternalReference(const Runtime::Function* f,
1021                                      Isolate* isolate)
1022     : address_(Redirect(isolate, f->entry,
1023                         BuiltinCallTypeForResultSize(f->result_size))) {}
1024 
1025 
1026 ExternalReference ExternalReference::isolate_address(Isolate* isolate) {
1027   return ExternalReference(isolate);
1028 }
1029 
1030 ExternalReference ExternalReference::interpreter_dispatch_table_address(
1031     Isolate* isolate) {
1032   return ExternalReference(isolate->interpreter()->dispatch_table_address());
1033 }
1034 
1035 ExternalReference ExternalReference::interpreter_dispatch_counters(
1036     Isolate* isolate) {
1037   return ExternalReference(
1038       isolate->interpreter()->bytecode_dispatch_counters_table());
1039 }
1040 
1041 ExternalReference::ExternalReference(StatsCounter* counter)
1042   : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}
1043 
1044 
1045 ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate)
1046   : address_(isolate->get_address_from_id(id)) {}
1047 
1048 
1049 ExternalReference::ExternalReference(const SCTableReference& table_ref)
1050   : address_(table_ref.address()) {}
1051 
1052 
1053 ExternalReference ExternalReference::
1054     incremental_marking_record_write_function(Isolate* isolate) {
1055   return ExternalReference(Redirect(
1056       isolate,
1057       FUNCTION_ADDR(IncrementalMarking::RecordWriteFromCode)));
1058 }
1059 
1060 ExternalReference
1061 ExternalReference::incremental_marking_record_write_code_entry_function(
1062     Isolate* isolate) {
1063   return ExternalReference(Redirect(
1064       isolate,
1065       FUNCTION_ADDR(IncrementalMarking::RecordWriteOfCodeEntryFromCode)));
1066 }
1067 
1068 ExternalReference ExternalReference::store_buffer_overflow_function(
1069     Isolate* isolate) {
1070   return ExternalReference(Redirect(
1071       isolate,
1072       FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow)));
1073 }
1074 
1075 
1076 ExternalReference ExternalReference::delete_handle_scope_extensions(
1077     Isolate* isolate) {
1078   return ExternalReference(Redirect(
1079       isolate,
1080       FUNCTION_ADDR(HandleScope::DeleteExtensions)));
1081 }
1082 
1083 
1084 ExternalReference ExternalReference::get_date_field_function(
1085     Isolate* isolate) {
1086   return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
1087 }
1088 
1089 
1090 ExternalReference ExternalReference::get_make_code_young_function(
1091     Isolate* isolate) {
1092   return ExternalReference(Redirect(
1093       isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
1094 }
1095 
1096 
1097 ExternalReference ExternalReference::get_mark_code_as_executed_function(
1098     Isolate* isolate) {
1099   return ExternalReference(Redirect(
1100       isolate, FUNCTION_ADDR(Code::MarkCodeAsExecuted)));
1101 }
1102 
1103 
1104 ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
1105   return ExternalReference(isolate->date_cache()->stamp_address());
1106 }
1107 
1108 
1109 ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
1110   return ExternalReference(isolate->stress_deopt_count_address());
1111 }
1112 
1113 
1114 ExternalReference ExternalReference::new_deoptimizer_function(
1115     Isolate* isolate) {
1116   return ExternalReference(
1117       Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New)));
1118 }
1119 
1120 
1121 ExternalReference ExternalReference::compute_output_frames_function(
1122     Isolate* isolate) {
1123   return ExternalReference(
1124       Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
1125 }
1126 
1127 ExternalReference ExternalReference::wasm_f32_trunc(Isolate* isolate) {
1128   return ExternalReference(
1129       Redirect(isolate, FUNCTION_ADDR(wasm::f32_trunc_wrapper)));
1130 }
1131 ExternalReference ExternalReference::wasm_f32_floor(Isolate* isolate) {
1132   return ExternalReference(
1133       Redirect(isolate, FUNCTION_ADDR(wasm::f32_floor_wrapper)));
1134 }
1135 ExternalReference ExternalReference::wasm_f32_ceil(Isolate* isolate) {
1136   return ExternalReference(
1137       Redirect(isolate, FUNCTION_ADDR(wasm::f32_ceil_wrapper)));
1138 }
1139 ExternalReference ExternalReference::wasm_f32_nearest_int(Isolate* isolate) {
1140   return ExternalReference(
1141       Redirect(isolate, FUNCTION_ADDR(wasm::f32_nearest_int_wrapper)));
1142 }
1143 
1144 ExternalReference ExternalReference::wasm_f64_trunc(Isolate* isolate) {
1145   return ExternalReference(
1146       Redirect(isolate, FUNCTION_ADDR(wasm::f64_trunc_wrapper)));
1147 }
1148 
1149 ExternalReference ExternalReference::wasm_f64_floor(Isolate* isolate) {
1150   return ExternalReference(
1151       Redirect(isolate, FUNCTION_ADDR(wasm::f64_floor_wrapper)));
1152 }
1153 
1154 ExternalReference ExternalReference::wasm_f64_ceil(Isolate* isolate) {
1155   return ExternalReference(
1156       Redirect(isolate, FUNCTION_ADDR(wasm::f64_ceil_wrapper)));
1157 }
1158 
1159 ExternalReference ExternalReference::wasm_f64_nearest_int(Isolate* isolate) {
1160   return ExternalReference(
1161       Redirect(isolate, FUNCTION_ADDR(wasm::f64_nearest_int_wrapper)));
1162 }
1163 
1164 ExternalReference ExternalReference::wasm_int64_to_float32(Isolate* isolate) {
1165   return ExternalReference(
1166       Redirect(isolate, FUNCTION_ADDR(wasm::int64_to_float32_wrapper)));
1167 }
1168 
1169 ExternalReference ExternalReference::wasm_uint64_to_float32(Isolate* isolate) {
1170   return ExternalReference(
1171       Redirect(isolate, FUNCTION_ADDR(wasm::uint64_to_float32_wrapper)));
1172 }
1173 
1174 ExternalReference ExternalReference::wasm_int64_to_float64(Isolate* isolate) {
1175   return ExternalReference(
1176       Redirect(isolate, FUNCTION_ADDR(wasm::int64_to_float64_wrapper)));
1177 }
1178 
1179 ExternalReference ExternalReference::wasm_uint64_to_float64(Isolate* isolate) {
1180   return ExternalReference(
1181       Redirect(isolate, FUNCTION_ADDR(wasm::uint64_to_float64_wrapper)));
1182 }
1183 
1184 ExternalReference ExternalReference::wasm_float32_to_int64(Isolate* isolate) {
1185   return ExternalReference(
1186       Redirect(isolate, FUNCTION_ADDR(wasm::float32_to_int64_wrapper)));
1187 }
1188 
1189 ExternalReference ExternalReference::wasm_float32_to_uint64(Isolate* isolate) {
1190   return ExternalReference(
1191       Redirect(isolate, FUNCTION_ADDR(wasm::float32_to_uint64_wrapper)));
1192 }
1193 
1194 ExternalReference ExternalReference::wasm_float64_to_int64(Isolate* isolate) {
1195   return ExternalReference(
1196       Redirect(isolate, FUNCTION_ADDR(wasm::float64_to_int64_wrapper)));
1197 }
1198 
1199 ExternalReference ExternalReference::wasm_float64_to_uint64(Isolate* isolate) {
1200   return ExternalReference(
1201       Redirect(isolate, FUNCTION_ADDR(wasm::float64_to_uint64_wrapper)));
1202 }
1203 
1204 ExternalReference ExternalReference::wasm_int64_div(Isolate* isolate) {
1205   return ExternalReference(
1206       Redirect(isolate, FUNCTION_ADDR(wasm::int64_div_wrapper)));
1207 }
1208 
1209 ExternalReference ExternalReference::wasm_int64_mod(Isolate* isolate) {
1210   return ExternalReference(
1211       Redirect(isolate, FUNCTION_ADDR(wasm::int64_mod_wrapper)));
1212 }
1213 
1214 ExternalReference ExternalReference::wasm_uint64_div(Isolate* isolate) {
1215   return ExternalReference(
1216       Redirect(isolate, FUNCTION_ADDR(wasm::uint64_div_wrapper)));
1217 }
1218 
1219 ExternalReference ExternalReference::wasm_uint64_mod(Isolate* isolate) {
1220   return ExternalReference(
1221       Redirect(isolate, FUNCTION_ADDR(wasm::uint64_mod_wrapper)));
1222 }
1223 
1224 ExternalReference ExternalReference::wasm_word32_ctz(Isolate* isolate) {
1225   return ExternalReference(
1226       Redirect(isolate, FUNCTION_ADDR(wasm::word32_ctz_wrapper)));
1227 }
1228 
1229 ExternalReference ExternalReference::wasm_word64_ctz(Isolate* isolate) {
1230   return ExternalReference(
1231       Redirect(isolate, FUNCTION_ADDR(wasm::word64_ctz_wrapper)));
1232 }
1233 
1234 ExternalReference ExternalReference::wasm_word32_popcnt(Isolate* isolate) {
1235   return ExternalReference(
1236       Redirect(isolate, FUNCTION_ADDR(wasm::word32_popcnt_wrapper)));
1237 }
1238 
1239 ExternalReference ExternalReference::wasm_word64_popcnt(Isolate* isolate) {
1240   return ExternalReference(
1241       Redirect(isolate, FUNCTION_ADDR(wasm::word64_popcnt_wrapper)));
1242 }
1243 
1244 static void f64_acos_wrapper(double* param) {
1245   WriteDoubleValue(param, std::acos(ReadDoubleValue(param)));
1246 }
1247 
1248 ExternalReference ExternalReference::f64_acos_wrapper_function(
1249     Isolate* isolate) {
1250   return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f64_acos_wrapper)));
1251 }
1252 
1253 static void f64_asin_wrapper(double* param) {
1254   WriteDoubleValue(param, std::asin(ReadDoubleValue(param)));
1255 }
1256 
1257 ExternalReference ExternalReference::f64_asin_wrapper_function(
1258     Isolate* isolate) {
1259   return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f64_asin_wrapper)));
1260 }
1261 
1262 static void f64_pow_wrapper(double* param0, double* param1) {
1263   WriteDoubleValue(param0, power_double_double(ReadDoubleValue(param0),
1264                                                ReadDoubleValue(param1)));
1265 }
1266 
1267 ExternalReference ExternalReference::f64_pow_wrapper_function(
1268     Isolate* isolate) {
1269   return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f64_pow_wrapper)));
1270 }
1271 
1272 static void f64_mod_wrapper(double* param0, double* param1) {
1273   WriteDoubleValue(param0,
1274                    modulo(ReadDoubleValue(param0), ReadDoubleValue(param1)));
1275 }
1276 
1277 ExternalReference ExternalReference::f64_mod_wrapper_function(
1278     Isolate* isolate) {
1279   return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f64_mod_wrapper)));
1280 }
1281 
1282 ExternalReference ExternalReference::log_enter_external_function(
1283     Isolate* isolate) {
1284   return ExternalReference(
1285       Redirect(isolate, FUNCTION_ADDR(Logger::EnterExternal)));
1286 }
1287 
1288 
1289 ExternalReference ExternalReference::log_leave_external_function(
1290     Isolate* isolate) {
1291   return ExternalReference(
1292       Redirect(isolate, FUNCTION_ADDR(Logger::LeaveExternal)));
1293 }
1294 
1295 
1296 ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) {
1297   return ExternalReference(isolate->keyed_lookup_cache()->keys_address());
1298 }
1299 
1300 
1301 ExternalReference ExternalReference::keyed_lookup_cache_field_offsets(
1302     Isolate* isolate) {
1303   return ExternalReference(
1304       isolate->keyed_lookup_cache()->field_offsets_address());
1305 }
1306 
1307 
1308 ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
1309   return ExternalReference(isolate->heap()->roots_array_start());
1310 }
1311 
1312 
1313 ExternalReference ExternalReference::allocation_sites_list_address(
1314     Isolate* isolate) {
1315   return ExternalReference(isolate->heap()->allocation_sites_list_address());
1316 }
1317 
1318 
1319 ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) {
1320   return ExternalReference(isolate->stack_guard()->address_of_jslimit());
1321 }
1322 
1323 
1324 ExternalReference ExternalReference::address_of_real_stack_limit(
1325     Isolate* isolate) {
1326   return ExternalReference(isolate->stack_guard()->address_of_real_jslimit());
1327 }
1328 
1329 
1330 ExternalReference ExternalReference::address_of_regexp_stack_limit(
1331     Isolate* isolate) {
1332   return ExternalReference(isolate->regexp_stack()->limit_address());
1333 }
1334 
1335 ExternalReference ExternalReference::store_buffer_top(Isolate* isolate) {
1336   return ExternalReference(isolate->heap()->store_buffer_top_address());
1337 }
1338 
1339 
1340 ExternalReference ExternalReference::new_space_allocation_top_address(
1341     Isolate* isolate) {
1342   return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
1343 }
1344 
1345 
1346 ExternalReference ExternalReference::new_space_allocation_limit_address(
1347     Isolate* isolate) {
1348   return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress());
1349 }
1350 
1351 
1352 ExternalReference ExternalReference::old_space_allocation_top_address(
1353     Isolate* isolate) {
1354   return ExternalReference(isolate->heap()->OldSpaceAllocationTopAddress());
1355 }
1356 
1357 
1358 ExternalReference ExternalReference::old_space_allocation_limit_address(
1359     Isolate* isolate) {
1360   return ExternalReference(isolate->heap()->OldSpaceAllocationLimitAddress());
1361 }
1362 
1363 
1364 ExternalReference ExternalReference::handle_scope_level_address(
1365     Isolate* isolate) {
1366   return ExternalReference(HandleScope::current_level_address(isolate));
1367 }
1368 
1369 
1370 ExternalReference ExternalReference::handle_scope_next_address(
1371     Isolate* isolate) {
1372   return ExternalReference(HandleScope::current_next_address(isolate));
1373 }
1374 
1375 
1376 ExternalReference ExternalReference::handle_scope_limit_address(
1377     Isolate* isolate) {
1378   return ExternalReference(HandleScope::current_limit_address(isolate));
1379 }
1380 
1381 
1382 ExternalReference ExternalReference::scheduled_exception_address(
1383     Isolate* isolate) {
1384   return ExternalReference(isolate->scheduled_exception_address());
1385 }
1386 
1387 
1388 ExternalReference ExternalReference::address_of_pending_message_obj(
1389     Isolate* isolate) {
1390   return ExternalReference(isolate->pending_message_obj_address());
1391 }
1392 
1393 
1394 ExternalReference ExternalReference::address_of_min_int() {
1395   return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
1396 }
1397 
1398 
1399 ExternalReference ExternalReference::address_of_one_half() {
1400   return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
1401 }
1402 
1403 
address_of_minus_one_half()1404 ExternalReference ExternalReference::address_of_minus_one_half() {
1405   return ExternalReference(
1406       reinterpret_cast<void*>(&double_constants.minus_one_half));
1407 }
1408 
1409 
address_of_negative_infinity()1410 ExternalReference ExternalReference::address_of_negative_infinity() {
1411   return ExternalReference(
1412       reinterpret_cast<void*>(&double_constants.negative_infinity));
1413 }
1414 
1415 
address_of_the_hole_nan()1416 ExternalReference ExternalReference::address_of_the_hole_nan() {
1417   return ExternalReference(
1418       reinterpret_cast<void*>(&double_constants.the_hole_nan));
1419 }
1420 
1421 
address_of_uint32_bias()1422 ExternalReference ExternalReference::address_of_uint32_bias() {
1423   return ExternalReference(
1424       reinterpret_cast<void*>(&double_constants.uint32_bias));
1425 }
1426 
1427 
is_profiling_address(Isolate * isolate)1428 ExternalReference ExternalReference::is_profiling_address(Isolate* isolate) {
1429   return ExternalReference(isolate->is_profiling_address());
1430 }
1431 
1432 
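// The two references below do not point at the callbacks themselves but at
// profiling thunks (PROFILING_API_CALL / PROFILING_GETTER_CALL): when the
// CPU profiler is active, API callbacks are invoked through these thunks so
// that time spent in external code can be attributed correctly.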
ExternalReference ExternalReference::invoke_function_callback(
    Isolate* isolate) {
  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
  ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
  ApiFunction thunk_fun(thunk_address);
  return ExternalReference(&thunk_fun, thunk_type, isolate);
}


ExternalReference ExternalReference::invoke_accessor_getter_callback(
    Isolate* isolate) {
  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
  ExternalReference::Type thunk_type =
      ExternalReference::PROFILING_GETTER_CALL;
  ApiFunction thunk_fun(thunk_address);
  return ExternalReference(&thunk_fun, thunk_type, isolate);
}


#ifndef V8_INTERPRETED_REGEXP

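// Note on Redirect(): on real hardware it simply returns the C entry point;
// when running on the simulator it wraps the address so that calls from
// generated code are trapped and executed by the simulator.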
ExternalReference ExternalReference::re_check_stack_guard_state(
    Isolate* isolate) {
  Address function;
#if V8_TARGET_ARCH_X64
  function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState);
#elif V8_TARGET_ARCH_IA32
  function = FUNCTION_ADDR(RegExpMacroAssemblerIA32::CheckStackGuardState);
#elif V8_TARGET_ARCH_ARM64
  function = FUNCTION_ADDR(RegExpMacroAssemblerARM64::CheckStackGuardState);
#elif V8_TARGET_ARCH_ARM
  function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState);
#elif V8_TARGET_ARCH_PPC
  function = FUNCTION_ADDR(RegExpMacroAssemblerPPC::CheckStackGuardState);
#elif V8_TARGET_ARCH_MIPS
  function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
#elif V8_TARGET_ARCH_MIPS64
  function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
#elif V8_TARGET_ARCH_S390
  function = FUNCTION_ADDR(RegExpMacroAssemblerS390::CheckStackGuardState);
#elif V8_TARGET_ARCH_X87
  function = FUNCTION_ADDR(RegExpMacroAssemblerX87::CheckStackGuardState);
#else
  UNREACHABLE();
#endif
  return ExternalReference(Redirect(isolate, function));
}


ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
}

ExternalReference ExternalReference::re_case_insensitive_compare_uc16(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
}


ExternalReference ExternalReference::re_word_character_map() {
  return ExternalReference(
      NativeRegExpMacroAssembler::word_character_map_address());
}

ExternalReference ExternalReference::address_of_static_offsets_vector(
    Isolate* isolate) {
  return ExternalReference(
      reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector()));
}

ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
    Isolate* isolate) {
  return ExternalReference(
      isolate->regexp_stack()->memory_address());
}

ExternalReference ExternalReference::address_of_regexp_stack_memory_size(
    Isolate* isolate) {
  return ExternalReference(isolate->regexp_stack()->memory_size_address());
}

#endif  // V8_INTERPRETED_REGEXP

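// References to the fdlibm-derived implementations in src/base/ieee754.h.
// Generated code reaches them through Redirect() when lowering the
// corresponding Math.* builtins.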
ExternalReference ExternalReference::ieee754_atan_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::atan), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_atan2_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(base::ieee754::atan2), BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::ieee754_atanh_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(base::ieee754::atanh), BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::ieee754_cbrt_function(Isolate* isolate) {
  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(base::ieee754::cbrt),
                                    BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::ieee754_cos_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::cos), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_exp_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::exp), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_expm1_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(base::ieee754::expm1), BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::ieee754_log_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::log), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_log1p_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::log1p), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_log10_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::log10), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_log2_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::log2), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_sin_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::sin), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_tan_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::tan), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::page_flags(Page* page) {
  return ExternalReference(reinterpret_cast<Address>(page) +
                           MemoryChunk::kFlagsOffset);
}


ExternalReference ExternalReference::ForDeoptEntry(Address entry) {
  return ExternalReference(entry);
}


ExternalReference ExternalReference::cpu_features() {
  DCHECK(CpuFeatures::initialized_);
  return ExternalReference(&CpuFeatures::supported_);
}

ExternalReference ExternalReference::is_tail_call_elimination_enabled_address(
    Isolate* isolate) {
  return ExternalReference(isolate->is_tail_call_elimination_enabled_address());
}

ExternalReference ExternalReference::debug_is_active_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->is_active_address());
}


ExternalReference ExternalReference::debug_after_break_target_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->after_break_target_address());
}


ExternalReference ExternalReference::virtual_handler_register(
    Isolate* isolate) {
  return ExternalReference(isolate->virtual_handler_register_address());
}


ExternalReference ExternalReference::virtual_slot_register(Isolate* isolate) {
  return ExternalReference(isolate->virtual_slot_register_address());
}


ExternalReference ExternalReference::runtime_function_table_address(
    Isolate* isolate) {
  return ExternalReference(
      const_cast<Runtime::Function*>(Runtime::RuntimeFunctionTable(isolate)));
}


double power_helper(Isolate* isolate, double x, double y) {
  int y_int = static_cast<int>(y);
  if (y == y_int) {
    return power_double_int(x, y_int);  // Returns 1 if exponent is 0.
  }
  if (y == 0.5) {
    lazily_initialize_fast_sqrt(isolate);
    return (std::isinf(x)) ? V8_INFINITY
                           : fast_sqrt(x + 0.0, isolate);  // Convert -0 to +0.
  }
  if (y == -0.5) {
    lazily_initialize_fast_sqrt(isolate);
    return (std::isinf(x)) ? 0 : 1.0 / fast_sqrt(x + 0.0,
                                                 isolate);  // Convert -0 to +0.
  }
  return power_double_double(x, y);
}


// Helper function to compute x^y, where y is known to be an
// integer. Uses binary decomposition to limit the number of
// multiplications; see the discussion in "Hacker's Delight" by Henry
// S. Warren, Jr., figure 11-6, page 213.
double power_double_int(double x, int y) {
  double m = (y < 0) ? 1 / x : x;
  unsigned n = (y < 0) ? -y : y;
  double p = 1;
  while (n != 0) {
    if ((n & 1) != 0) p *= m;
    m *= m;
    if ((n & 2) != 0) p *= m;
    m *= m;
    n >>= 2;
  }
  return p;
}
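// For example, power_double_int(x, 13) consumes 13 = 0b1101 two bits per
// loop iteration: the first pass multiplies p by m = x (bit 0) and squares
// m twice; the second pass multiplies p by m = x^4 (bit 0 of the shifted n)
// and then by m = x^8 (bit 1), yielding x^13 with 7 multiplications instead
// of 12.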


double power_double_double(double x, double y) {
  // The checks for special cases can be dropped on ia32 because they have
  // already been done in generated code before bailing out here.
  if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
    return std::numeric_limits<double>::quiet_NaN();
  }
  return Pow(x, y);
}
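// Note: ES semantics require x ** NaN and (+/-1) ** (+/-Infinity) to be NaN,
// whereas C's pow() returns 1 for several of these inputs, hence the explicit
// filtering above before delegating to Pow().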


ExternalReference ExternalReference::power_double_double_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(power_double_double),
                                    BUILTIN_FP_FP_CALL));
}


ExternalReference ExternalReference::power_double_int_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(power_double_int),
                                    BUILTIN_FP_INT_CALL));
}


ExternalReference ExternalReference::mod_two_doubles_operation(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(modulo),
                                    BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::debug_last_step_action_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->last_step_action_address());
}

ExternalReference ExternalReference::debug_suspended_generator_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->suspended_generator_address());
}

ExternalReference ExternalReference::fixed_typed_array_base_data_offset() {
  return ExternalReference(reinterpret_cast<void*>(
      FixedTypedArrayBase::kDataOffset - kHeapObjectTag));
}


bool operator==(ExternalReference lhs, ExternalReference rhs) {
  return lhs.address() == rhs.address();
}


bool operator!=(ExternalReference lhs, ExternalReference rhs) {
  return !(lhs == rhs);
}


size_t hash_value(ExternalReference reference) {
  return base::hash<Address>()(reference.address());
}


std::ostream& operator<<(std::ostream& os, ExternalReference reference) {
  os << static_cast<const void*>(reference.address());
  const Runtime::Function* fn = Runtime::FunctionForEntry(reference.address());
  if (fn) os << "<" << fn->name << ".entry>";
  return os;
}

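// The positions recorder keeps track of the most recent expression and
// statement source positions and flushes them into the relocation info
// stream, skipping values that have already been written (see
// WriteRecordedPositions below).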
void AssemblerPositionsRecorder::RecordPosition(int pos) {
  DCHECK(pos != RelocInfo::kNoPosition);
  DCHECK(pos >= 0);
  current_position_ = pos;
  LOG_CODE_EVENT(assembler_->isolate(),
                 CodeLinePosInfoAddPositionEvent(jit_handler_data_,
                                                 assembler_->pc_offset(),
                                                 pos));
  WriteRecordedPositions();
}

void AssemblerPositionsRecorder::RecordStatementPosition(int pos) {
  DCHECK(pos != RelocInfo::kNoPosition);
  DCHECK(pos >= 0);
  current_statement_position_ = pos;
  LOG_CODE_EVENT(assembler_->isolate(),
                 CodeLinePosInfoAddStatementPositionEvent(
                     jit_handler_data_,
                     assembler_->pc_offset(),
                     pos));
  RecordPosition(pos);
}

void AssemblerPositionsRecorder::WriteRecordedPositions() {
  // Write the statement position if it is different from what was written last
  // time.
  if (current_statement_position_ != written_statement_position_) {
    EnsureSpace ensure_space(assembler_);
    assembler_->RecordRelocInfo(RelocInfo::STATEMENT_POSITION,
                                current_statement_position_);
    written_position_ = current_statement_position_;
    written_statement_position_ = current_statement_position_;
  }

  // Write the position if it is different from what was written last time and
  // also different from the statement position that was just written.
  if (current_position_ != written_position_) {
    EnsureSpace ensure_space(assembler_);
    assembler_->RecordRelocInfo(RelocInfo::POSITION, current_position_);
    written_position_ = current_position_;
  }
}


ConstantPoolBuilder::ConstantPoolBuilder(int ptr_reach_bits,
                                         int double_reach_bits) {
  info_[ConstantPoolEntry::INTPTR].entries.reserve(64);
  info_[ConstantPoolEntry::INTPTR].regular_reach_bits = ptr_reach_bits;
  info_[ConstantPoolEntry::DOUBLE].regular_reach_bits = double_reach_bits;
}


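// An entry can be placed in the regular section only while its offset from
// the pool base still fits in the reach (in bits) of the load instruction
// that will access it; once that fails, the entry and all later ones of that
// type go to the overflow section.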
ConstantPoolEntry::Access ConstantPoolBuilder::NextAccess(
    ConstantPoolEntry::Type type) const {
  const PerTypeEntryInfo& info = info_[type];

  if (info.overflow()) return ConstantPoolEntry::OVERFLOWED;

  int dbl_count = info_[ConstantPoolEntry::DOUBLE].regular_count;
  int dbl_offset = dbl_count * kDoubleSize;
  int ptr_count = info_[ConstantPoolEntry::INTPTR].regular_count;
  int ptr_offset = ptr_count * kPointerSize + dbl_offset;

  if (type == ConstantPoolEntry::DOUBLE) {
    // Double overflow detection must take into account the reach for both types
    int ptr_reach_bits = info_[ConstantPoolEntry::INTPTR].regular_reach_bits;
    if (!is_uintn(dbl_offset, info.regular_reach_bits) ||
        (ptr_count > 0 &&
         !is_uintn(ptr_offset + kDoubleSize - kPointerSize, ptr_reach_bits))) {
      return ConstantPoolEntry::OVERFLOWED;
    }
  } else {
    DCHECK(type == ConstantPoolEntry::INTPTR);
    if (!is_uintn(ptr_offset, info.regular_reach_bits)) {
      return ConstantPoolEntry::OVERFLOWED;
    }
  }

  return ConstantPoolEntry::REGULAR;
}


ConstantPoolEntry::Access ConstantPoolBuilder::AddEntry(
    ConstantPoolEntry& entry, ConstantPoolEntry::Type type) {
  DCHECK(!emitted_label_.is_bound());
  PerTypeEntryInfo& info = info_[type];
  const int entry_size = ConstantPoolEntry::size(type);
  bool merged = false;

  if (entry.sharing_ok()) {
    // Try to merge entries
    std::vector<ConstantPoolEntry>::iterator it = info.shared_entries.begin();
    int end = static_cast<int>(info.shared_entries.size());
    for (int i = 0; i < end; i++, it++) {
      if ((entry_size == kPointerSize) ? entry.value() == it->value()
                                       : entry.value64() == it->value64()) {
        // Merge with found entry.
        entry.set_merged_index(i);
        merged = true;
        break;
      }
    }
  }

  // By definition, merged entries have regular access.
  DCHECK(!merged || entry.merged_index() < info.regular_count);
  ConstantPoolEntry::Access access =
      (merged ? ConstantPoolEntry::REGULAR : NextAccess(type));

  // Enforce an upper bound on search time by limiting the search to
  // unique sharable entries which fit in the regular section.
  if (entry.sharing_ok() && !merged && access == ConstantPoolEntry::REGULAR) {
    info.shared_entries.push_back(entry);
  } else {
    info.entries.push_back(entry);
  }

  // We're done if we found a match or have already triggered the
  // overflow state.
  if (merged || info.overflow()) return access;

  if (access == ConstantPoolEntry::REGULAR) {
    info.regular_count++;
  } else {
    info.overflow_start = static_cast<int>(info.entries.size()) - 1;
  }

  return access;
}


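// Shared (deduplicated) entries are emitted before the regular group so that
// merged entries can later be patched with the offsets recorded here.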
void ConstantPoolBuilder::EmitSharedEntries(Assembler* assm,
                                            ConstantPoolEntry::Type type) {
  PerTypeEntryInfo& info = info_[type];
  std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
  const int entry_size = ConstantPoolEntry::size(type);
  int base = emitted_label_.pos();
  DCHECK(base > 0);
  int shared_end = static_cast<int>(shared_entries.size());
  std::vector<ConstantPoolEntry>::iterator shared_it = shared_entries.begin();
  for (int i = 0; i < shared_end; i++, shared_it++) {
    int offset = assm->pc_offset() - base;
    shared_it->set_offset(offset);  // Save offset for merged entries.
    if (entry_size == kPointerSize) {
      assm->dp(shared_it->value());
    } else {
      assm->dq(shared_it->value64());
    }
    DCHECK(is_uintn(offset, info.regular_reach_bits));

    // Patch load sequence with correct offset.
    assm->PatchConstantPoolAccessInstruction(shared_it->position(), offset,
                                             ConstantPoolEntry::REGULAR, type);
  }
}


void ConstantPoolBuilder::EmitGroup(Assembler* assm,
                                    ConstantPoolEntry::Access access,
                                    ConstantPoolEntry::Type type) {
  PerTypeEntryInfo& info = info_[type];
  const bool overflow = info.overflow();
  std::vector<ConstantPoolEntry>& entries = info.entries;
  std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
  const int entry_size = ConstantPoolEntry::size(type);
  int base = emitted_label_.pos();
  DCHECK(base > 0);
  int begin;
  int end;

  if (access == ConstantPoolEntry::REGULAR) {
    // Emit any shared entries first
    EmitSharedEntries(assm, type);
  }

  if (access == ConstantPoolEntry::REGULAR) {
    begin = 0;
    end = overflow ? info.overflow_start : static_cast<int>(entries.size());
  } else {
    DCHECK(access == ConstantPoolEntry::OVERFLOWED);
    if (!overflow) return;
    begin = info.overflow_start;
    end = static_cast<int>(entries.size());
  }

  std::vector<ConstantPoolEntry>::iterator it = entries.begin();
  if (begin > 0) std::advance(it, begin);
  for (int i = begin; i < end; i++, it++) {
    // Update constant pool if necessary and get the entry's offset.
    int offset;
    ConstantPoolEntry::Access entry_access;
    if (!it->is_merged()) {
      // Emit new entry
      offset = assm->pc_offset() - base;
      entry_access = access;
      if (entry_size == kPointerSize) {
        assm->dp(it->value());
      } else {
        assm->dq(it->value64());
      }
    } else {
      // Retrieve offset from shared entry.
      offset = shared_entries[it->merged_index()].offset();
      entry_access = ConstantPoolEntry::REGULAR;
    }

    DCHECK(entry_access == ConstantPoolEntry::OVERFLOWED ||
           is_uintn(offset, info.regular_reach_bits));

    // Patch load sequence with correct offset.
    assm->PatchConstantPoolAccessInstruction(it->position(), offset,
                                             entry_access, type);
  }
}


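// The resulting pool layout is: regular doubles, regular pointers, overflowed
// doubles (re-aligned to kDoubleSize), then overflowed pointers.  Doubles are
// emitted first so the initial DataAlign(kDoubleSize) keeps them naturally
// aligned.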
// Emit and return position of pool.  Zero implies no constant pool.
int ConstantPoolBuilder::Emit(Assembler* assm) {
  bool emitted = emitted_label_.is_bound();
  bool empty = IsEmpty();

  if (!emitted) {
    // Mark start of constant pool.  Align if necessary.
    if (!empty) assm->DataAlign(kDoubleSize);
    assm->bind(&emitted_label_);
    if (!empty) {
      // Emit in groups based on access and type.
      // Emit doubles first for alignment purposes.
      EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::DOUBLE);
      EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::INTPTR);
      if (info_[ConstantPoolEntry::DOUBLE].overflow()) {
        assm->DataAlign(kDoubleSize);
        EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
                  ConstantPoolEntry::DOUBLE);
      }
      if (info_[ConstantPoolEntry::INTPTR].overflow()) {
        EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
                  ConstantPoolEntry::INTPTR);
      }
    }
  }

  return !empty ? emitted_label_.pos() : 0;
}


// Platform specific but identical code for all the platforms.

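// Deopt reasons are only recorded when --trace-deopt is enabled or the
// profiler is running, so ordinary code does not pay for the extra
// relocation entries.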
void Assembler::RecordDeoptReason(const int reason, int raw_position, int id) {
  if (FLAG_trace_deopt || isolate()->is_profiling()) {
    EnsureSpace ensure_space(this);
    RecordRelocInfo(RelocInfo::POSITION, raw_position);
    RecordRelocInfo(RelocInfo::DEOPT_REASON, reason);
    RecordRelocInfo(RelocInfo::DEOPT_ID, id);
  }
}


void Assembler::RecordComment(const char* msg) {
  if (FLAG_code_comments) {
    EnsureSpace ensure_space(this);
    RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
  }
}


void Assembler::RecordGeneratorContinuation() {
  EnsureSpace ensure_space(this);
  RecordRelocInfo(RelocInfo::GENERATOR_CONTINUATION);
}


void Assembler::RecordDebugBreakSlot(RelocInfo::Mode mode) {
  EnsureSpace ensure_space(this);
  DCHECK(RelocInfo::IsDebugBreakSlot(mode));
  RecordRelocInfo(mode);
}


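// For example, DataAlign(kDoubleSize) emits zero bytes until pc_offset() is a
// multiple of 8, so that a subsequently emitted dq() value is naturally
// aligned.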
void Assembler::DataAlign(int m) {
  DCHECK(m >= 2 && base::bits::IsPowerOfTwo32(m));
  while ((pc_offset() & (m - 1)) != 0) {
    db(0);
  }
}
}  // namespace internal
}  // namespace v8