1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are
6 // met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution.
14 //
15 // - Neither the name of Sun Microsystems or the names of contributors may
16 // be used to endorse or promote products derived from this software without
17 // specific prior written permission.
18 //
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
20 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
26 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 
31 // The original source code covered by the above license has been
32 // modified significantly by Google Inc.
33 // Copyright 2012 the V8 project authors. All rights reserved.
34 
35 #include "src/assembler.h"
36 
37 #include <math.h>
38 #include <string.h>
39 #include <cmath>
40 
41 #include "src/api.h"
42 #include "src/assembler-inl.h"
43 #include "src/base/cpu.h"
44 #include "src/base/functional.h"
45 #include "src/base/ieee754.h"
46 #include "src/base/lazy-instance.h"
47 #include "src/base/platform/platform.h"
48 #include "src/base/utils/random-number-generator.h"
49 #include "src/codegen.h"
50 #include "src/counters.h"
51 #include "src/debug/debug.h"
52 #include "src/deoptimizer.h"
53 #include "src/disassembler.h"
54 #include "src/execution.h"
55 #include "src/ic/ic.h"
56 #include "src/ic/stub-cache.h"
57 #include "src/interpreter/interpreter.h"
58 #include "src/ostreams.h"
59 #include "src/regexp/jsregexp.h"
60 #include "src/regexp/regexp-macro-assembler.h"
61 #include "src/regexp/regexp-stack.h"
62 #include "src/register-configuration.h"
63 #include "src/runtime/runtime.h"
64 #include "src/simulator.h"  // For flushing instruction cache.
65 #include "src/snapshot/serializer-common.h"
66 #include "src/wasm/wasm-external-refs.h"
67 
68 // Include native regexp-macro-assembler.
69 #ifndef V8_INTERPRETED_REGEXP
70 #if V8_TARGET_ARCH_IA32
71 #include "src/regexp/ia32/regexp-macro-assembler-ia32.h"  // NOLINT
72 #elif V8_TARGET_ARCH_X64
73 #include "src/regexp/x64/regexp-macro-assembler-x64.h"  // NOLINT
74 #elif V8_TARGET_ARCH_ARM64
75 #include "src/regexp/arm64/regexp-macro-assembler-arm64.h"  // NOLINT
76 #elif V8_TARGET_ARCH_ARM
77 #include "src/regexp/arm/regexp-macro-assembler-arm.h"  // NOLINT
78 #elif V8_TARGET_ARCH_PPC
79 #include "src/regexp/ppc/regexp-macro-assembler-ppc.h"  // NOLINT
80 #elif V8_TARGET_ARCH_MIPS
81 #include "src/regexp/mips/regexp-macro-assembler-mips.h"  // NOLINT
82 #elif V8_TARGET_ARCH_MIPS64
83 #include "src/regexp/mips64/regexp-macro-assembler-mips64.h"  // NOLINT
84 #elif V8_TARGET_ARCH_S390
85 #include "src/regexp/s390/regexp-macro-assembler-s390.h"  // NOLINT
86 #elif V8_TARGET_ARCH_X87
87 #include "src/regexp/x87/regexp-macro-assembler-x87.h"  // NOLINT
88 #else  // Unknown architecture.
89 #error "Unknown architecture."
90 #endif  // Target architecture.
91 #endif  // V8_INTERPRETED_REGEXP
92 
93 namespace v8 {
94 namespace internal {
95 
96 // -----------------------------------------------------------------------------
97 // Common double constants.
98 
99 struct DoubleConstant BASE_EMBEDDED {
100 double min_int;
101 double one_half;
102 double minus_one_half;
103 double negative_infinity;
104 uint64_t the_hole_nan;
105 double uint32_bias;
106 };
107 
108 static DoubleConstant double_constants;
109 
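// Editorial note: the 16-byte aligned constants below are bit masks consumed
// by generated SIMD code. AND-ing a 32-bit float lane with 0x7FFFFFFF clears
// the sign bit (absolute value), while XOR-ing with 0x80000000 flips it
// (negation); the 64-bit pair plays the same role for doubles.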
110 static struct V8_ALIGNED(16) {
111   uint32_t a;
112   uint32_t b;
113   uint32_t c;
114   uint32_t d;
115 } float_absolute_constant = {0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF};
116 
117 static struct V8_ALIGNED(16) {
118   uint32_t a;
119   uint32_t b;
120   uint32_t c;
121   uint32_t d;
122 } float_negate_constant = {0x80000000, 0x80000000, 0x80000000, 0x80000000};
123 
124 static struct V8_ALIGNED(16) {
125   uint64_t a;
126   uint64_t b;
127 } double_absolute_constant = {V8_UINT64_C(0x7FFFFFFFFFFFFFFF),
128                               V8_UINT64_C(0x7FFFFFFFFFFFFFFF)};
129 
130 static struct V8_ALIGNED(16) {
131   uint64_t a;
132   uint64_t b;
133 } double_negate_constant = {V8_UINT64_C(0x8000000000000000),
134                             V8_UINT64_C(0x8000000000000000)};
135 
136 const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
137 
138 // -----------------------------------------------------------------------------
139 // Implementation of AssemblerBase
140 
141 AssemblerBase::AssemblerBase(Isolate* isolate, void* buffer, int buffer_size)
142     : isolate_(isolate),
143       jit_cookie_(0),
144       enabled_cpu_features_(0),
145       emit_debug_code_(FLAG_debug_code),
146       predictable_code_size_(false),
147       // We may use the assembler without an isolate.
148       serializer_enabled_(isolate && isolate->serializer_enabled()),
149       constant_pool_available_(false) {
150   DCHECK_NOT_NULL(isolate);
151   if (FLAG_mask_constants_with_cookie) {
152     jit_cookie_ = isolate->random_number_generator()->NextInt();
153   }
154   own_buffer_ = buffer == NULL;
155   if (buffer_size == 0) buffer_size = kMinimalBufferSize;
156   DCHECK(buffer_size > 0);
157   if (own_buffer_) buffer = NewArray<byte>(buffer_size);
158   buffer_ = static_cast<byte*>(buffer);
159   buffer_size_ = buffer_size;
160 
161   pc_ = buffer_;
162 }
163 
164 
165 AssemblerBase::~AssemblerBase() {
166   if (own_buffer_) DeleteArray(buffer_);
167 }
168 
169 
170 void AssemblerBase::FlushICache(Isolate* isolate, void* start, size_t size) {
171   if (size == 0) return;
172 
173 #if defined(USE_SIMULATOR)
174   base::LockGuard<base::Mutex> lock_guard(isolate->simulator_i_cache_mutex());
175   Simulator::FlushICache(isolate->simulator_i_cache(), start, size);
176 #else
177   CpuFeatures::FlushICache(start, size);
178 #endif  // USE_SIMULATOR
179 }
180 
181 
182 void AssemblerBase::Print() {
183   OFStream os(stdout);
184   v8::internal::Disassembler::Decode(isolate(), &os, buffer_, pc_, nullptr);
185 }
186 
187 
188 // -----------------------------------------------------------------------------
189 // Implementation of PredictableCodeSizeScope
190 
191 PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler)
192     : PredictableCodeSizeScope(assembler, -1) {}
193 
194 
195 PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler,
196                                                    int expected_size)
197     : assembler_(assembler),
198       expected_size_(expected_size),
199       start_offset_(assembler->pc_offset()),
200       old_value_(assembler->predictable_code_size()) {
201   assembler_->set_predictable_code_size(true);
202 }
203 
204 
205 PredictableCodeSizeScope::~PredictableCodeSizeScope() {
206   // TODO(svenpanne) Remove the 'if' when everything works.
207   if (expected_size_ >= 0) {
208     CHECK_EQ(expected_size_, assembler_->pc_offset() - start_offset_);
209   }
210   assembler_->set_predictable_code_size(old_value_);
211 }
212 
213 
214 // -----------------------------------------------------------------------------
215 // Implementation of CpuFeatureScope
216 
217 #ifdef DEBUG
218 CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f,
219                                  CheckPolicy check)
220     : assembler_(assembler) {
221   DCHECK_IMPLIES(check == kCheckSupported, CpuFeatures::IsSupported(f));
222   old_enabled_ = assembler_->enabled_cpu_features();
223   assembler_->EnableCpuFeature(f);
224 }
225 
226 CpuFeatureScope::~CpuFeatureScope() {
227   assembler_->set_enabled_cpu_features(old_enabled_);
228 }
229 #endif
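// Editorial sketch of typical CpuFeatureScope usage (assumes an assembler
// `masm` and a feature already probed as supported on this CPU):
//
//   {
//     CpuFeatureScope use_sse4(&masm, SSE4_1);
//     // ... emit instructions that require SSE4_1 here ...
//   }  // previous feature set restored when the scope ends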
230 
231 
232 bool CpuFeatures::initialized_ = false;
233 unsigned CpuFeatures::supported_ = 0;
234 unsigned CpuFeatures::icache_line_size_ = 0;
235 unsigned CpuFeatures::dcache_line_size_ = 0;
236 
237 // -----------------------------------------------------------------------------
238 // Implementation of RelocInfoWriter and RelocIterator
239 //
240 // Relocation information is written backwards in memory, from high addresses
241 // towards low addresses, byte by byte.  Therefore, in the encodings listed
242 // below, the first byte listed is at the highest address, and successive
243 // bytes in the record are at progressively lower addresses.
244 //
245 // Encoding
246 //
247 // The most common modes are given single-byte encodings.  Also, it is
248 // easy to identify the type of reloc info and skip unwanted modes in
249 // an iteration.
250 //
251 // The encoding relies on the fact that there are fewer than 14
252 // different relocation modes using standard non-compact encoding.
253 //
254 // The first byte of a relocation record has a tag in its low 2 bits:
255 // Here are the record schemes, depending on the low tag and optional higher
256 // tags.
257 //
258 // Low tag:
259 //   00: embedded_object:      [6-bit pc delta] 00
260 //
261 //   01: code_target:          [6-bit pc delta] 01
262 //
263 //   10: short_data_record:    [6-bit pc delta] 10 followed by
264 //                             [7-bit data delta] [1-bit data type tag]
265 //
266 //   11: long_record           [6 bit reloc mode] 11
267 //                             followed by pc delta
268 //                             followed by optional data depending on type.
269 //
270 //  1-bit data type tags, used in short_data_record and data_jump long_record:
271 //   code_target_with_id: 0
272 //   deopt_reason:        1
273 //
274 //  If a pc delta exceeds 6 bits, it is split into a remainder that fits into
275 //  6 bits and a part that does not. The latter is encoded as a long record
276 //  with PC_JUMP as pseudo reloc info mode. The former is encoded as part of
277 //  the following record in the usual way. The long pc jump record has variable
278 //  length:
279 //               pc-jump:        [PC_JUMP] 11
280 //                               [7 bits data] 0
281 //                                  ...
282 //                               [7 bits data] 1
283 //               (Bits 6..31 of pc delta, with leading zeroes
284 //                dropped, and last non-zero chunk tagged with 1.)
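// Worked example (editorial, derived from the constants below): a CODE_TARGET
// record with a pc delta of 0x150 does not fit the 6-bit short form (maximum
// 0x3F), so the writer first emits a PC_JUMP long record, then a single chunk
// byte 0x0B ((0x150 >> 6) << 1 | 1, with the last-chunk tag set), and finally
// the short tagged byte 0x41 ((0x150 & 0x3F) << 2 | 01 for the code_target
// tag). The iterator below consumes these bytes in the reverse order.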
285 
286 const int kTagBits = 2;
287 const int kTagMask = (1 << kTagBits) - 1;
288 const int kLongTagBits = 6;
289 const int kShortDataTypeTagBits = 1;
290 const int kShortDataBits = kBitsPerByte - kShortDataTypeTagBits;
291 
292 const int kEmbeddedObjectTag = 0;
293 const int kCodeTargetTag = 1;
294 const int kLocatableTag = 2;
295 const int kDefaultTag = 3;
296 
297 const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
298 const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
299 const int RelocInfo::kMaxSmallPCDelta = kSmallPCDeltaMask;
300 
301 const int kChunkBits = 7;
302 const int kChunkMask = (1 << kChunkBits) - 1;
303 const int kLastChunkTagBits = 1;
304 const int kLastChunkTagMask = 1;
305 const int kLastChunkTag = 1;
306 
307 const int kCodeWithIdTag = 0;
308 const int kDeoptReasonTag = 1;
309 
310 void RelocInfo::update_wasm_memory_reference(
311     Address old_base, Address new_base, ICacheFlushMode icache_flush_mode) {
312   DCHECK(IsWasmMemoryReference(rmode_));
313   DCHECK_GE(wasm_memory_reference(), old_base);
314   Address updated_reference = new_base + (wasm_memory_reference() - old_base);
315   // The reference is not checked here but at runtime. Validity of references
316   // may change over time.
317   unchecked_update_wasm_memory_reference(updated_reference, icache_flush_mode);
318   if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
319     Assembler::FlushICache(isolate_, pc_, sizeof(int64_t));
320   }
321 }
322 
323 void RelocInfo::update_wasm_memory_size(uint32_t old_size, uint32_t new_size,
324                                         ICacheFlushMode icache_flush_mode) {
325   DCHECK(IsWasmMemorySizeReference(rmode_));
326   uint32_t current_size_reference = wasm_memory_size_reference();
327   uint32_t updated_size_reference =
328       new_size + (current_size_reference - old_size);
329   unchecked_update_wasm_size(updated_size_reference, icache_flush_mode);
330   if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
331     Assembler::FlushICache(isolate_, pc_, sizeof(int64_t));
332   }
333 }
334 
335 void RelocInfo::update_wasm_global_reference(
336     Address old_base, Address new_base, ICacheFlushMode icache_flush_mode) {
337   DCHECK(IsWasmGlobalReference(rmode_));
338   Address updated_reference;
339   DCHECK(reinterpret_cast<uintptr_t>(old_base) <=
340          reinterpret_cast<uintptr_t>(wasm_global_reference()));
341   updated_reference = new_base + (wasm_global_reference() - old_base);
342   DCHECK(reinterpret_cast<uintptr_t>(new_base) <=
343          reinterpret_cast<uintptr_t>(updated_reference));
344   unchecked_update_wasm_memory_reference(updated_reference, icache_flush_mode);
345   if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
346     Assembler::FlushICache(isolate_, pc_, sizeof(int32_t));
347   }
348 }
349 
350 void RelocInfo::update_wasm_function_table_size_reference(
351     uint32_t old_size, uint32_t new_size, ICacheFlushMode icache_flush_mode) {
352   DCHECK(IsWasmFunctionTableSizeReference(rmode_));
353   uint32_t current_size_reference = wasm_function_table_size_reference();
354   uint32_t updated_size_reference =
355       new_size + (current_size_reference - old_size);
356   unchecked_update_wasm_size(updated_size_reference, icache_flush_mode);
357   if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
358     Assembler::FlushICache(isolate_, pc_, sizeof(int64_t));
359   }
360 }
361 
362 void RelocInfo::set_target_address(Address target,
363                                    WriteBarrierMode write_barrier_mode,
364                                    ICacheFlushMode icache_flush_mode) {
365   DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
366   Assembler::set_target_address_at(isolate_, pc_, host_, target,
367                                    icache_flush_mode);
368   if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
369       IsCodeTarget(rmode_)) {
370     Object* target_code = Code::GetCodeFromTargetAddress(target);
371     host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
372         host(), this, HeapObject::cast(target_code));
373   }
374 }
375 
376 uint32_t RelocInfoWriter::WriteLongPCJump(uint32_t pc_delta) {
377   // Return if the pc_delta can fit in kSmallPCDeltaBits bits.
378   // Otherwise write a variable length PC jump for the bits that do
379   // not fit in the kSmallPCDeltaBits bits.
380   if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
381   WriteMode(RelocInfo::PC_JUMP);
382   uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
383   DCHECK(pc_jump > 0);
384   // Write kChunkBits size chunks of the pc_jump.
385   for (; pc_jump > 0; pc_jump = pc_jump >> kChunkBits) {
386     byte b = pc_jump & kChunkMask;
387     *--pos_ = b << kLastChunkTagBits;
388   }
389   // Tag the last chunk so it can be identified.
390   *pos_ = *pos_ | kLastChunkTag;
391   // Return the remaining kSmallPCDeltaBits of the pc_delta.
392   return pc_delta & kSmallPCDeltaMask;
393 }
394 
395 
396 void RelocInfoWriter::WriteShortTaggedPC(uint32_t pc_delta, int tag) {
397   // Write a byte of tagged pc-delta, possibly preceded by an explicit pc-jump.
398   pc_delta = WriteLongPCJump(pc_delta);
399   *--pos_ = pc_delta << kTagBits | tag;
400 }
401 
402 
403 void RelocInfoWriter::WriteShortTaggedData(intptr_t data_delta, int tag) {
404   *--pos_ = static_cast<byte>(data_delta << kShortDataTypeTagBits | tag);
405 }
406 
407 
408 void RelocInfoWriter::WriteMode(RelocInfo::Mode rmode) {
409   STATIC_ASSERT(RelocInfo::NUMBER_OF_MODES <= (1 << kLongTagBits));
410   *--pos_ = static_cast<int>((rmode << kTagBits) | kDefaultTag);
411 }
412 
413 
414 void RelocInfoWriter::WriteModeAndPC(uint32_t pc_delta, RelocInfo::Mode rmode) {
415   // Write two-byte tagged pc-delta, possibly preceded by var. length pc-jump.
416   pc_delta = WriteLongPCJump(pc_delta);
417   WriteMode(rmode);
418   *--pos_ = pc_delta;
419 }
420 
421 
422 void RelocInfoWriter::WriteIntData(int number) {
423   for (int i = 0; i < kIntSize; i++) {
424     *--pos_ = static_cast<byte>(number);
425     // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
426     number = number >> kBitsPerByte;
427   }
428 }
429 
430 
431 void RelocInfoWriter::WriteData(intptr_t data_delta) {
432   for (int i = 0; i < kIntptrSize; i++) {
433     *--pos_ = static_cast<byte>(data_delta);
434     // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
435     data_delta = data_delta >> kBitsPerByte;
436   }
437 }
438 
439 
440 void RelocInfoWriter::Write(const RelocInfo* rinfo) {
441   RelocInfo::Mode rmode = rinfo->rmode();
442 #ifdef DEBUG
443   byte* begin_pos = pos_;
444 #endif
445   DCHECK(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
446   DCHECK(rinfo->pc() - last_pc_ >= 0);
447   // Use unsigned delta-encoding for pc.
448   uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
449 
450   // The two most common modes are given small tags, and usually fit in a byte.
451   if (rmode == RelocInfo::EMBEDDED_OBJECT) {
452     WriteShortTaggedPC(pc_delta, kEmbeddedObjectTag);
453   } else if (rmode == RelocInfo::CODE_TARGET) {
454     WriteShortTaggedPC(pc_delta, kCodeTargetTag);
455     DCHECK(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
456   } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
457     // Use signed delta-encoding for id.
458     DCHECK_EQ(static_cast<int>(rinfo->data()), rinfo->data());
459     int id_delta = static_cast<int>(rinfo->data()) - last_id_;
460     // Check if delta is small enough to fit in a tagged byte.
461     if (is_intn(id_delta, kShortDataBits)) {
462       WriteShortTaggedPC(pc_delta, kLocatableTag);
463       WriteShortTaggedData(id_delta, kCodeWithIdTag);
464     } else {
465       // Otherwise, use costly encoding.
466       WriteModeAndPC(pc_delta, rmode);
467       WriteIntData(id_delta);
468     }
469     last_id_ = static_cast<int>(rinfo->data());
470   } else if (rmode == RelocInfo::DEOPT_REASON) {
471     DCHECK(rinfo->data() < (1 << kShortDataBits));
472     WriteShortTaggedPC(pc_delta, kLocatableTag);
473     WriteShortTaggedData(rinfo->data(), kDeoptReasonTag);
474   } else {
475     WriteModeAndPC(pc_delta, rmode);
476     if (RelocInfo::IsComment(rmode)) {
477       WriteData(rinfo->data());
478     } else if (RelocInfo::IsConstPool(rmode) ||
479                RelocInfo::IsVeneerPool(rmode) || RelocInfo::IsDeoptId(rmode) ||
480                RelocInfo::IsDeoptPosition(rmode) ||
481                RelocInfo::IsWasmProtectedLanding(rmode)) {
482       WriteIntData(static_cast<int>(rinfo->data()));
483     }
484   }
485   last_pc_ = rinfo->pc();
486   last_mode_ = rmode;
487 #ifdef DEBUG
488   DCHECK(begin_pos - pos_ <= kMaxSize);
489 #endif
490 }
491 
492 
493 inline int RelocIterator::AdvanceGetTag() {
494   return *--pos_ & kTagMask;
495 }
496 
497 
498 inline RelocInfo::Mode RelocIterator::GetMode() {
499   return static_cast<RelocInfo::Mode>((*pos_ >> kTagBits) &
500                                       ((1 << kLongTagBits) - 1));
501 }
502 
503 
504 inline void RelocIterator::ReadShortTaggedPC() {
505   rinfo_.pc_ += *pos_ >> kTagBits;
506 }
507 
508 
509 inline void RelocIterator::AdvanceReadPC() {
510   rinfo_.pc_ += *--pos_;
511 }
512 
513 
514 void RelocIterator::AdvanceReadId() {
515   int x = 0;
516   for (int i = 0; i < kIntSize; i++) {
517     x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
518   }
519   last_id_ += x;
520   rinfo_.data_ = last_id_;
521 }
522 
523 
524 void RelocIterator::AdvanceReadInt() {
525   int x = 0;
526   for (int i = 0; i < kIntSize; i++) {
527     x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
528   }
529   rinfo_.data_ = x;
530 }
531 
532 
533 void RelocIterator::AdvanceReadData() {
534   intptr_t x = 0;
535   for (int i = 0; i < kIntptrSize; i++) {
536     x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
537   }
538   rinfo_.data_ = x;
539 }
540 
541 
542 void RelocIterator::AdvanceReadLongPCJump() {
543   // Read the 32-kSmallPCDeltaBits most significant bits of the
544   // pc jump in kChunkBits bit chunks and shift them into place.
545   // Stop when the last chunk is encountered.
546   uint32_t pc_jump = 0;
547   for (int i = 0; i < kIntSize; i++) {
548     byte pc_jump_part = *--pos_;
549     pc_jump |= (pc_jump_part >> kLastChunkTagBits) << i * kChunkBits;
550     if ((pc_jump_part & kLastChunkTagMask) == 1) break;
551   }
552   // The least significant kSmallPCDeltaBits bits will be added
553   // later.
554   rinfo_.pc_ += pc_jump << kSmallPCDeltaBits;
555 }
556 
557 
558 inline int RelocIterator::GetShortDataTypeTag() {
559   return *pos_ & ((1 << kShortDataTypeTagBits) - 1);
560 }
561 
562 
563 inline void RelocIterator::ReadShortTaggedId() {
564   int8_t signed_b = *pos_;
565   // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
566   last_id_ += signed_b >> kShortDataTypeTagBits;
567   rinfo_.data_ = last_id_;
568 }
569 
570 
571 inline void RelocIterator::ReadShortTaggedData() {
572   uint8_t unsigned_b = *pos_;
573   rinfo_.data_ = unsigned_b >> kShortDataTypeTagBits;
574 }
575 
576 
577 void RelocIterator::next() {
578   DCHECK(!done());
579   // Basically, do the opposite of RelocInfoWriter::Write.
580   // Reading of data is as far as possible avoided for unwanted modes,
581   // but we must always update the pc.
582   //
583   // We exit this loop by returning when we find a mode we want.
584   while (pos_ > end_) {
585     int tag = AdvanceGetTag();
586     if (tag == kEmbeddedObjectTag) {
587       ReadShortTaggedPC();
588       if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
589     } else if (tag == kCodeTargetTag) {
590       ReadShortTaggedPC();
591       if (SetMode(RelocInfo::CODE_TARGET)) return;
592     } else if (tag == kLocatableTag) {
593       ReadShortTaggedPC();
594       Advance();
595       int data_type_tag = GetShortDataTypeTag();
596       if (data_type_tag == kCodeWithIdTag) {
597         if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
598           ReadShortTaggedId();
599           return;
600         }
601       } else {
602         DCHECK(data_type_tag == kDeoptReasonTag);
603         if (SetMode(RelocInfo::DEOPT_REASON)) {
604           ReadShortTaggedData();
605           return;
606         }
607       }
608     } else {
609       DCHECK(tag == kDefaultTag);
610       RelocInfo::Mode rmode = GetMode();
611       if (rmode == RelocInfo::PC_JUMP) {
612         AdvanceReadLongPCJump();
613       } else {
614         AdvanceReadPC();
615         if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
616           if (SetMode(rmode)) {
617             AdvanceReadId();
618             return;
619           }
620           Advance(kIntSize);
621         } else if (RelocInfo::IsComment(rmode)) {
622           if (SetMode(rmode)) {
623             AdvanceReadData();
624             return;
625           }
626           Advance(kIntptrSize);
627         } else if (RelocInfo::IsConstPool(rmode) ||
628                    RelocInfo::IsVeneerPool(rmode) ||
629                    RelocInfo::IsDeoptId(rmode) ||
630                    RelocInfo::IsDeoptPosition(rmode) ||
631                    RelocInfo::IsWasmProtectedLanding(rmode)) {
632           if (SetMode(rmode)) {
633             AdvanceReadInt();
634             return;
635           }
636           Advance(kIntSize);
637         } else if (SetMode(static_cast<RelocInfo::Mode>(rmode))) {
638           return;
639         }
640       }
641     }
642   }
643   if (code_age_sequence_ != NULL) {
644     byte* old_code_age_sequence = code_age_sequence_;
645     code_age_sequence_ = NULL;
646     if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
647       rinfo_.data_ = 0;
648       rinfo_.pc_ = old_code_age_sequence;
649       return;
650     }
651   }
652   done_ = true;
653 }
654 
655 
656 RelocIterator::RelocIterator(Code* code, int mode_mask)
657     : rinfo_(code->map()->GetIsolate()) {
658   rinfo_.host_ = code;
659   rinfo_.pc_ = code->instruction_start();
660   rinfo_.data_ = 0;
661   // Relocation info is read backwards.
662   pos_ = code->relocation_start() + code->relocation_size();
663   end_ = code->relocation_start();
664   done_ = false;
665   mode_mask_ = mode_mask;
666   last_id_ = 0;
667   byte* sequence = code->FindCodeAgeSequence();
668   // We get the isolate from the map, because at serialization time
669   // the code pointer has been cloned and isn't really in heap space.
670   Isolate* isolate = code->map()->GetIsolate();
671   if (sequence != NULL && !Code::IsYoungSequence(isolate, sequence)) {
672     code_age_sequence_ = sequence;
673   } else {
674     code_age_sequence_ = NULL;
675   }
676   if (mode_mask_ == 0) pos_ = end_;
677   next();
678 }
679 
680 
681 RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask)
682     : rinfo_(desc.origin->isolate()) {
683   rinfo_.pc_ = desc.buffer;
684   rinfo_.data_ = 0;
685   // Relocation info is read backwards.
686   pos_ = desc.buffer + desc.buffer_size;
687   end_ = pos_ - desc.reloc_size;
688   done_ = false;
689   mode_mask_ = mode_mask;
690   last_id_ = 0;
691   code_age_sequence_ = NULL;
692   if (mode_mask_ == 0) pos_ = end_;
693   next();
694 }
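// Editorial sketch of how the iterator is typically driven (assumes a Code*
// `code` and that only code targets are of interest):
//
//   int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
//   for (RelocIterator it(code, mask); !it.done(); it.next()) {
//     RelocInfo* info = it.rinfo();
//     // ... inspect or patch info->target_address() here ...
//   }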
695 
696 
697 // -----------------------------------------------------------------------------
698 // Implementation of RelocInfo
699 
700 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
701   return DebugCodegen::DebugBreakSlotIsPatched(pc_);
702 }
703 
704 #ifdef DEBUG
705 bool RelocInfo::RequiresRelocation(const CodeDesc& desc) {
706   // Ensure there are no code targets or embedded objects present in the
707   // deoptimization entries, they would require relocation after code
708   // generation.
709   int mode_mask = RelocInfo::kCodeTargetMask |
710                   RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
711                   RelocInfo::ModeMask(RelocInfo::CELL) |
712                   RelocInfo::kApplyMask;
713   RelocIterator it(desc, mode_mask);
714   return !it.done();
715 }
716 #endif
717 
718 
719 #ifdef ENABLE_DISASSEMBLER
720 const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
721   switch (rmode) {
722     case NONE32:
723       return "no reloc 32";
724     case NONE64:
725       return "no reloc 64";
726     case EMBEDDED_OBJECT:
727       return "embedded object";
728     case CODE_TARGET:
729       return "code target";
730     case CODE_TARGET_WITH_ID:
731       return "code target with id";
732     case CELL:
733       return "property cell";
734     case RUNTIME_ENTRY:
735       return "runtime entry";
736     case COMMENT:
737       return "comment";
738     case EXTERNAL_REFERENCE:
739       return "external reference";
740     case INTERNAL_REFERENCE:
741       return "internal reference";
742     case INTERNAL_REFERENCE_ENCODED:
743       return "encoded internal reference";
744     case DEOPT_SCRIPT_OFFSET:
745       return "deopt script offset";
746     case DEOPT_INLINING_ID:
747       return "deopt inlining id";
748     case DEOPT_REASON:
749       return "deopt reason";
750     case DEOPT_ID:
751       return "deopt index";
752     case CONST_POOL:
753       return "constant pool";
754     case VENEER_POOL:
755       return "veneer pool";
756     case DEBUG_BREAK_SLOT_AT_POSITION:
757       return "debug break slot at position";
758     case DEBUG_BREAK_SLOT_AT_RETURN:
759       return "debug break slot at return";
760     case DEBUG_BREAK_SLOT_AT_CALL:
761       return "debug break slot at call";
762     case DEBUG_BREAK_SLOT_AT_TAIL_CALL:
763       return "debug break slot at tail call";
764     case CODE_AGE_SEQUENCE:
765       return "code age sequence";
766     case WASM_MEMORY_REFERENCE:
767       return "wasm memory reference";
768     case WASM_MEMORY_SIZE_REFERENCE:
769       return "wasm memory size reference";
770     case WASM_GLOBAL_REFERENCE:
771       return "wasm global value reference";
772     case WASM_FUNCTION_TABLE_SIZE_REFERENCE:
773       return "wasm function table size reference";
774     case WASM_PROTECTED_INSTRUCTION_LANDING:
775       return "wasm protected instruction landing";
776     case NUMBER_OF_MODES:
777     case PC_JUMP:
778       UNREACHABLE();
779       return "number_of_modes";
780   }
781   return "unknown relocation type";
782 }
783 
784 
785 void RelocInfo::Print(Isolate* isolate, std::ostream& os) {  // NOLINT
786   os << static_cast<const void*>(pc_) << "  " << RelocModeName(rmode_);
787   if (IsComment(rmode_)) {
788     os << "  (" << reinterpret_cast<char*>(data_) << ")";
789   } else if (rmode_ == DEOPT_SCRIPT_OFFSET || rmode_ == DEOPT_INLINING_ID) {
790     os << "  (" << data() << ")";
791   } else if (rmode_ == DEOPT_REASON) {
792     os << "  ("
793        << DeoptimizeReasonToString(static_cast<DeoptimizeReason>(data_)) << ")";
794   } else if (rmode_ == EMBEDDED_OBJECT) {
795     os << "  (" << Brief(target_object()) << ")";
796   } else if (rmode_ == EXTERNAL_REFERENCE) {
797     ExternalReferenceEncoder ref_encoder(isolate);
798     os << " ("
799        << ref_encoder.NameOfAddress(isolate, target_external_reference())
800        << ")  (" << static_cast<const void*>(target_external_reference())
801        << ")";
802   } else if (IsCodeTarget(rmode_)) {
803     Code* code = Code::GetCodeFromTargetAddress(target_address());
804     os << " (" << Code::Kind2String(code->kind()) << ")  ("
805        << static_cast<const void*>(target_address()) << ")";
806     if (rmode_ == CODE_TARGET_WITH_ID) {
807       os << " (id=" << static_cast<int>(data_) << ")";
808     }
809   } else if (IsRuntimeEntry(rmode_) &&
810              isolate->deoptimizer_data() != NULL) {
811     // Deoptimization bailouts are stored as runtime entries.
812     int id = Deoptimizer::GetDeoptimizationId(
813         isolate, target_address(), Deoptimizer::EAGER);
814     if (id != Deoptimizer::kNotDeoptimizationEntry) {
815       os << "  (deoptimization bailout " << id << ")";
816     }
817   } else if (IsConstPool(rmode_)) {
818     os << " (size " << static_cast<int>(data_) << ")";
819   }
820 
821   os << "\n";
822 }
823 #endif  // ENABLE_DISASSEMBLER
824 
825 
826 #ifdef VERIFY_HEAP
827 void RelocInfo::Verify(Isolate* isolate) {
828   switch (rmode_) {
829     case EMBEDDED_OBJECT:
830       Object::VerifyPointer(target_object());
831       break;
832     case CELL:
833       Object::VerifyPointer(target_cell());
834       break;
835     case CODE_TARGET_WITH_ID:
836     case CODE_TARGET: {
837       // convert inline target address to code object
838       Address addr = target_address();
839       CHECK(addr != NULL);
840       // Check that we can find the right code object.
841       Code* code = Code::GetCodeFromTargetAddress(addr);
842       Object* found = isolate->FindCodeObject(addr);
843       CHECK(found->IsCode());
844       CHECK(code->address() == HeapObject::cast(found)->address());
845       break;
846     }
847     case INTERNAL_REFERENCE:
848     case INTERNAL_REFERENCE_ENCODED: {
849       Address target = target_internal_reference();
850       Address pc = target_internal_reference_address();
851       Code* code = Code::cast(isolate->FindCodeObject(pc));
852       CHECK(target >= code->instruction_start());
853       CHECK(target <= code->instruction_end());
854       break;
855     }
856     case RUNTIME_ENTRY:
857     case COMMENT:
858     case EXTERNAL_REFERENCE:
859     case DEOPT_SCRIPT_OFFSET:
860     case DEOPT_INLINING_ID:
861     case DEOPT_REASON:
862     case DEOPT_ID:
863     case CONST_POOL:
864     case VENEER_POOL:
865     case DEBUG_BREAK_SLOT_AT_POSITION:
866     case DEBUG_BREAK_SLOT_AT_RETURN:
867     case DEBUG_BREAK_SLOT_AT_CALL:
868     case DEBUG_BREAK_SLOT_AT_TAIL_CALL:
869     case WASM_MEMORY_REFERENCE:
870     case WASM_MEMORY_SIZE_REFERENCE:
871     case WASM_GLOBAL_REFERENCE:
872     case WASM_FUNCTION_TABLE_SIZE_REFERENCE:
873     case WASM_PROTECTED_INSTRUCTION_LANDING:
874     // TODO(eholk): make sure the protected instruction is in range.
875     case NONE32:
876     case NONE64:
877       break;
878     case NUMBER_OF_MODES:
879     case PC_JUMP:
880       UNREACHABLE();
881       break;
882     case CODE_AGE_SEQUENCE:
883       DCHECK(Code::IsYoungSequence(isolate, pc_) || code_age_stub()->IsCode());
884       break;
885   }
886 }
887 #endif  // VERIFY_HEAP
888 
889 
890 // Implementation of ExternalReference
891 
892 static ExternalReference::Type BuiltinCallTypeForResultSize(int result_size) {
893   switch (result_size) {
894     case 1:
895       return ExternalReference::BUILTIN_CALL;
896     case 2:
897       return ExternalReference::BUILTIN_CALL_PAIR;
898     case 3:
899       return ExternalReference::BUILTIN_CALL_TRIPLE;
900   }
901   UNREACHABLE();
902   return ExternalReference::BUILTIN_CALL;
903 }
904 
905 
906 void ExternalReference::SetUp() {
907   double_constants.min_int = kMinInt;
908   double_constants.one_half = 0.5;
909   double_constants.minus_one_half = -0.5;
910   double_constants.the_hole_nan = kHoleNanInt64;
911   double_constants.negative_infinity = -V8_INFINITY;
912   double_constants.uint32_bias =
913     static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
914 }
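// Editorial note: uint32_bias is 2^32 (4294967296.0); generated code adds or
// subtracts it when moving between signed and unsigned 32-bit interpretations
// of a value held in a double.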
915 
916 ExternalReference::ExternalReference(Address address, Isolate* isolate)
917     : address_(Redirect(isolate, address)) {}
918 
919 ExternalReference::ExternalReference(
920     ApiFunction* fun,
921     Type type = ExternalReference::BUILTIN_CALL,
922     Isolate* isolate = NULL)
923   : address_(Redirect(isolate, fun->address(), type)) {}
924 
925 
926 ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate)
927   : address_(isolate->builtins()->builtin_address(name)) {}
928 
929 
930 ExternalReference::ExternalReference(Runtime::FunctionId id, Isolate* isolate)
931     : ExternalReference(Runtime::FunctionForId(id), isolate) {}
932 
933 
934 ExternalReference::ExternalReference(const Runtime::Function* f,
935                                      Isolate* isolate)
936     : address_(Redirect(isolate, f->entry,
937                         BuiltinCallTypeForResultSize(f->result_size))) {}
938 
939 
940 ExternalReference ExternalReference::isolate_address(Isolate* isolate) {
941   return ExternalReference(isolate);
942 }
943 
944 ExternalReference ExternalReference::interpreter_dispatch_table_address(
945     Isolate* isolate) {
946   return ExternalReference(isolate->interpreter()->dispatch_table_address());
947 }
948 
949 ExternalReference ExternalReference::interpreter_dispatch_counters(
950     Isolate* isolate) {
951   return ExternalReference(
952       isolate->interpreter()->bytecode_dispatch_counters_table());
953 }
954 
955 ExternalReference::ExternalReference(StatsCounter* counter)
956   : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}
957 
958 
959 ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate)
960   : address_(isolate->get_address_from_id(id)) {}
961 
962 
963 ExternalReference::ExternalReference(const SCTableReference& table_ref)
964   : address_(table_ref.address()) {}
965 
966 
967 ExternalReference ExternalReference::
968     incremental_marking_record_write_function(Isolate* isolate) {
969   return ExternalReference(Redirect(
970       isolate,
971       FUNCTION_ADDR(IncrementalMarking::RecordWriteFromCode)));
972 }
973 
974 ExternalReference
975 ExternalReference::incremental_marking_record_write_code_entry_function(
976     Isolate* isolate) {
977   return ExternalReference(Redirect(
978       isolate,
979       FUNCTION_ADDR(IncrementalMarking::RecordWriteOfCodeEntryFromCode)));
980 }
981 
982 ExternalReference ExternalReference::store_buffer_overflow_function(
983     Isolate* isolate) {
984   return ExternalReference(Redirect(
985       isolate,
986       FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow)));
987 }
988 
989 
990 ExternalReference ExternalReference::delete_handle_scope_extensions(
991     Isolate* isolate) {
992   return ExternalReference(Redirect(
993       isolate,
994       FUNCTION_ADDR(HandleScope::DeleteExtensions)));
995 }
996 
997 
998 ExternalReference ExternalReference::get_date_field_function(
999     Isolate* isolate) {
1000   return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
1001 }
1002 
1003 
1004 ExternalReference ExternalReference::get_make_code_young_function(
1005     Isolate* isolate) {
1006   return ExternalReference(Redirect(
1007       isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
1008 }
1009 
1010 
1011 ExternalReference ExternalReference::get_mark_code_as_executed_function(
1012     Isolate* isolate) {
1013   return ExternalReference(Redirect(
1014       isolate, FUNCTION_ADDR(Code::MarkCodeAsExecuted)));
1015 }
1016 
1017 
1018 ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
1019   return ExternalReference(isolate->date_cache()->stamp_address());
1020 }
1021 
1022 
1023 ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
1024   return ExternalReference(isolate->stress_deopt_count_address());
1025 }
1026 
1027 
1028 ExternalReference ExternalReference::new_deoptimizer_function(
1029     Isolate* isolate) {
1030   return ExternalReference(
1031       Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New)));
1032 }
1033 
1034 
1035 ExternalReference ExternalReference::compute_output_frames_function(
1036     Isolate* isolate) {
1037   return ExternalReference(
1038       Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
1039 }
1040 
1041 ExternalReference ExternalReference::wasm_f32_trunc(Isolate* isolate) {
1042   return ExternalReference(
1043       Redirect(isolate, FUNCTION_ADDR(wasm::f32_trunc_wrapper)));
1044 }
1045 ExternalReference ExternalReference::wasm_f32_floor(Isolate* isolate) {
1046   return ExternalReference(
1047       Redirect(isolate, FUNCTION_ADDR(wasm::f32_floor_wrapper)));
1048 }
1049 ExternalReference ExternalReference::wasm_f32_ceil(Isolate* isolate) {
1050   return ExternalReference(
1051       Redirect(isolate, FUNCTION_ADDR(wasm::f32_ceil_wrapper)));
1052 }
1053 ExternalReference ExternalReference::wasm_f32_nearest_int(Isolate* isolate) {
1054   return ExternalReference(
1055       Redirect(isolate, FUNCTION_ADDR(wasm::f32_nearest_int_wrapper)));
1056 }
1057 
1058 ExternalReference ExternalReference::wasm_f64_trunc(Isolate* isolate) {
1059   return ExternalReference(
1060       Redirect(isolate, FUNCTION_ADDR(wasm::f64_trunc_wrapper)));
1061 }
1062 
1063 ExternalReference ExternalReference::wasm_f64_floor(Isolate* isolate) {
1064   return ExternalReference(
1065       Redirect(isolate, FUNCTION_ADDR(wasm::f64_floor_wrapper)));
1066 }
1067 
1068 ExternalReference ExternalReference::wasm_f64_ceil(Isolate* isolate) {
1069   return ExternalReference(
1070       Redirect(isolate, FUNCTION_ADDR(wasm::f64_ceil_wrapper)));
1071 }
1072 
1073 ExternalReference ExternalReference::wasm_f64_nearest_int(Isolate* isolate) {
1074   return ExternalReference(
1075       Redirect(isolate, FUNCTION_ADDR(wasm::f64_nearest_int_wrapper)));
1076 }
1077 
1078 ExternalReference ExternalReference::wasm_int64_to_float32(Isolate* isolate) {
1079   return ExternalReference(
1080       Redirect(isolate, FUNCTION_ADDR(wasm::int64_to_float32_wrapper)));
1081 }
1082 
1083 ExternalReference ExternalReference::wasm_uint64_to_float32(Isolate* isolate) {
1084   return ExternalReference(
1085       Redirect(isolate, FUNCTION_ADDR(wasm::uint64_to_float32_wrapper)));
1086 }
1087 
1088 ExternalReference ExternalReference::wasm_int64_to_float64(Isolate* isolate) {
1089   return ExternalReference(
1090       Redirect(isolate, FUNCTION_ADDR(wasm::int64_to_float64_wrapper)));
1091 }
1092 
1093 ExternalReference ExternalReference::wasm_uint64_to_float64(Isolate* isolate) {
1094   return ExternalReference(
1095       Redirect(isolate, FUNCTION_ADDR(wasm::uint64_to_float64_wrapper)));
1096 }
1097 
1098 ExternalReference ExternalReference::wasm_float32_to_int64(Isolate* isolate) {
1099   return ExternalReference(
1100       Redirect(isolate, FUNCTION_ADDR(wasm::float32_to_int64_wrapper)));
1101 }
1102 
1103 ExternalReference ExternalReference::wasm_float32_to_uint64(Isolate* isolate) {
1104   return ExternalReference(
1105       Redirect(isolate, FUNCTION_ADDR(wasm::float32_to_uint64_wrapper)));
1106 }
1107 
1108 ExternalReference ExternalReference::wasm_float64_to_int64(Isolate* isolate) {
1109   return ExternalReference(
1110       Redirect(isolate, FUNCTION_ADDR(wasm::float64_to_int64_wrapper)));
1111 }
1112 
1113 ExternalReference ExternalReference::wasm_float64_to_uint64(Isolate* isolate) {
1114   return ExternalReference(
1115       Redirect(isolate, FUNCTION_ADDR(wasm::float64_to_uint64_wrapper)));
1116 }
1117 
1118 ExternalReference ExternalReference::wasm_int64_div(Isolate* isolate) {
1119   return ExternalReference(
1120       Redirect(isolate, FUNCTION_ADDR(wasm::int64_div_wrapper)));
1121 }
1122 
1123 ExternalReference ExternalReference::wasm_int64_mod(Isolate* isolate) {
1124   return ExternalReference(
1125       Redirect(isolate, FUNCTION_ADDR(wasm::int64_mod_wrapper)));
1126 }
1127 
1128 ExternalReference ExternalReference::wasm_uint64_div(Isolate* isolate) {
1129   return ExternalReference(
1130       Redirect(isolate, FUNCTION_ADDR(wasm::uint64_div_wrapper)));
1131 }
1132 
1133 ExternalReference ExternalReference::wasm_uint64_mod(Isolate* isolate) {
1134   return ExternalReference(
1135       Redirect(isolate, FUNCTION_ADDR(wasm::uint64_mod_wrapper)));
1136 }
1137 
1138 ExternalReference ExternalReference::wasm_word32_ctz(Isolate* isolate) {
1139   return ExternalReference(
1140       Redirect(isolate, FUNCTION_ADDR(wasm::word32_ctz_wrapper)));
1141 }
1142 
1143 ExternalReference ExternalReference::wasm_word64_ctz(Isolate* isolate) {
1144   return ExternalReference(
1145       Redirect(isolate, FUNCTION_ADDR(wasm::word64_ctz_wrapper)));
1146 }
1147 
1148 ExternalReference ExternalReference::wasm_word32_popcnt(Isolate* isolate) {
1149   return ExternalReference(
1150       Redirect(isolate, FUNCTION_ADDR(wasm::word32_popcnt_wrapper)));
1151 }
1152 
1153 ExternalReference ExternalReference::wasm_word64_popcnt(Isolate* isolate) {
1154   return ExternalReference(
1155       Redirect(isolate, FUNCTION_ADDR(wasm::word64_popcnt_wrapper)));
1156 }
1157 
1158 static void f64_acos_wrapper(double* param) {
1159   WriteDoubleValue(param, base::ieee754::acos(ReadDoubleValue(param)));
1160 }
1161 
1162 ExternalReference ExternalReference::f64_acos_wrapper_function(
1163     Isolate* isolate) {
1164   return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f64_acos_wrapper)));
1165 }
1166 
1167 static void f64_asin_wrapper(double* param) {
1168   WriteDoubleValue(param, base::ieee754::asin(ReadDoubleValue(param)));
1169 }
1170 
1171 ExternalReference ExternalReference::f64_asin_wrapper_function(
1172     Isolate* isolate) {
1173   return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f64_asin_wrapper)));
1174 }
1175 
1176 ExternalReference ExternalReference::wasm_float64_pow(Isolate* isolate) {
1177   return ExternalReference(
1178       Redirect(isolate, FUNCTION_ADDR(wasm::float64_pow_wrapper)));
1179 }
1180 
1181 static void f64_mod_wrapper(double* param0, double* param1) {
1182   WriteDoubleValue(param0,
1183                    modulo(ReadDoubleValue(param0), ReadDoubleValue(param1)));
1184 }
1185 
1186 ExternalReference ExternalReference::f64_mod_wrapper_function(
1187     Isolate* isolate) {
1188   return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f64_mod_wrapper)));
1189 }
1190 
1191 ExternalReference ExternalReference::wasm_call_trap_callback_for_testing(
1192     Isolate* isolate) {
1193   return ExternalReference(
1194       Redirect(isolate, FUNCTION_ADDR(wasm::call_trap_callback_for_testing)));
1195 }
1196 
1197 ExternalReference ExternalReference::log_enter_external_function(
1198     Isolate* isolate) {
1199   return ExternalReference(
1200       Redirect(isolate, FUNCTION_ADDR(Logger::EnterExternal)));
1201 }
1202 
1203 
1204 ExternalReference ExternalReference::log_leave_external_function(
1205     Isolate* isolate) {
1206   return ExternalReference(
1207       Redirect(isolate, FUNCTION_ADDR(Logger::LeaveExternal)));
1208 }
1209 
1210 ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
1211   return ExternalReference(isolate->heap()->roots_array_start());
1212 }
1213 
1214 
1215 ExternalReference ExternalReference::allocation_sites_list_address(
1216     Isolate* isolate) {
1217   return ExternalReference(isolate->heap()->allocation_sites_list_address());
1218 }
1219 
1220 
1221 ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) {
1222   return ExternalReference(isolate->stack_guard()->address_of_jslimit());
1223 }
1224 
1225 
1226 ExternalReference ExternalReference::address_of_real_stack_limit(
1227     Isolate* isolate) {
1228   return ExternalReference(isolate->stack_guard()->address_of_real_jslimit());
1229 }
1230 
1231 
1232 ExternalReference ExternalReference::address_of_regexp_stack_limit(
1233     Isolate* isolate) {
1234   return ExternalReference(isolate->regexp_stack()->limit_address());
1235 }
1236 
1237 ExternalReference ExternalReference::store_buffer_top(Isolate* isolate) {
1238   return ExternalReference(isolate->heap()->store_buffer_top_address());
1239 }
1240 
1241 
1242 ExternalReference ExternalReference::new_space_allocation_top_address(
1243     Isolate* isolate) {
1244   return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
1245 }
1246 
1247 
1248 ExternalReference ExternalReference::new_space_allocation_limit_address(
1249     Isolate* isolate) {
1250   return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress());
1251 }
1252 
1253 
1254 ExternalReference ExternalReference::old_space_allocation_top_address(
1255     Isolate* isolate) {
1256   return ExternalReference(isolate->heap()->OldSpaceAllocationTopAddress());
1257 }
1258 
1259 
1260 ExternalReference ExternalReference::old_space_allocation_limit_address(
1261     Isolate* isolate) {
1262   return ExternalReference(isolate->heap()->OldSpaceAllocationLimitAddress());
1263 }
1264 
1265 
1266 ExternalReference ExternalReference::handle_scope_level_address(
1267     Isolate* isolate) {
1268   return ExternalReference(HandleScope::current_level_address(isolate));
1269 }
1270 
1271 
1272 ExternalReference ExternalReference::handle_scope_next_address(
1273     Isolate* isolate) {
1274   return ExternalReference(HandleScope::current_next_address(isolate));
1275 }
1276 
1277 
1278 ExternalReference ExternalReference::handle_scope_limit_address(
1279     Isolate* isolate) {
1280   return ExternalReference(HandleScope::current_limit_address(isolate));
1281 }
1282 
1283 
1284 ExternalReference ExternalReference::scheduled_exception_address(
1285     Isolate* isolate) {
1286   return ExternalReference(isolate->scheduled_exception_address());
1287 }
1288 
1289 
1290 ExternalReference ExternalReference::address_of_pending_message_obj(
1291     Isolate* isolate) {
1292   return ExternalReference(isolate->pending_message_obj_address());
1293 }
1294 
1295 
1296 ExternalReference ExternalReference::address_of_min_int() {
1297   return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
1298 }
1299 
1300 
1301 ExternalReference ExternalReference::address_of_one_half() {
1302   return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
1303 }
1304 
1305 
1306 ExternalReference ExternalReference::address_of_minus_one_half() {
1307   return ExternalReference(
1308       reinterpret_cast<void*>(&double_constants.minus_one_half));
1309 }
1310 
1311 
1312 ExternalReference ExternalReference::address_of_negative_infinity() {
1313   return ExternalReference(
1314       reinterpret_cast<void*>(&double_constants.negative_infinity));
1315 }
1316 
1317 
1318 ExternalReference ExternalReference::address_of_the_hole_nan() {
1319   return ExternalReference(
1320       reinterpret_cast<void*>(&double_constants.the_hole_nan));
1321 }
1322 
1323 
1324 ExternalReference ExternalReference::address_of_uint32_bias() {
1325   return ExternalReference(
1326       reinterpret_cast<void*>(&double_constants.uint32_bias));
1327 }
1328 
1329 
1330 ExternalReference ExternalReference::address_of_float_abs_constant() {
1331   return ExternalReference(reinterpret_cast<void*>(&float_absolute_constant));
1332 }
1333 
1334 
1335 ExternalReference ExternalReference::address_of_float_neg_constant() {
1336   return ExternalReference(reinterpret_cast<void*>(&float_negate_constant));
1337 }
1338 
1339 
1340 ExternalReference ExternalReference::address_of_double_abs_constant() {
1341   return ExternalReference(reinterpret_cast<void*>(&double_absolute_constant));
1342 }
1343 
1344 
1345 ExternalReference ExternalReference::address_of_double_neg_constant() {
1346   return ExternalReference(reinterpret_cast<void*>(&double_negate_constant));
1347 }
1348 
1349 
1350 ExternalReference ExternalReference::is_profiling_address(Isolate* isolate) {
1351   return ExternalReference(isolate->is_profiling_address());
1352 }
1353 
1354 
1355 ExternalReference ExternalReference::invoke_function_callback(
1356     Isolate* isolate) {
1357   Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
1358   ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
1359   ApiFunction thunk_fun(thunk_address);
1360   return ExternalReference(&thunk_fun, thunk_type, isolate);
1361 }
1362 
1363 
1364 ExternalReference ExternalReference::invoke_accessor_getter_callback(
1365     Isolate* isolate) {
1366   Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
1367   ExternalReference::Type thunk_type =
1368       ExternalReference::PROFILING_GETTER_CALL;
1369   ApiFunction thunk_fun(thunk_address);
1370   return ExternalReference(&thunk_fun, thunk_type, isolate);
1371 }
1372 
1373 
1374 #ifndef V8_INTERPRETED_REGEXP
1375 
1376 ExternalReference ExternalReference::re_check_stack_guard_state(
1377     Isolate* isolate) {
1378   Address function;
1379 #if V8_TARGET_ARCH_X64
1380   function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState);
1381 #elif V8_TARGET_ARCH_IA32
1382   function = FUNCTION_ADDR(RegExpMacroAssemblerIA32::CheckStackGuardState);
1383 #elif V8_TARGET_ARCH_ARM64
1384   function = FUNCTION_ADDR(RegExpMacroAssemblerARM64::CheckStackGuardState);
1385 #elif V8_TARGET_ARCH_ARM
1386   function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState);
1387 #elif V8_TARGET_ARCH_PPC
1388   function = FUNCTION_ADDR(RegExpMacroAssemblerPPC::CheckStackGuardState);
1389 #elif V8_TARGET_ARCH_MIPS
1390   function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1391 #elif V8_TARGET_ARCH_MIPS64
1392   function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1393 #elif V8_TARGET_ARCH_S390
1394   function = FUNCTION_ADDR(RegExpMacroAssemblerS390::CheckStackGuardState);
1395 #elif V8_TARGET_ARCH_X87
1396   function = FUNCTION_ADDR(RegExpMacroAssemblerX87::CheckStackGuardState);
1397 #else
1398   UNREACHABLE();
1399 #endif
1400   return ExternalReference(Redirect(isolate, function));
1401 }
1402 
1403 
re_grow_stack(Isolate * isolate)1404 ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) {
1405   return ExternalReference(
1406       Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
1407 }
1408 
re_case_insensitive_compare_uc16(Isolate * isolate)1409 ExternalReference ExternalReference::re_case_insensitive_compare_uc16(
1410     Isolate* isolate) {
1411   return ExternalReference(Redirect(
1412       isolate,
1413       FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
1414 }
1415 
1416 
re_word_character_map()1417 ExternalReference ExternalReference::re_word_character_map() {
1418   return ExternalReference(
1419       NativeRegExpMacroAssembler::word_character_map_address());
1420 }
1421 
address_of_static_offsets_vector(Isolate * isolate)1422 ExternalReference ExternalReference::address_of_static_offsets_vector(
1423     Isolate* isolate) {
1424   return ExternalReference(
1425       reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector()));
1426 }
1427 
address_of_regexp_stack_memory_address(Isolate * isolate)1428 ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
1429     Isolate* isolate) {
1430   return ExternalReference(
1431       isolate->regexp_stack()->memory_address());
1432 }
1433 
address_of_regexp_stack_memory_size(Isolate * isolate)1434 ExternalReference ExternalReference::address_of_regexp_stack_memory_size(
1435     Isolate* isolate) {
1436   return ExternalReference(isolate->regexp_stack()->memory_size_address());
1437 }
1438 
1439 #endif  // V8_INTERPRETED_REGEXP
1440 
ExternalReference ExternalReference::ieee754_acos_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::acos), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_acosh_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(base::ieee754::acosh), BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::ieee754_asin_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::asin), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_asinh_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(base::ieee754::asinh), BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::ieee754_atan_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::atan), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_atanh_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(base::ieee754::atanh), BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::ieee754_atan2_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(base::ieee754::atan2), BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::ieee754_cbrt_function(Isolate* isolate) {
  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(base::ieee754::cbrt),
                                    BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::ieee754_cos_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::cos), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_cosh_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::cosh), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_exp_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::exp), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_expm1_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(base::ieee754::expm1), BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::ieee754_log_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::log), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_log1p_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::log1p), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_log10_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::log10), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_log2_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::log2), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_sin_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::sin), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_sinh_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::sinh), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_tan_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::tan), BUILTIN_FP_CALL));
}

ExternalReference ExternalReference::ieee754_tanh_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(base::ieee754::tanh), BUILTIN_FP_CALL));
}

void* libc_memchr(void* string, int character, size_t search_length) {
  return memchr(string, character, search_length);
}

ExternalReference ExternalReference::libc_memchr_function(Isolate* isolate) {
  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(libc_memchr)));
}

ExternalReference ExternalReference::page_flags(Page* page) {
  return ExternalReference(reinterpret_cast<Address>(page) +
                           MemoryChunk::kFlagsOffset);
}


ExternalReference ExternalReference::ForDeoptEntry(Address entry) {
  return ExternalReference(entry);
}


ExternalReference ExternalReference::cpu_features() {
  DCHECK(CpuFeatures::initialized_);
  return ExternalReference(&CpuFeatures::supported_);
}

ExternalReference ExternalReference::is_tail_call_elimination_enabled_address(
    Isolate* isolate) {
  return ExternalReference(isolate->is_tail_call_elimination_enabled_address());
}

ExternalReference ExternalReference::promise_hook_or_debug_is_active_address(
    Isolate* isolate) {
  return ExternalReference(isolate->promise_hook_or_debug_is_active_address());
}

ExternalReference ExternalReference::debug_is_active_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->is_active_address());
}

ExternalReference ExternalReference::debug_hook_on_function_call_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->hook_on_function_call_address());
}

ExternalReference ExternalReference::runtime_function_table_address(
    Isolate* isolate) {
  return ExternalReference(
      const_cast<Runtime::Function*>(Runtime::RuntimeFunctionTable(isolate)));
}

double power_helper(Isolate* isolate, double x, double y) {
  int y_int = static_cast<int>(y);
  if (y == y_int) {
    return power_double_int(x, y_int);  // Returns 1 if exponent is 0.
  }
  if (y == 0.5) {
    lazily_initialize_fast_sqrt(isolate);
    return (std::isinf(x)) ? V8_INFINITY
                           : fast_sqrt(x + 0.0, isolate);  // Convert -0 to +0.
  }
  if (y == -0.5) {
    lazily_initialize_fast_sqrt(isolate);
    return (std::isinf(x)) ? 0 : 1.0 / fast_sqrt(x + 0.0,
                                                 isolate);  // Convert -0 to +0.
  }
  return power_double_double(x, y);
}


// Helper function to compute x^y, where y is known to be an
// integer. Uses binary decomposition to limit the number of
// multiplications; see the discussion in "Hacker's Delight" by Henry
// S. Warren, Jr., figure 11-6, page 213.
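//
// For example, with y = 6 (binary 110) the loop below consumes two exponent
// bits per iteration:
//   start:        m = x,  n = 0b110,  p = 1
//   iteration 1:  bit 0 is 0, p unchanged;  m = x^2
//                 bit 1 is 1, p = x^2;      m = x^4,  n = 0b1
//   iteration 2:  bit 0 is 1, p = x^2 * x^4 = x^6
// so the result is x^6 after three multiplications into p and m each.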
double power_double_int(double x, int y) {
  double m = (y < 0) ? 1 / x : x;
  unsigned n = (y < 0) ? -y : y;
  double p = 1;
  while (n != 0) {
    if ((n & 1) != 0) p *= m;
    m *= m;
    if ((n & 2) != 0) p *= m;
    m *= m;
    n >>= 2;
  }
  return p;
}


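// Returning NaN below matches ECMAScript Math.pow semantics: pow(x, NaN) is
// NaN and pow(+/-1, +/-Infinity) is NaN, whereas the C library's pow()
// returns 1 for pow(1, NaN) and for pow(+/-1, +/-Infinity).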
double power_double_double(double x, double y) {
  // The checks for special cases can be dropped in ia32 because they have
  // already been done in generated code before bailing out here.
  if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
    return std::numeric_limits<double>::quiet_NaN();
  }
  return Pow(x, y);
}


ExternalReference ExternalReference::power_double_double_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(power_double_double),
                                    BUILTIN_FP_FP_CALL));
}


ExternalReference ExternalReference::mod_two_doubles_operation(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(modulo),
                                    BUILTIN_FP_FP_CALL));
}

ExternalReference ExternalReference::debug_last_step_action_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->last_step_action_address());
}

ExternalReference ExternalReference::debug_suspended_generator_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->suspended_generator_address());
}

ExternalReference ExternalReference::debug_restart_fp_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->restart_fp_address());
}

ExternalReference ExternalReference::fixed_typed_array_base_data_offset() {
  return ExternalReference(reinterpret_cast<void*>(
      FixedTypedArrayBase::kDataOffset - kHeapObjectTag));
}


bool operator==(ExternalReference lhs, ExternalReference rhs) {
  return lhs.address() == rhs.address();
}


bool operator!=(ExternalReference lhs, ExternalReference rhs) {
  return !(lhs == rhs);
}


size_t hash_value(ExternalReference reference) {
  return base::hash<Address>()(reference.address());
}


std::ostream& operator<<(std::ostream& os, ExternalReference reference) {
  os << static_cast<const void*>(reference.address());
  const Runtime::Function* fn = Runtime::FunctionForEntry(reference.address());
  if (fn) os << "<" << fn->name << ".entry>";
  return os;
}


ConstantPoolBuilder::ConstantPoolBuilder(int ptr_reach_bits,
                                         int double_reach_bits) {
  info_[ConstantPoolEntry::INTPTR].entries.reserve(64);
  info_[ConstantPoolEntry::INTPTR].regular_reach_bits = ptr_reach_bits;
  info_[ConstantPoolEntry::DOUBLE].regular_reach_bits = double_reach_bits;
}

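// In the regular section of the pool, double entries are emitted first (for
// alignment) and pointer-sized entries follow, so accepting one more double
// shifts every pointer entry by kDoubleSize. The DOUBLE case below therefore
// also re-checks that the last pointer entry would still be within the
// pointer reach.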
ConstantPoolEntry::Access ConstantPoolBuilder::NextAccess(
    ConstantPoolEntry::Type type) const {
  const PerTypeEntryInfo& info = info_[type];

  if (info.overflow()) return ConstantPoolEntry::OVERFLOWED;

  int dbl_count = info_[ConstantPoolEntry::DOUBLE].regular_count;
  int dbl_offset = dbl_count * kDoubleSize;
  int ptr_count = info_[ConstantPoolEntry::INTPTR].regular_count;
  int ptr_offset = ptr_count * kPointerSize + dbl_offset;

  if (type == ConstantPoolEntry::DOUBLE) {
    // Double overflow detection must take into account the reach for both types
    int ptr_reach_bits = info_[ConstantPoolEntry::INTPTR].regular_reach_bits;
    if (!is_uintn(dbl_offset, info.regular_reach_bits) ||
        (ptr_count > 0 &&
         !is_uintn(ptr_offset + kDoubleSize - kPointerSize, ptr_reach_bits))) {
      return ConstantPoolEntry::OVERFLOWED;
    }
  } else {
    DCHECK(type == ConstantPoolEntry::INTPTR);
    if (!is_uintn(ptr_offset, info.regular_reach_bits)) {
      return ConstantPoolEntry::OVERFLOWED;
    }
  }

  return ConstantPoolEntry::REGULAR;
}


ConstantPoolEntry::Access ConstantPoolBuilder::AddEntry(
    ConstantPoolEntry& entry, ConstantPoolEntry::Type type) {
  DCHECK(!emitted_label_.is_bound());
  PerTypeEntryInfo& info = info_[type];
  const int entry_size = ConstantPoolEntry::size(type);
  bool merged = false;

  if (entry.sharing_ok()) {
    // Try to merge entries
    std::vector<ConstantPoolEntry>::iterator it = info.shared_entries.begin();
    int end = static_cast<int>(info.shared_entries.size());
    for (int i = 0; i < end; i++, it++) {
      if ((entry_size == kPointerSize) ? entry.value() == it->value()
                                       : entry.value64() == it->value64()) {
        // Merge with found entry.
        entry.set_merged_index(i);
        merged = true;
        break;
      }
    }
  }

  // By definition, merged entries have regular access.
  DCHECK(!merged || entry.merged_index() < info.regular_count);
  ConstantPoolEntry::Access access =
      (merged ? ConstantPoolEntry::REGULAR : NextAccess(type));

  // Enforce an upper bound on search time by limiting the search to
  // unique sharable entries which fit in the regular section.
  if (entry.sharing_ok() && !merged && access == ConstantPoolEntry::REGULAR) {
    info.shared_entries.push_back(entry);
  } else {
    info.entries.push_back(entry);
  }

  // We're done if we found a match or have already triggered the
  // overflow state.
  if (merged || info.overflow()) return access;

  if (access == ConstantPoolEntry::REGULAR) {
    info.regular_count++;
  } else {
    info.overflow_start = static_cast<int>(info.entries.size()) - 1;
  }

  return access;
}


void ConstantPoolBuilder::EmitSharedEntries(Assembler* assm,
                                            ConstantPoolEntry::Type type) {
  PerTypeEntryInfo& info = info_[type];
  std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
  const int entry_size = ConstantPoolEntry::size(type);
  int base = emitted_label_.pos();
  DCHECK(base > 0);
  int shared_end = static_cast<int>(shared_entries.size());
  std::vector<ConstantPoolEntry>::iterator shared_it = shared_entries.begin();
  for (int i = 0; i < shared_end; i++, shared_it++) {
    int offset = assm->pc_offset() - base;
    shared_it->set_offset(offset);  // Save offset for merged entries.
    if (entry_size == kPointerSize) {
      assm->dp(shared_it->value());
    } else {
      assm->dq(shared_it->value64());
    }
    DCHECK(is_uintn(offset, info.regular_reach_bits));

    // Patch load sequence with correct offset.
    assm->PatchConstantPoolAccessInstruction(shared_it->position(), offset,
                                             ConstantPoolEntry::REGULAR, type);
  }
}


void ConstantPoolBuilder::EmitGroup(Assembler* assm,
                                    ConstantPoolEntry::Access access,
                                    ConstantPoolEntry::Type type) {
  PerTypeEntryInfo& info = info_[type];
  const bool overflow = info.overflow();
  std::vector<ConstantPoolEntry>& entries = info.entries;
  std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
  const int entry_size = ConstantPoolEntry::size(type);
  int base = emitted_label_.pos();
  DCHECK(base > 0);
  int begin;
  int end;

  if (access == ConstantPoolEntry::REGULAR) {
    // Emit any shared entries first
    EmitSharedEntries(assm, type);
  }

  if (access == ConstantPoolEntry::REGULAR) {
    begin = 0;
    end = overflow ? info.overflow_start : static_cast<int>(entries.size());
  } else {
    DCHECK(access == ConstantPoolEntry::OVERFLOWED);
    if (!overflow) return;
    begin = info.overflow_start;
    end = static_cast<int>(entries.size());
  }

  std::vector<ConstantPoolEntry>::iterator it = entries.begin();
  if (begin > 0) std::advance(it, begin);
  for (int i = begin; i < end; i++, it++) {
    // Update constant pool if necessary and get the entry's offset.
    int offset;
    ConstantPoolEntry::Access entry_access;
    if (!it->is_merged()) {
      // Emit new entry
      offset = assm->pc_offset() - base;
      entry_access = access;
      if (entry_size == kPointerSize) {
        assm->dp(it->value());
      } else {
        assm->dq(it->value64());
      }
    } else {
      // Retrieve offset from shared entry.
      offset = shared_entries[it->merged_index()].offset();
      entry_access = ConstantPoolEntry::REGULAR;
    }

    DCHECK(entry_access == ConstantPoolEntry::OVERFLOWED ||
           is_uintn(offset, info.regular_reach_bits));

    // Patch load sequence with correct offset.
    assm->PatchConstantPoolAccessInstruction(it->position(), offset,
                                             entry_access, type);
  }
}


// Emit and return position of pool.  Zero implies no constant pool.
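// The pool is emitted as up to four consecutive groups: regular doubles,
// regular pointer-sized entries, then any overflowed doubles and overflowed
// pointer-sized entries, with the pool start and the overflowed doubles
// aligned to kDoubleSize.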
int ConstantPoolBuilder::Emit(Assembler* assm) {
  bool emitted = emitted_label_.is_bound();
  bool empty = IsEmpty();

  if (!emitted) {
    // Mark start of constant pool.  Align if necessary.
    if (!empty) assm->DataAlign(kDoubleSize);
    assm->bind(&emitted_label_);
    if (!empty) {
      // Emit in groups based on access and type.
      // Emit doubles first for alignment purposes.
      EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::DOUBLE);
      EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::INTPTR);
      if (info_[ConstantPoolEntry::DOUBLE].overflow()) {
        assm->DataAlign(kDoubleSize);
        EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
                  ConstantPoolEntry::DOUBLE);
      }
      if (info_[ConstantPoolEntry::INTPTR].overflow()) {
        EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
                  ConstantPoolEntry::INTPTR);
      }
    }
  }

  return !empty ? emitted_label_.pos() : 0;
}


// Platform specific but identical code for all the platforms.

void Assembler::RecordDeoptReason(DeoptimizeReason reason,
                                  SourcePosition position, int id) {
  if (FLAG_trace_deopt || isolate()->is_profiling()) {
    EnsureSpace ensure_space(this);
    RecordRelocInfo(RelocInfo::DEOPT_SCRIPT_OFFSET, position.ScriptOffset());
    RecordRelocInfo(RelocInfo::DEOPT_INLINING_ID, position.InliningId());
    RecordRelocInfo(RelocInfo::DEOPT_REASON, static_cast<int>(reason));
    RecordRelocInfo(RelocInfo::DEOPT_ID, id);
  }
}


void Assembler::RecordComment(const char* msg) {
  if (FLAG_code_comments) {
    EnsureSpace ensure_space(this);
    RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
  }
}


void Assembler::RecordDebugBreakSlot(RelocInfo::Mode mode) {
  EnsureSpace ensure_space(this);
  DCHECK(RelocInfo::IsDebugBreakSlot(mode));
  RecordRelocInfo(mode);
}

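// Pad the instruction stream with zero bytes until pc_offset() is a multiple
// of m. For example, DataAlign(8) at pc_offset() == 13 emits three zero bytes
// so the next datum starts at offset 16.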
void Assembler::DataAlign(int m) {
  DCHECK(m >= 2 && base::bits::IsPowerOfTwo32(m));
  while ((pc_offset() & (m - 1)) != 0) {
    db(0);
  }
}
}  // namespace internal
}  // namespace v8