// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_CODE_INL_H_
#define V8_OBJECTS_CODE_INL_H_

#include "src/objects/code.h"

#include "src/interpreter/bytecode-register.h"
#include "src/isolate.h"
#include "src/objects/dictionary.h"
#include "src/objects/map-inl.h"
#include "src/objects/maybe-object-inl.h"
#include "src/v8memory.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

CAST_ACCESSOR(AbstractCode)
CAST_ACCESSOR(BytecodeArray)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeDataContainer)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DeoptimizationData)

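// An AbstractCode is either a Code object or a BytecodeArray. Each accessor
// below dispatches on IsCode() and forwards to the matching concrete type.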
int AbstractCode::raw_instruction_size() {
  if (IsCode()) {
    return GetCode()->raw_instruction_size();
  } else {
    return GetBytecodeArray()->length();
  }
}

int AbstractCode::InstructionSize() {
  if (IsCode()) {
    return GetCode()->InstructionSize();
  } else {
    return GetBytecodeArray()->length();
  }
}

ByteArray* AbstractCode::source_position_table() {
  if (IsCode()) {
    return GetCode()->SourcePositionTable();
  } else {
    return GetBytecodeArray()->SourcePositionTable();
  }
}

Object* AbstractCode::stack_frame_cache() {
  Object* maybe_table;
  if (IsCode()) {
    maybe_table = GetCode()->source_position_table();
  } else {
    maybe_table = GetBytecodeArray()->source_position_table();
  }
  if (maybe_table->IsSourcePositionTableWithFrameCache()) {
    return SourcePositionTableWithFrameCache::cast(maybe_table)
        ->stack_frame_cache();
  }
  return Smi::kZero;
}

int AbstractCode::SizeIncludingMetadata() {
  if (IsCode()) {
    return GetCode()->SizeIncludingMetadata();
  } else {
    return GetBytecodeArray()->SizeIncludingMetadata();
  }
}

int AbstractCode::ExecutableSize() {
  if (IsCode()) {
    return GetCode()->ExecutableSize();
  } else {
    return GetBytecodeArray()->BytecodeArraySize();
  }
}

Address AbstractCode::raw_instruction_start() {
  if (IsCode()) {
    return GetCode()->raw_instruction_start();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress();
  }
}

Address AbstractCode::InstructionStart() {
  if (IsCode()) {
    return GetCode()->InstructionStart();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress();
  }
}

Address AbstractCode::raw_instruction_end() {
  if (IsCode()) {
    return GetCode()->raw_instruction_end();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress() +
           GetBytecodeArray()->length();
  }
}

Address AbstractCode::InstructionEnd() {
  if (IsCode()) {
    return GetCode()->InstructionEnd();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress() +
           GetBytecodeArray()->length();
  }
}

bool AbstractCode::contains(Address inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

AbstractCode::Kind AbstractCode::kind() {
  if (IsCode()) {
    return static_cast<AbstractCode::Kind>(GetCode()->kind());
  } else {
    return INTERPRETED_FUNCTION;
  }
}

Code* AbstractCode::GetCode() { return Code::cast(this); }

BytecodeArray* AbstractCode::GetBytecodeArray() {
  return BytecodeArray::cast(this);
}

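// DependentCode is an array-backed list of code dependencies: slot
// kNextLinkIndex chains lists together, kFlagsIndex packs the dependency
// group and entry count as a Smi, and code entries start at kCodesStartIndex.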
DependentCode* DependentCode::next_link() {
  return DependentCode::cast(Get(kNextLinkIndex)->ToStrongHeapObject());
}

void DependentCode::set_next_link(DependentCode* next) {
  Set(kNextLinkIndex, HeapObjectReference::Strong(next));
}

int DependentCode::flags() { return Smi::ToInt(Get(kFlagsIndex)->ToSmi()); }

void DependentCode::set_flags(int flags) {
  Set(kFlagsIndex, MaybeObject::FromObject(Smi::FromInt(flags)));
}

int DependentCode::count() { return CountField::decode(flags()); }

void DependentCode::set_count(int value) {
  set_flags(CountField::update(flags(), value));
}

DependentCode::DependencyGroup DependentCode::group() {
  return static_cast<DependencyGroup>(GroupField::decode(flags()));
}

void DependentCode::set_object_at(int i, MaybeObject* object) {
  Set(kCodesStartIndex + i, object);
}

MaybeObject* DependentCode::object_at(int i) {
  return Get(kCodesStartIndex + i);
}

void DependentCode::clear_at(int i) {
  Set(kCodesStartIndex + i,
      HeapObjectReference::Strong(GetReadOnlyRoots().undefined_value()));
}

void DependentCode::copy(int from, int to) {
  Set(kCodesStartIndex + to, Get(kCodesStartIndex + from));
}

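// CODE_ACCESSORS is the checked variant of ACCESSORS: every pointer field
// written into a Code object is asserted not to live in the new space.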
INT_ACCESSORS(Code, raw_instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, handler_table_offset, kHandlerTableOffsetOffset)
#define CODE_ACCESSORS(name, type, offset) \
  ACCESSORS_CHECKED2(Code, name, type, offset, true, !Heap::InNewSpace(value))
CODE_ACCESSORS(relocation_info, ByteArray, kRelocationInfoOffset)
CODE_ACCESSORS(deoptimization_data, FixedArray, kDeoptimizationDataOffset)
CODE_ACCESSORS(source_position_table, Object, kSourcePositionTableOffset)
CODE_ACCESSORS(code_data_container, CodeDataContainer, kCodeDataContainerOffset)
#undef CODE_ACCESSORS

void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, nullptr);
  WRITE_FIELD(this, kDeoptimizationDataOffset, nullptr);
  WRITE_FIELD(this, kSourcePositionTableOffset, nullptr);
  WRITE_FIELD(this, kCodeDataContainerOffset, nullptr);
}

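// Zero both padding regions of the object: the gap between the last header
// field and kHeaderSize, and the tail between the end of the body data
// (unwinding info if present, else the instructions) and the object's end.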
void Code::clear_padding() {
  memset(reinterpret_cast<void*>(address() + kHeaderPaddingStart), 0,
         kHeaderSize - kHeaderPaddingStart);
  Address data_end =
      has_unwinding_info() ? unwinding_info_end() : raw_instruction_end();
  memset(reinterpret_cast<void*>(data_end), 0,
         CodeSize() - (data_end - address()));
}

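// The source_position_table slot holds either a plain ByteArray or a
// SourcePositionTableWithFrameCache wrapper; this unwraps either form.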
ByteArray* Code::SourcePositionTable() const {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  return SourcePositionTableWithFrameCache::cast(maybe_table)
      ->source_position_table();
}

uint32_t Code::stub_key() const {
  DCHECK(is_stub());
  return READ_UINT32_FIELD(this, kStubKeyOffset);
}

void Code::set_stub_key(uint32_t key) {
  DCHECK(is_stub() || key == 0);  // Allow zero initialization.
  WRITE_UINT32_FIELD(this, kStubKeyOffset, key);
}

Object* Code::next_code_link() const {
  return code_data_container()->next_code_link();
}

void Code::set_next_code_link(Object* value) {
  code_data_container()->set_next_code_link(value);
}

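// For off-heap trampolines (embedded builtins), the checked Instruction*
// accessors redirect to the off-heap embedded blob, while the raw_* accessors
// always describe the on-heap body.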
int Code::InstructionSize() const {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    return OffHeapInstructionSize();
  }
  return raw_instruction_size();
}

Address Code::raw_instruction_start() const {
  return FIELD_ADDR(this, kHeaderSize);
}

Address Code::InstructionStart() const {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    return OffHeapInstructionStart();
  }
  return raw_instruction_start();
}

Address Code::raw_instruction_end() const {
  return raw_instruction_start() + raw_instruction_size();
}

Address Code::InstructionEnd() const {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    return OffHeapInstructionEnd();
  }
  return raw_instruction_end();
}

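// When present, unwinding info follows the instructions: a uint64 byte count
// at the first 8-byte-aligned offset past the instruction end, then the data.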
int Code::GetUnwindingInfoSizeOffset() const {
  DCHECK(has_unwinding_info());
  return RoundUp(kHeaderSize + raw_instruction_size(), kInt64Size);
}

int Code::unwinding_info_size() const {
  DCHECK(has_unwinding_info());
  return static_cast<int>(
      READ_UINT64_FIELD(this, GetUnwindingInfoSizeOffset()));
}

void Code::set_unwinding_info_size(int value) {
  DCHECK(has_unwinding_info());
  WRITE_UINT64_FIELD(this, GetUnwindingInfoSizeOffset(), value);
}

Address Code::unwinding_info_start() const {
  DCHECK(has_unwinding_info());
  return FIELD_ADDR(this, GetUnwindingInfoSizeOffset()) + kInt64Size;
}

Address Code::unwinding_info_end() const {
  DCHECK(has_unwinding_info());
  return unwinding_info_start() + unwinding_info_size();
}

int Code::body_size() const {
  int unpadded_body_size =
      has_unwinding_info()
          ? static_cast<int>(unwinding_info_end() - raw_instruction_start())
          : raw_instruction_size();
  return RoundUp(unpadded_body_size, kObjectAlignment);
}

int Code::SizeIncludingMetadata() const {
  int size = CodeSize();
  size += relocation_info()->Size();
  size += deoptimization_data()->Size();
  return size;
}

ByteArray* Code::unchecked_relocation_info() const {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}

byte* Code::relocation_start() const {
  return unchecked_relocation_info()->GetDataStartAddress();
}

byte* Code::relocation_end() const {
  return unchecked_relocation_info()->GetDataStartAddress() +
         unchecked_relocation_info()->length();
}

int Code::relocation_size() const {
  return unchecked_relocation_info()->length();
}

Address Code::entry() const { return raw_instruction_start(); }

bool Code::contains(Address inner_pointer) {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    if (OffHeapInstructionStart() <= inner_pointer &&
        inner_pointer < OffHeapInstructionEnd()) {
      return true;
    }
  }
  return (address() <= inner_pointer) && (inner_pointer < address() + Size());
}

int Code::ExecutableSize() const {
  // Check that the assumptions about the layout of the code object hold.
  DCHECK_EQ(static_cast<int>(raw_instruction_start() - address()),
            Code::kHeaderSize);
  return raw_instruction_size() + Code::kHeaderSize;
}

int Code::CodeSize() const { return SizeFor(body_size()); }

Code::Kind Code::kind() const {
  return KindField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

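// The 32-bit word at kFlagsOffset packs the immutable flags. It is written
// exactly once, here, and decoded by the accessors elsewhere in this file
// (kind(), has_unwinding_info(), is_turbofanned(), stack_slots(),
// is_off_heap_trampoline()).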
void Code::initialize_flags(Kind kind, bool has_unwinding_info,
                            bool is_turbofanned, int stack_slots,
                            bool is_off_heap_trampoline) {
  CHECK(0 <= stack_slots && stack_slots < StackSlotsField::kMax);
  static_assert(Code::NUMBER_OF_KINDS <= KindField::kMax + 1, "field overflow");
  uint32_t flags = HasUnwindingInfoField::encode(has_unwinding_info) |
                   KindField::encode(kind) |
                   IsTurbofannedField::encode(is_turbofanned) |
                   StackSlotsField::encode(stack_slots) |
                   IsOffHeapTrampoline::encode(is_off_heap_trampoline);
  WRITE_UINT32_FIELD(this, kFlagsOffset, flags);
  DCHECK_IMPLIES(stack_slots != 0, has_safepoint_info());
}

inline bool Code::is_interpreter_trampoline_builtin() const {
  Builtins* builtins = GetIsolate()->builtins();
  Code* interpreter_entry_trampoline =
      builtins->builtin(Builtins::kInterpreterEntryTrampoline);
  bool is_interpreter_trampoline =
      (builtin_index() == interpreter_entry_trampoline->builtin_index() ||
       this == builtins->builtin(Builtins::kInterpreterEnterBytecodeAdvance) ||
       this == builtins->builtin(Builtins::kInterpreterEnterBytecodeDispatch));
  DCHECK_IMPLIES(is_interpreter_trampoline, !Builtins::IsLazy(builtin_index()));
  return is_interpreter_trampoline;
}

inline bool Code::checks_optimization_marker() const {
  Builtins* builtins = GetIsolate()->builtins();
  Code* interpreter_entry_trampoline =
      builtins->builtin(Builtins::kInterpreterEntryTrampoline);
  bool checks_marker =
      (this == builtins->builtin(Builtins::kCompileLazy) ||
       builtin_index() == interpreter_entry_trampoline->builtin_index());
  DCHECK_IMPLIES(checks_marker, !Builtins::IsLazy(builtin_index()));
  return checks_marker ||
         (kind() == OPTIMIZED_FUNCTION && marked_for_deoptimization());
}

inline bool Code::has_tagged_params() const {
  return kind() != JS_TO_WASM_FUNCTION && kind() != C_WASM_ENTRY &&
         kind() != WASM_FUNCTION;
}

inline bool Code::has_unwinding_info() const {
  return HasUnwindingInfoField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline bool Code::is_turbofanned() const {
  return IsTurbofannedField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

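// Mutable, kind-specific flags live in the CodeDataContainer rather than in
// the Code object itself, so they can be updated without writing to the
// (potentially write-protected) code page.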
inline bool Code::can_have_weak_objects() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int flags = code_data_container()->kind_specific_flags();
  return CanHaveWeakObjectsField::decode(flags);
}

inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int previous = code_data_container()->kind_specific_flags();
  int updated = CanHaveWeakObjectsField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_construct_stub() const {
  DCHECK(kind() == BUILTIN);
  int flags = code_data_container()->kind_specific_flags();
  return IsConstructStubField::decode(flags);
}

inline void Code::set_is_construct_stub(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = code_data_container()->kind_specific_flags();
  int updated = IsConstructStubField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_promise_rejection() const {
  DCHECK(kind() == BUILTIN);
  int flags = code_data_container()->kind_specific_flags();
  return IsPromiseRejectionField::decode(flags);
}

inline void Code::set_is_promise_rejection(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = code_data_container()->kind_specific_flags();
  int updated = IsPromiseRejectionField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_exception_caught() const {
  DCHECK(kind() == BUILTIN);
  int flags = code_data_container()->kind_specific_flags();
  return IsExceptionCaughtField::decode(flags);
}

inline void Code::set_is_exception_caught(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = code_data_container()->kind_specific_flags();
  int updated = IsExceptionCaughtField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_off_heap_trampoline() const {
  return IsOffHeapTrampoline::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline HandlerTable::CatchPrediction Code::GetBuiltinCatchPrediction() {
  if (is_promise_rejection()) return HandlerTable::PROMISE;
  if (is_exception_caught()) return HandlerTable::CAUGHT;
  return HandlerTable::UNCAUGHT;
}

int Code::builtin_index() const {
  int index = READ_INT_FIELD(this, kBuiltinIndexOffset);
  DCHECK(index == -1 || Builtins::IsBuiltinId(index));
  return index;
}

void Code::set_builtin_index(int index) {
  DCHECK(index == -1 || Builtins::IsBuiltinId(index));
  WRITE_INT_FIELD(this, kBuiltinIndexOffset, index);
}

bool Code::is_builtin() const { return builtin_index() != -1; }

bool Code::has_safepoint_info() const {
  return is_turbofanned() || is_wasm_code();
}

int Code::stack_slots() const {
  DCHECK(has_safepoint_info());
  return StackSlotsField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

int Code::safepoint_table_offset() const {
  DCHECK(has_safepoint_info());
  return READ_INT32_FIELD(this, kSafepointTableOffsetOffset);
}

void Code::set_safepoint_table_offset(int offset) {
  CHECK_LE(0, offset);
  DCHECK(has_safepoint_info() || offset == 0);  // Allow zero initialization.
  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_INT32_FIELD(this, kSafepointTableOffsetOffset, offset);
}

bool Code::marked_for_deoptimization() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int flags = code_data_container()->kind_specific_flags();
  return MarkedForDeoptimizationField::decode(flags);
}

void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = code_data_container()->kind_specific_flags();
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  code_data_container()->set_kind_specific_flags(updated);
}

bool Code::deopt_already_counted() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int flags = code_data_container()->kind_specific_flags();
  return DeoptAlreadyCountedField::decode(flags);
}

void Code::set_deopt_already_counted(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = code_data_container()->kind_specific_flags();
  int updated = DeoptAlreadyCountedField::update(previous, flag);
  code_data_container()->set_kind_specific_flags(updated);
}

bool Code::is_stub() const { return kind() == STUB; }
bool Code::is_optimized_code() const { return kind() == OPTIMIZED_FUNCTION; }
bool Code::is_wasm_code() const { return kind() == WASM_FUNCTION; }

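// Without an embedded constant pool the offset defaults to InstructionSize(),
// i.e. to the instruction end, which constant_pool() below treats as "no
// pool" and maps to kNullAddress.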
int Code::constant_pool_offset() const {
  if (!FLAG_enable_embedded_constant_pool) return InstructionSize();
  return READ_INT_FIELD(this, kConstantPoolOffset);
}

void Code::set_constant_pool_offset(int value) {
  if (!FLAG_enable_embedded_constant_pool) return;
  WRITE_INT_FIELD(this, kConstantPoolOffset, value);
}

Address Code::constant_pool() const {
  if (FLAG_enable_embedded_constant_pool) {
    int offset = constant_pool_offset();
    if (offset < InstructionSize()) {
      return InstructionStart() + offset;
    }
  }
  return kNullAddress;
}

Code* Code::GetCodeFromTargetAddress(Address address) {
  {
    // TODO(jgruber,v8:6666): Support embedded builtins here. We'd need to pass
    // in the current isolate.
    Address start = reinterpret_cast<Address>(Isolate::CurrentEmbeddedBlob());
    Address end = start + Isolate::CurrentEmbeddedBlobSize();
    CHECK(address < start || address >= end);
  }

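  // Instructions start at a fixed offset (kHeaderSize) from the object start,
  // so stepping back from the target address recovers the HeapObject.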
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}

Object* Code::GetObjectFromCodeEntry(Address code_entry) {
  return HeapObject::FromAddress(code_entry - Code::kHeaderSize);
}

Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return GetObjectFromCodeEntry(Memory<Address>(location_of_address));
}

bool Code::CanContainWeakObjects() {
  return is_optimized_code() && can_have_weak_objects();
}

bool Code::IsWeakObject(Object* object) {
  return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
}

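// Optimized code treats as weak: maps that can still transition, and (looking
// through Cell/PropertyCell indirections) JSReceivers and Contexts.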
bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (object->IsMap()) {
    return Map::cast(object)->CanTransition();
  }
  if (object->IsCell()) {
    object = Cell::cast(object)->value();
  } else if (object->IsPropertyCell()) {
    object = PropertyCell::cast(object)->value();
  }
  if (object->IsJSReceiver() || object->IsContext()) {
    return true;
  }
  return false;
}

INT_ACCESSORS(CodeDataContainer, kind_specific_flags, kKindSpecificFlagsOffset)
ACCESSORS(CodeDataContainer, next_code_link, Object, kNextCodeLinkOffset)

void CodeDataContainer::clear_padding() {
  memset(reinterpret_cast<void*>(address() + kUnalignedSize), 0,
         kSize - kUnalignedSize);
}

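// Bytecodes are raw bytes stored directly after the header; accesses are
// bounds-checked against length() in debug builds.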
byte BytecodeArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}

void BytecodeArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}

void BytecodeArray::set_frame_size(int frame_size) {
  DCHECK_GE(frame_size, 0);
  DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
  WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
}

int BytecodeArray::frame_size() const {
  return READ_INT_FIELD(this, kFrameSizeOffset);
}

int BytecodeArray::register_count() const {
  return frame_size() / kPointerSize;
}

void BytecodeArray::set_parameter_count(int number_of_parameters) {
  DCHECK_GE(number_of_parameters, 0);
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  WRITE_INT_FIELD(this, kParameterSizeOffset,
                  (number_of_parameters << kPointerSizeLog2));
}
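// For example, on a 64-bit target (kPointerSizeLog2 == 3), 3 parameters are
// stored as 3 << 3 == 24, i.e. 24 bytes of parameter stack space;
// parameter_count() below reverses the shift.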

interpreter::Register BytecodeArray::incoming_new_target_or_generator_register()
    const {
  int register_operand =
      READ_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset);
  if (register_operand == 0) {
    return interpreter::Register::invalid_value();
  } else {
    return interpreter::Register::FromOperand(register_operand);
  }
}

void BytecodeArray::set_incoming_new_target_or_generator_register(
    interpreter::Register incoming_new_target_or_generator_register) {
  if (!incoming_new_target_or_generator_register.is_valid()) {
    WRITE_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset, 0);
  } else {
    DCHECK(incoming_new_target_or_generator_register.index() <
           register_count());
    DCHECK_NE(0, incoming_new_target_or_generator_register.ToOperand());
    WRITE_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset,
                    incoming_new_target_or_generator_register.ToOperand());
  }
}

int BytecodeArray::interrupt_budget() const {
  return READ_INT_FIELD(this, kInterruptBudgetOffset);
}

void BytecodeArray::set_interrupt_budget(int interrupt_budget) {
  DCHECK_GE(interrupt_budget, 0);
  WRITE_INT_FIELD(this, kInterruptBudgetOffset, interrupt_budget);
}

int BytecodeArray::osr_loop_nesting_level() const {
  return READ_INT8_FIELD(this, kOSRNestingLevelOffset);
}

void BytecodeArray::set_osr_loop_nesting_level(int depth) {
  DCHECK(0 <= depth && depth <= AbstractCode::kMaxLoopNestingMarker);
  STATIC_ASSERT(AbstractCode::kMaxLoopNestingMarker < kMaxInt8);
  WRITE_INT8_FIELD(this, kOSRNestingLevelOffset, depth);
}

BytecodeArray::Age BytecodeArray::bytecode_age() const {
  // Bytecode is aged by the concurrent marker.
  return static_cast<Age>(RELAXED_READ_INT8_FIELD(this, kBytecodeAgeOffset));
}

void BytecodeArray::set_bytecode_age(BytecodeArray::Age age) {
  DCHECK_GE(age, kFirstBytecodeAge);
  DCHECK_LE(age, kLastBytecodeAge);
  STATIC_ASSERT(kLastBytecodeAge <= kMaxInt8);
  // Bytecode is aged by the concurrent marker.
  RELAXED_WRITE_INT8_FIELD(this, kBytecodeAgeOffset, static_cast<int8_t>(age));
}

int BytecodeArray::parameter_count() const {
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
}

ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
ACCESSORS(BytecodeArray, handler_table, ByteArray, kHandlerTableOffset)
ACCESSORS(BytecodeArray, source_position_table, Object,
          kSourcePositionTableOffset)

void BytecodeArray::clear_padding() {
  int data_size = kHeaderSize + length();
  memset(reinterpret_cast<void*>(address() + data_size), 0,
         SizeFor(length()) - data_size);
}

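// `this` is a tagged pointer; subtracting kHeapObjectTag yields the untagged
// object start, and the bytecodes begin kHeaderSize bytes into the object.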
Address BytecodeArray::GetFirstBytecodeAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}

ByteArray* BytecodeArray::SourcePositionTable() {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  return SourcePositionTableWithFrameCache::cast(maybe_table)
      ->source_position_table();
}

void BytecodeArray::ClearFrameCacheFromSourcePositionTable() {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return;
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  set_source_position_table(SourcePositionTableWithFrameCache::cast(maybe_table)
                                ->source_position_table());
}

int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }

int BytecodeArray::SizeIncludingMetadata() {
  int size = BytecodeArraySize();
  size += constant_pool()->Size();
  size += handler_table()->Size();
  size += SourcePositionTable()->Size();
  return size;
}

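// DeoptimizationData is a FixedArray with a kFirstDeoptEntryIndex-slot header
// followed by one kDeoptEntrySize-slot record per deopt point; DeoptCount()
// inverts that layout.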
BailoutId DeoptimizationData::BytecodeOffset(int i) {
  return BailoutId(BytecodeOffsetRaw(i)->value());
}

void DeoptimizationData::SetBytecodeOffset(int i, BailoutId value) {
  SetBytecodeOffsetRaw(i, Smi::FromInt(value.ToInt()));
}

int DeoptimizationData::DeoptCount() {
  return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_CODE_INL_H_