// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_HEAP_OBJECT_H_
#define V8_OBJECTS_HEAP_OBJECT_H_

#include "src/common/globals.h"
#include "src/objects/instance-type.h"
#include "src/objects/objects.h"
#include "src/objects/tagged-field.h"
#include "src/roots/roots.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

class Heap;
class PrimitiveHeapObject;

// HeapObject is the superclass for all classes describing heap allocated
// objects.
class HeapObject : public Object {
 public:
  bool is_null() const {
    return static_cast<Tagged_t>(ptr()) == static_cast<Tagged_t>(kNullAddress);
  }

  // [map]: Contains a map which contains the object's reflective
  // information.
  DECL_GETTER(map, Map)
  inline void set_map(Map value);

  inline ObjectSlot map_slot() const;

  // The no-write-barrier version.  This is OK if the object is white and in
  // new space, or if the value is an immortal immutable object, like the maps
  // of primitive (non-JS) objects like strings, heap numbers etc.
  inline void set_map_no_write_barrier(Map value);

  // Access the map using acquire load and release store.
  DECL_GETTER(synchronized_map, Map)
  inline void synchronized_set_map(Map value);

  // Compare-and-swaps the map word using release store; returns true if the
  // map word was actually swapped.
  inline bool release_compare_and_swap_map_word(MapWord old_map_word,
                                                MapWord new_map_word);

  // Initialize the map immediately after the object is allocated.
  // Do not use this outside Heap.
  inline void set_map_after_allocation(
      Map value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER);

  // During garbage collection, the map word of a heap object does not
  // necessarily contain a map pointer.
  DECL_GETTER(map_word, MapWord)
  inline void set_map_word(MapWord map_word);

  // Access the map word using acquire load and release store.
  DECL_GETTER(synchronized_map_word, MapWord)
  inline void synchronized_set_map_word(MapWord map_word);

  // This method exists to help remove GetIsolate/GetHeap from HeapObject, in a
  // way that doesn't require passing Isolate/Heap down huge call chains or to
  // places where it might not be safe to access it.
  inline ReadOnlyRoots GetReadOnlyRoots() const;
  // This version is intended for the isolate values produced by the
  // i::GetIsolateForPtrCompr(HeapObject) function, which may return nullptr.
  inline ReadOnlyRoots GetReadOnlyRoots(IsolateRoot isolate) const;
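
  // Illustrative sketch, not part of the original header: GetReadOnlyRoots()
  // lets callers reach immortal read-only values without threading an
  // Isolate* through long call chains, e.g.
  //
  //   ReadOnlyRoots roots = obj.GetReadOnlyRoots();
  //   bool is_empty = (elements == roots.empty_fixed_array());
  //
  // The accessor name empty_fixed_array() is assumed here; see
  // src/roots/roots.h for the authoritative list of root accessors.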

#define IS_TYPE_FUNCTION_DECL(Type) \
  V8_INLINE bool Is##Type() const;  \
  V8_INLINE bool Is##Type(IsolateRoot isolate) const;
  HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
  IS_TYPE_FUNCTION_DECL(HashTableBase)
  IS_TYPE_FUNCTION_DECL(SmallOrderedHashTable)
#undef IS_TYPE_FUNCTION_DECL
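
  // For illustration only: taking String as an assumed entry of
  // HEAP_OBJECT_TYPE_LIST, the macro above declares roughly
  //
  //   V8_INLINE bool IsString() const;
  //   V8_INLINE bool IsString(IsolateRoot isolate) const;
  //
  // for every listed type.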

  bool IsExternal(Isolate* isolate) const;

// Oddball checks are faster when they are raw pointer comparisons, so the
// isolate/read-only roots overloads should be preferred where possible.
#define IS_TYPE_FUNCTION_DECL(Type, Value)              \
  V8_INLINE bool Is##Type(Isolate* isolate) const;      \
  V8_INLINE bool Is##Type(LocalIsolate* isolate) const; \
  V8_INLINE bool Is##Type(ReadOnlyRoots roots) const;   \
  V8_INLINE bool Is##Type() const;
  ODDBALL_LIST(IS_TYPE_FUNCTION_DECL)
  IS_TYPE_FUNCTION_DECL(NullOrUndefined, /* unused */)
#undef IS_TYPE_FUNCTION_DECL
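
  // Sketch of the expansion for one assumed oddball entry, Undefined:
  //
  //   V8_INLINE bool IsUndefined(Isolate* isolate) const;
  //   V8_INLINE bool IsUndefined(LocalIsolate* isolate) const;
  //   V8_INLINE bool IsUndefined(ReadOnlyRoots roots) const;
  //   V8_INLINE bool IsUndefined() const;
  //
  // The isolate/roots overloads can compare against the root pointer directly.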

#define DECL_STRUCT_PREDICATE(NAME, Name, name) \
  V8_INLINE bool Is##Name() const;              \
  V8_INLINE bool Is##Name(IsolateRoot isolate) const;
  STRUCT_LIST(DECL_STRUCT_PREDICATE)
#undef DECL_STRUCT_PREDICATE

  // Converts an address to a HeapObject pointer.
  static inline HeapObject FromAddress(Address address) {
    DCHECK_TAG_ALIGNED(address);
    return HeapObject(address + kHeapObjectTag);
  }

  // Returns the address of this HeapObject.
  inline Address address() const { return ptr() - kHeapObjectTag; }
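
  // Illustrative round trip, assuming `addr` is a tag-aligned address of an
  // allocated object:
  //
  //   HeapObject obj = HeapObject::FromAddress(addr);  // addr + kHeapObjectTag
  //   DCHECK_EQ(addr, obj.address());                  // tag stripped again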

  // Iterates over pointers contained in the object (including the Map).
  // If iteration is not performance-critical, use the non-templatized
  // version.
  void Iterate(ObjectVisitor* v);

  template <typename ObjectVisitor>
  inline void IterateFast(ObjectVisitor* v);

  // Iterates over all pointers contained in the object except the
  // first map pointer.  The object type is given in the first
  // parameter. This function does not access the map pointer in the
  // object, and so is safe to call while the map pointer is modified.
  // If iteration is not performance-critical, use the non-templatized
  // version.
  void IterateBody(ObjectVisitor* v);
  void IterateBody(Map map, int object_size, ObjectVisitor* v);

  template <typename ObjectVisitor>
  inline void IterateBodyFast(ObjectVisitor* v);

  template <typename ObjectVisitor>
  inline void IterateBodyFast(Map map, int object_size, ObjectVisitor* v);

  // Returns true if the object contains a tagged value at the given offset.
  // It is used for invalid slots filtering. If the offset points outside
  // of the object or to the map word, the result is UNDEFINED (!!!).
  V8_EXPORT_PRIVATE bool IsValidSlot(Map map, int offset);

  // Returns the heap object's size in bytes.
  inline int Size() const;

  // Given a heap object's map pointer, returns the object's size in bytes.
  // Useful when the map pointer field is used for other purposes.
  // GC internal.
  V8_EXPORT_PRIVATE int SizeFromMap(Map map) const;

  // Returns the field at the given offset in this object, as a read/write
  // Object reference. Does no checking, and is safe to use during GC, while
  // maps are invalid. Does not invoke the write barrier, so it should only be
  // assigned to during a marking GC.
  inline ObjectSlot RawField(int byte_offset) const;
  inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;
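
  // Hypothetical usage sketch (kSomeFieldOffset is an invented name): a GC
  // visitor can touch a field without consulting the map, e.g.
  //
  //   ObjectSlot slot = obj.RawField(kSomeFieldOffset);
  //   Object value = *slot;
  //
  // The exact slot load/store interface is defined in src/objects/slots.h.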

  DECL_CAST(HeapObject)

  // Return the write barrier mode for this object. Callers of this function
  // must be able to present a reference to a DisallowHeapAllocation
  // object as a sign that they are not going to use this function
  // from code that allocates and thus invalidates the returned write
  // barrier mode.
  inline WriteBarrierMode GetWriteBarrierMode(
      const DisallowHeapAllocation& promise);
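
  // A minimal sketch of the intended pattern (the setter name is assumed):
  //
  //   DisallowHeapAllocation no_gc;
  //   WriteBarrierMode mode = obj.GetWriteBarrierMode(no_gc);
  //   obj.set_some_field(value, mode);  // no allocation between these calls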

  // Dispatched behavior.
  void HeapObjectShortPrint(std::ostream& os);  // NOLINT
#ifdef OBJECT_PRINT
  void PrintHeader(std::ostream& os, const char* id);  // NOLINT
#endif
  DECL_PRINTER(HeapObject)
  EXPORT_DECL_VERIFIER(HeapObject)
#ifdef VERIFY_HEAP
  inline void VerifyObjectField(Isolate* isolate, int offset);
  inline void VerifySmiField(int offset);
  inline void VerifyMaybeObjectField(Isolate* isolate, int offset);

  // Verify that a pointer is a valid HeapObject pointer that points to object
  // areas in the heap.
  static void VerifyHeapPointer(Isolate* isolate, Object p);
#endif

  static inline AllocationAlignment RequiredAlignment(Map map);

  // Whether the object needs rehashing. That is the case if the object's
  // content depends on FLAG_hash_seed. When the object is deserialized into
  // a heap with a different hash seed, these objects need to adapt.
  bool NeedsRehashing(InstanceType instance_type) const;
  bool NeedsRehashing() const;

  // Rehashing support is not implemented for all objects that need rehashing.
  // For objects that need rehashing but cannot be rehashed, rehashing has to
  // be disabled.
  bool CanBeRehashed() const;

  // Rehash the object based on the layout inferred from its map.
  void RehashBasedOnMap(Isolate* isolate);

  // Layout description.
#define HEAP_OBJECT_FIELDS(V) \
  V(kMapOffset, kTaggedSize)  \
  /* Header size. */          \
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(Object::kHeaderSize, HEAP_OBJECT_FIELDS)
#undef HEAP_OBJECT_FIELDS
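
  // Roughly, the macro above expands to
  //   kMapOffset  = Object::kHeaderSize
  //   kHeaderSize = kMapOffset + kTaggedSize
  // i.e. the HeapObject header consists of a single tagged map word.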

  STATIC_ASSERT(kMapOffset == Internals::kHeapObjectMapOffset);

  using MapField = TaggedField<MapWord, HeapObject::kMapOffset>;

  inline Address GetFieldAddress(int field_offset) const;

 protected:
  // Special-purpose constructor for subclasses that have fast paths where
  // their ptr() is a Smi.
  enum class AllowInlineSmiStorage { kRequireHeapObjectTag, kAllowBeingASmi };
  inline HeapObject(Address ptr, AllowInlineSmiStorage allow_smi);

  OBJECT_CONSTRUCTORS(HeapObject, Object);
};

OBJECT_CONSTRUCTORS_IMPL(HeapObject, Object)
CAST_ACCESSOR(HeapObject)

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_HEAP_OBJECT_H_