// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_ALLOCATION_SITE_INL_H_
#define V8_OBJECTS_ALLOCATION_SITE_INL_H_

#include "src/common/globals.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/objects/allocation-site.h"
#include "src/objects/js-objects-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

#include "torque-generated/src/objects/allocation-site-tq-inl.inc"

TQ_OBJECT_CONSTRUCTORS_IMPL(AllocationMemento)
OBJECT_CONSTRUCTORS_IMPL(AllocationSite, Struct)

NEVER_READ_ONLY_SPACE_IMPL(AllocationSite)

CAST_ACCESSOR(AllocationSite)

ACCESSORS(AllocationSite, transition_info_or_boilerplate, Object,
          kTransitionInfoOrBoilerplateOffset)
RELEASE_ACQUIRE_ACCESSORS(AllocationSite, transition_info_or_boilerplate,
                          Object, kTransitionInfoOrBoilerplateOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
RELAXED_INT32_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset)
INT32_ACCESSORS(AllocationSite, pretenure_create_count,
                kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS_CHECKED(AllocationSite, weak_next, Object, kWeakNextOffset,
                  HasWeakNext())
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

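// The transition_info_or_boilerplate slot is overloaded: while the site still
// tracks a literal it holds the boilerplate JSObject, otherwise it holds a Smi
// with packed transition info (see PointsToLiteral() below).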
JSObject AllocationSite::boilerplate() const {
  DCHECK(PointsToLiteral());
  return JSObject::cast(transition_info_or_boilerplate());
}

JSObject AllocationSite::boilerplate(AcquireLoadTag tag) const {
  DCHECK(PointsToLiteral());
  return JSObject::cast(transition_info_or_boilerplate(tag));
}

void AllocationSite::set_boilerplate(JSObject value, ReleaseStoreTag tag,
                                     WriteBarrierMode mode) {
  set_transition_info_or_boilerplate(value, tag, mode);
}

int AllocationSite::transition_info() const {
  DCHECK(!PointsToLiteral());
  return Smi::cast(transition_info_or_boilerplate(kAcquireLoad)).value();
}

void AllocationSite::set_transition_info(int value) {
  DCHECK(!PointsToLiteral());
  set_transition_info_or_boilerplate(Smi::FromInt(value), kReleaseStore,
                                     SKIP_WRITE_BARRIER);
}

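// Only sites created with the regular allocation_site_map carry the trailing
// weak_next field; a shortened map variant omits it, which is what the
// ACCESSORS_CHECKED declaration above guards against.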
bool AllocationSite::HasWeakNext() const {
  return map() == GetReadOnlyRoots().allocation_site_map();
}

void AllocationSite::Initialize() {
  set_transition_info_or_boilerplate(Smi::zero());
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::zero());
  set_pretenure_data(0, kRelaxedStore);
  set_pretenure_create_count(0);
  set_dependent_code(DependentCode::empty_dependent_code(GetReadOnlyRoots()),
                     SKIP_WRITE_BARRIER);
}

bool AllocationSite::IsZombie() const {
  return pretenure_decision() == kZombie;
}

bool AllocationSite::IsMaybeTenure() const {
  return pretenure_decision() == kMaybeTenure;
}

bool AllocationSite::PretenuringDecisionMade() const {
  return pretenure_decision() != kUndecided;
}

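// Marking a site as a zombie resets all of its feedback (via Initialize()) and
// pins the pretenuring decision to kZombie; zombie sites no longer accumulate
// memento counts and make any referring AllocationMemento invalid.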
void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}

ElementsKind AllocationSite::GetElementsKind() const {
  return ElementsKindBits::decode(transition_info());
}

void AllocationSite::SetElementsKind(ElementsKind kind) {
  set_transition_info(ElementsKindBits::update(transition_info(), kind));
}

bool AllocationSite::CanInlineCall() const {
  return DoNotInlineBit::decode(transition_info()) == 0;
}

void AllocationSite::SetDoNotInlineCall() {
  set_transition_info(DoNotInlineBit::update(transition_info(), true));
}

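// The shared slot discriminates by type: a Smi means packed transition info,
// while any heap object is the boilerplate (a JSArray or JSObject, as the
// DCHECK below asserts).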
bool AllocationSite::PointsToLiteral() const {
  Object raw_value = transition_info_or_boilerplate(kAcquireLoad);
  DCHECK_EQ(!raw_value.IsSmi(),
            raw_value.IsJSArray() || raw_value.IsJSObject());
  return !raw_value.IsSmi();
}

// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
bool AllocationSite::ShouldTrack(ElementsKind boilerplate_elements_kind) {
  if (!V8_ALLOCATION_SITE_TRACKING_BOOL) return false;
  return IsSmiElementsKind(boilerplate_elements_kind);
}

inline bool AllocationSite::CanTrack(InstanceType type) {
  if (!V8_ALLOCATION_SITE_TRACKING_BOOL) return false;
  if (FLAG_allocation_site_pretenuring) {
    // TurboFan doesn't care at all about String pretenuring feedback,
    // so don't bother even trying to track that.
    return type == JS_ARRAY_TYPE || type == JS_OBJECT_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}

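// pretenure_data packs the pretenuring decision, the deopt-dependent-code flag
// and the memento-found count into a single int32 via bit fields; all accesses
// go through relaxed loads and stores.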
AllocationSite::PretenureDecision AllocationSite::pretenure_decision() const {
  return PretenureDecisionBits::decode(pretenure_data(kRelaxedLoad));
}

void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
  int32_t value = pretenure_data(kRelaxedLoad);
  set_pretenure_data(PretenureDecisionBits::update(value, decision),
                     kRelaxedStore);
}

bool AllocationSite::deopt_dependent_code() const {
  return DeoptDependentCodeBit::decode(pretenure_data(kRelaxedLoad));
}

void AllocationSite::set_deopt_dependent_code(bool deopt) {
  int32_t value = pretenure_data(kRelaxedLoad);
  set_pretenure_data(DeoptDependentCodeBit::update(value, deopt),
                     kRelaxedStore);
}

int AllocationSite::memento_found_count() const {
  return MementoFoundCountBits::decode(pretenure_data(kRelaxedLoad));
}

inline void AllocationSite::set_memento_found_count(int count) {
  int32_t value = pretenure_data(kRelaxedLoad);
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (Heap::kMinObjectSizeInTaggedWords * kTaggedSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK_LT(count, MementoFoundCountBits::kMax);
  set_pretenure_data(MementoFoundCountBits::update(value, count),
                     kRelaxedStore);
}

int AllocationSite::memento_create_count() const {
  return pretenure_create_count();
}

void AllocationSite::set_memento_create_count(int count) {
  set_pretenure_create_count(count);
}

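// Returns true once enough mementos have been found for this site to warrant
// a pretenuring decision (kPretenureMinimumCreated); zombie sites never
// accumulate feedback.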
bool AllocationSite::IncrementMementoFoundCount(int increment) {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + increment);
  return memento_found_count() >= kPretenureMinimumCreated;
}

inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}

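// A memento is considered valid only if its allocation_site slot actually
// holds an AllocationSite and that site has not been marked as a zombie.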
bool AllocationMemento::IsValid() const {
  return allocation_site().IsAllocationSite() &&
         !AllocationSite::cast(allocation_site()).IsZombie();
}

AllocationSite AllocationMemento::GetAllocationSite() const {
  DCHECK(IsValid());
  return AllocationSite::cast(allocation_site());
}

Address AllocationMemento::GetAllocationSiteUnchecked() const {
  return allocation_site().ptr();
}

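// Records an observed elements-kind transition for this site. For literal
// sites the boilerplate object itself is transitioned; for constructed-Array
// sites only the packed transition info is updated. In both cases, code
// depending on the old kind is deoptimized. With
// AllocationSiteUpdateMode::kCheckOnly the function only reports whether an
// update would have happened.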
template <AllocationSiteUpdateMode update_or_check>
bool AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
                                              ElementsKind to_kind) {
  Isolate* isolate = site->GetIsolate();
  bool result = false;

  if (site->PointsToLiteral() && site->boilerplate().IsJSArray()) {
    Handle<JSArray> boilerplate(JSArray::cast(site->boilerplate()), isolate);
    ElementsKind kind = boilerplate->GetElementsKind();
    // If kind is holey, ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    }
    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
      // If the array is huge, it's not likely to be defined in a local
      // function, so we shouldn't make new instances of it very often.
      uint32_t length = 0;
      CHECK(boilerplate->length().ToArrayLength(&length));
      if (length <= kMaximumArrayBytesToPretransition) {
        if (update_or_check == AllocationSiteUpdateMode::kCheckOnly) {
          return true;
        }
        if (FLAG_trace_track_allocation_sites) {
          bool is_nested = site->IsNested();
          PrintF("AllocationSite: JSArray %p boilerplate %supdated %s->%s\n",
                 reinterpret_cast<void*>(site->ptr()),
                 is_nested ? "(nested)" : " ", ElementsKindToString(kind),
                 ElementsKindToString(to_kind));
        }
        CHECK_NE(to_kind, DICTIONARY_ELEMENTS);
        JSObject::TransitionElementsKind(boilerplate, to_kind);
        site->dependent_code().DeoptimizeDependentCodeGroup(
            isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
        result = true;
      }
    }
  } else {
    // The AllocationSite is for a constructed Array.
    ElementsKind kind = site->GetElementsKind();
    // If kind is holey, ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    }
    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
      if (update_or_check == AllocationSiteUpdateMode::kCheckOnly) return true;
      if (FLAG_trace_track_allocation_sites) {
        PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
               reinterpret_cast<void*>(site->ptr()), ElementsKindToString(kind),
               ElementsKindToString(to_kind));
      }
      site->SetElementsKind(to_kind);
      site->dependent_code().DeoptimizeDependentCodeGroup(
          isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
      result = true;
    }
  }
  return result;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_ALLOCATION_SITE_INL_H_