// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/slot-set.h"

#include "src/base/logging.h"
#include "src/heap/memory-chunk-layout.h"

namespace v8 {
namespace internal {

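// Frees every chunk in the linked list and resets the head and tail
// pointers, leaving the set empty.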
TypedSlots::~TypedSlots() {
  Chunk* chunk = head_;
  while (chunk != nullptr) {
    Chunk* next = chunk->next;
    delete chunk;
    chunk = next;
  }
  head_ = nullptr;
  tail_ = nullptr;
}

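// Encodes the slot type and offset into a single word and appends it to the
// current chunk. EnsureChunk() guarantees spare capacity, so the push_back
// below never reallocates the buffer.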
void TypedSlots::Insert(SlotType type, uint32_t offset) {
  TypedSlot slot = {TypeField::encode(type) | OffsetField::encode(offset)};
  Chunk* chunk = EnsureChunk();
  DCHECK_LT(chunk->buffer.size(), chunk->buffer.capacity());
  chunk->buffer.push_back(slot);
}

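// Splices the other set's chunk list onto the end of this one and leaves
// the other set empty. No slots are copied, so merging is O(1).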
void TypedSlots::Merge(TypedSlots* other) {
  if (other->head_ == nullptr) {
    return;
  }
  if (head_ == nullptr) {
    head_ = other->head_;
    tail_ = other->tail_;
  } else {
    tail_->next = other->head_;
    tail_ = other->tail_;
  }
  other->head_ = nullptr;
  other->tail_ = nullptr;
}

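// Returns a chunk with room for at least one more slot, allocating the
// initial chunk lazily and prepending a fresh chunk sized by NextCapacity()
// whenever the current head is full.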
TypedSlots::Chunk* TypedSlots::EnsureChunk() {
  if (!head_) {
    head_ = tail_ = NewChunk(nullptr, kInitialBufferSize);
  }
  if (head_->buffer.size() == head_->buffer.capacity()) {
    head_ = NewChunk(head_, NextCapacity(head_->buffer.capacity()));
  }
  return head_;
}

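// Allocates a chunk whose buffer reserves exactly |capacity| slots and links
// it in front of |next|.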
TypedSlots::Chunk* TypedSlots::NewChunk(Chunk* next, size_t capacity) {
  Chunk* chunk = new Chunk;
  chunk->next = next;
  chunk->buffer.reserve(capacity);
  DCHECK_EQ(chunk->buffer.capacity(), capacity);
  return chunk;
}

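// Overwrites every slot that falls inside one of the given free ranges with
// the cleared marker, so that later iteration skips it.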
void TypedSlotSet::ClearInvalidSlots(const FreeRangesMap& invalid_ranges) {
  IterateSlotsInRanges([](TypedSlot* slot) { *slot = ClearedTypedSlot(); },
                       invalid_ranges);
}

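// Verifies that no recorded slot lies inside any of the given free ranges;
// fails with a CHECK if one is found.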
void TypedSlotSet::AssertNoInvalidSlots(const FreeRangesMap& invalid_ranges) {
  IterateSlotsInRanges(
      [](TypedSlot* slot) {
        CHECK_WITH_MSG(false, "No slot in ranges expected.");
      },
      invalid_ranges);
}

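// Walks all recorded slots and invokes |callback| on each slot whose offset
// lies inside one of the half-open [start, end) ranges in |ranges|. The map
// is keyed by range start, so one binary search per slot suffices.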
template <typename Callback>
void TypedSlotSet::IterateSlotsInRanges(Callback callback,
                                        const FreeRangesMap& ranges) {
  if (ranges.empty()) return;

  Chunk* chunk = LoadHead();
  while (chunk != nullptr) {
    for (TypedSlot& slot : chunk->buffer) {
      SlotType type = TypeField::decode(slot.type_and_offset);
      if (type == SlotType::kCleared) continue;
      uint32_t offset = OffsetField::decode(slot.type_and_offset);
      FreeRangesMap::const_iterator upper_bound = ranges.upper_bound(offset);
      if (upper_bound == ranges.begin()) continue;
      // upper_bound points to the invalid range after the given slot. Hence,
      // we have to go to the previous element.
      upper_bound--;
      DCHECK_LE(upper_bound->first, offset);
      if (upper_bound->second > offset) {
        callback(&slot);
      }
    }
    chunk = LoadNext(chunk);
  }
}

}  // namespace internal
}  // namespace v8