// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/state-values-utils.h"

#include "src/compiler/bytecode-liveness-map.h"
#include "src/compiler/common-operator.h"
#include "src/utils/bit-vector.h"

namespace v8 {
namespace internal {
namespace compiler {

StateValuesCache::StateValuesCache(JSGraph* js_graph)
    : js_graph_(js_graph),
      hash_map_(AreKeysEqual, ZoneHashMap::kDefaultHashMapCapacity,
                ZoneAllocationPolicy(zone())),
      working_space_(zone()),
      empty_state_values_(nullptr) {}

// static
bool StateValuesCache::AreKeysEqual(void* key1, void* key2) {
  NodeKey* node_key1 = reinterpret_cast<NodeKey*>(key1);
  NodeKey* node_key2 = reinterpret_cast<NodeKey*>(key2);

  if (node_key1->node == nullptr) {
    if (node_key2->node == nullptr) {
      return AreValueKeysEqual(reinterpret_cast<StateValuesKey*>(key1),
                               reinterpret_cast<StateValuesKey*>(key2));
    } else {
      return IsKeysEqualToNode(reinterpret_cast<StateValuesKey*>(key1),
                               node_key2->node);
    }
  } else {
    if (node_key2->node == nullptr) {
      // If the nodes are already processed, they must be the same.
      return IsKeysEqualToNode(reinterpret_cast<StateValuesKey*>(key2),
                               node_key1->node);
    } else {
      return node_key1->node == node_key2->node;
    }
  }
  UNREACHABLE();
}

// static
bool StateValuesCache::IsKeysEqualToNode(StateValuesKey* key, Node* node) {
  if (key->count != static_cast<size_t>(node->InputCount())) {
    return false;
  }

  DCHECK_EQ(IrOpcode::kStateValues, node->opcode());
  SparseInputMask node_mask = SparseInputMaskOf(node->op());

  if (node_mask != key->mask) {
    return false;
  }

  // Comparing real inputs rather than sparse inputs, since we already know
  // the sparse input masks are the same.
  for (size_t i = 0; i < key->count; i++) {
    if (key->values[i] != node->InputAt(static_cast<int>(i))) {
      return false;
    }
  }
  return true;
}

// static
bool StateValuesCache::AreValueKeysEqual(StateValuesKey* key1,
                                         StateValuesKey* key2) {
  if (key1->count != key2->count) {
    return false;
  }
  if (key1->mask != key2->mask) {
    return false;
  }
  for (size_t i = 0; i < key1->count; i++) {
    if (key1->values[i] != key2->values[i]) {
      return false;
    }
  }
  return true;
}

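// Returns the shared StateValues node with zero inputs, creating and caching
// it on first use.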
Node* StateValuesCache::GetEmptyStateValues() {
  if (empty_state_values_ == nullptr) {
    empty_state_values_ =
        graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
  }
  return empty_state_values_;
}

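// Returns the scratch buffer for the given tree level, growing the buffer
// list on demand. Each level needs its own buffer because BuildTree fills a
// level's buffer while recursive calls for its subtrees are still filling
// the buffers of the levels below.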
StateValuesCache::WorkingBuffer* StateValuesCache::GetWorkingSpace(
    size_t level) {
  if (working_space_.size() <= level) {
    working_space_.resize(level + 1);
  }
  return &working_space_[level];
}

namespace {

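// A simple multiplicative hash over the ids of the input nodes, where a
// nullptr (optimized out) input hashes as zero. The result is masked to a
// non-negative int before being used as the hash map hash.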
int StateValuesHashKey(Node** nodes, size_t count) {
  size_t hash = count;
  for (size_t i = 0; i < count; i++) {
    hash = hash * 23 + (nodes[i] == nullptr ? 0 : nodes[i]->id());
  }
  return static_cast<int>(hash & 0x7FFFFFFF);
}

}  // namespace

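// Looks up a StateValues node for the given inputs and mask, creating and
// caching one on a miss. The new entry is re-keyed by the freshly created
// node, so subsequent lookups of the same values are matched against the
// node's own inputs (see IsKeysEqualToNode).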
Node* StateValuesCache::GetValuesNodeFromCache(Node** nodes, size_t count,
                                               SparseInputMask mask) {
  StateValuesKey key(count, mask, nodes);
  int hash = StateValuesHashKey(nodes, count);
  ZoneHashMap::Entry* lookup = hash_map_.LookupOrInsert(&key, hash);
  DCHECK_NOT_NULL(lookup);
  Node* node;
  if (lookup->value == nullptr) {
    int node_count = static_cast<int>(count);
    node = graph()->NewNode(common()->StateValues(node_count, mask),
                            node_count, nodes);
    NodeKey* new_key = zone()->New<NodeKey>(node);
    lookup->key = new_key;
    lookup->value = node;
  } else {
    node = reinterpret_cast<Node*>(lookup->value);
  }
  return node;
}

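// Fills |node_buffer| from |*node_count| onwards with the live values among
// values[*values_idx..count), consuming at most kMaxInputCount buffer slots
// and kMaxSparseInputs virtual slots, and returns the matching sparse input
// mask. Illustrative example: for three values of which only the middle one
// is dead, the buffer receives the two live nodes, and the returned mask
// read LSB-first is 0b1101 (live, optimized out, live, then the end marker,
// assuming kEndMarker is 1).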
SparseInputMask::BitMaskType StateValuesCache::FillBufferWithValues(
    WorkingBuffer* node_buffer, size_t* node_count, size_t* values_idx,
    Node** values, size_t count, const BytecodeLivenessState* liveness) {
  SparseInputMask::BitMaskType input_mask = 0;

  // Virtual nodes are the live nodes plus the implicit optimized out nodes,
  // which are implied by the liveness mask.
  size_t virtual_node_count = *node_count;

  while (*values_idx < count && *node_count < kMaxInputCount &&
         virtual_node_count < SparseInputMask::kMaxSparseInputs) {
    DCHECK_LE(*values_idx, static_cast<size_t>(INT_MAX));

    if (liveness == nullptr ||
        liveness->RegisterIsLive(static_cast<int>(*values_idx))) {
      input_mask |= 1 << (virtual_node_count);
      (*node_buffer)[(*node_count)++] = values[*values_idx];
    }
    virtual_node_count++;

    (*values_idx)++;
  }

  DCHECK_GE(StateValuesCache::kMaxInputCount, *node_count);
  DCHECK_GE(SparseInputMask::kMaxSparseInputs, virtual_node_count);

  // Add the end marker at the end of the mask.
  input_mask |= SparseInputMask::kEndMarker << virtual_node_count;

  return input_mask;
}

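// Recursively builds a tree of StateValues nodes covering
// values[*values_idx..count). At level 0 the values are packed sparsely
// under a liveness-derived input mask; at higher levels up to kMaxInputCount
// subtrees are packed densely, except that a short tail of remaining values
// is inlined directly into the interior node as sparse inputs.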
Node* StateValuesCache::BuildTree(size_t* values_idx, Node** values,
                                  size_t count,
                                  const BytecodeLivenessState* liveness,
                                  size_t level) {
  WorkingBuffer* node_buffer = GetWorkingSpace(level);
  size_t node_count = 0;
  SparseInputMask::BitMaskType input_mask = SparseInputMask::kDenseBitMask;

  if (level == 0) {
    input_mask = FillBufferWithValues(node_buffer, &node_count, values_idx,
                                      values, count, liveness);
    // Make sure we returned a sparse input mask.
    DCHECK_NE(input_mask, SparseInputMask::kDenseBitMask);
  } else {
    while (*values_idx < count && node_count < kMaxInputCount) {
      if (count - *values_idx < kMaxInputCount - node_count) {
        // If we have fewer values remaining than inputs remaining, dump the
        // remaining values into this node.
        // TODO(leszeks): We could optimise this further by only counting
        // remaining live nodes.

        size_t previous_input_count = node_count;
        input_mask = FillBufferWithValues(node_buffer, &node_count, values_idx,
                                          values, count, liveness);
        // Make sure we have exhausted our values.
        DCHECK_EQ(*values_idx, count);
        // Make sure we returned a sparse input mask.
        DCHECK_NE(input_mask, SparseInputMask::kDenseBitMask);

        // Make sure we haven't touched inputs below previous_input_count in
        // the mask.
        DCHECK_EQ(input_mask & ((1 << previous_input_count) - 1), 0u);
        // Mark all previous inputs as live.
        input_mask |= ((1 << previous_input_count) - 1);

        break;

      } else {
        // Otherwise, add the values to a subtree and add that as an input.
        Node* subtree =
            BuildTree(values_idx, values, count, liveness, level - 1);
        (*node_buffer)[node_count++] = subtree;
        // Don't touch the bitmask, so that it stays dense.
      }
    }
  }

  if (node_count == 1 && input_mask == SparseInputMask::kDenseBitMask) {
    // Elide the StateValue node if there is only one, dense input. This will
    // only happen if we built a single subtree (as nodes with values are
    // always sparse), and so we can replace ourselves with it.
    DCHECK_EQ((*node_buffer)[0]->opcode(), IrOpcode::kStateValues);
    return (*node_buffer)[0];
  } else {
    return GetValuesNodeFromCache(node_buffer->data(), node_count,
                                  SparseInputMask(input_mask));
  }
}

#if DEBUG
namespace {

void CheckTreeContainsValues(Node* tree, Node** values, size_t count,
                             const BytecodeLivenessState* liveness) {
  DCHECK_EQ(count, StateValuesAccess(tree).size());

  int i;
  auto access = StateValuesAccess(tree);
  auto it = access.begin();
  auto itend = access.end();
  for (i = 0; it != itend; ++it, ++i) {
    if (liveness == nullptr || liveness->RegisterIsLive(i)) {
      DCHECK_EQ(it.node(), values[i]);
    } else {
      DCHECK_NULL(it.node());
    }
  }
  DCHECK_EQ(static_cast<size_t>(i), count);
}

}  // namespace
#endif

Node* StateValuesCache::GetNodeForValues(
    Node** values, size_t count, const BytecodeLivenessState* liveness) {
#if DEBUG
  // Check that the values represent actual values, and not a tree of values.
  for (size_t i = 0; i < count; i++) {
    if (values[i] != nullptr) {
      DCHECK_NE(values[i]->opcode(), IrOpcode::kStateValues);
      DCHECK_NE(values[i]->opcode(), IrOpcode::kTypedStateValues);
    }
  }
  if (liveness != nullptr) {
    DCHECK_LE(count, static_cast<size_t>(liveness->register_count()));

    for (size_t i = 0; i < count; i++) {
      if (liveness->RegisterIsLive(static_cast<int>(i))) {
        DCHECK_NOT_NULL(values[i]);
      }
    }
  }
#endif

  if (count == 0) {
    return GetEmptyStateValues();
  }

  // This is a worst-case tree height estimate, assuming that all values are
  // live. We could get a better estimate by counting zeroes in the liveness
  // vector, but there's no point -- any excess height in the tree will be
  // collapsed by the single-input elision at the end of BuildTree.
  size_t height = 0;
  size_t max_inputs = kMaxInputCount;
  while (count > max_inputs) {
    height++;
    max_inputs *= kMaxInputCount;
  }
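  // The loop above finds the smallest height h with
  // count <= kMaxInputCount^(h + 1), i.e. the smallest tree with enough leaf
  // slots for all values (e.g. 100 values need height 2 when kMaxInputCount
  // is 8, since 8^2 = 64 < 100 <= 8^3 = 512).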

  size_t values_idx = 0;
  Node* tree = BuildTree(&values_idx, values, count, liveness, height);
  // The values should be exhausted by the end of BuildTree.
  DCHECK_EQ(values_idx, count);

  // The 'tree' must be rooted with a state value node.
  DCHECK_EQ(tree->opcode(), IrOpcode::kStateValues);

#if DEBUG
  CheckTreeContainsValues(tree, values, count, liveness);
#endif

  return tree;
}

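// Starts the iteration at the outermost node. EnsureValid immediately
// descends into nested StateValues children, so the iterator begins on the
// first actual (or optimized-out) value slot.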
StateValuesAccess::iterator::iterator(Node* node) : current_depth_(0) {
  stack_[current_depth_] =
      SparseInputMaskOf(node->op()).IterateOverInputs(node);
  EnsureValid();
}

SparseInputMask::InputIterator* StateValuesAccess::iterator::Top() {
  DCHECK_LE(0, current_depth_);
  DCHECK_GT(kMaxInlineDepth, current_depth_);
  return &(stack_[current_depth_]);
}

void StateValuesAccess::iterator::Push(Node* node) {
  current_depth_++;
  CHECK_GT(kMaxInlineDepth, current_depth_);
  stack_[current_depth_] =
      SparseInputMaskOf(node->op()).IterateOverInputs(node);
}

void StateValuesAccess::iterator::Pop() {
  DCHECK_LE(0, current_depth_);
  current_depth_--;
}

void StateValuesAccess::iterator::Advance() {
  Top()->Advance();
  EnsureValid();
}

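// Skips over consecutive optimized-out (empty) slots and returns how many
// were skipped.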
size_t StateValuesAccess::iterator::AdvanceTillNotEmpty() {
  size_t count = 0;
  while (!done() && Top()->IsEmpty()) {
    count += Top()->AdvanceToNextRealOrEnd();
    EnsureValid();
  }
  return count;
}

void StateValuesAccess::iterator::EnsureValid() {
  while (true) {
    SparseInputMask::InputIterator* top = Top();

    if (top->IsEmpty()) {
      // We are on a valid (albeit optimized out) node.
      return;
    }

    if (top->IsEnd()) {
      // We have hit the end of this iterator. Pop the stack and move to the
      // next sibling iterator.
      Pop();
      if (done()) {
        // Stack is exhausted, we have reached the end.
        return;
      }
      Top()->Advance();
      continue;
    }

    // At this point the value is known to be live and within our input nodes.
    Node* value_node = top->GetReal();

    if (value_node->opcode() == IrOpcode::kStateValues ||
        value_node->opcode() == IrOpcode::kTypedStateValues) {
      // Nested state, we need to push to the stack.
      Push(value_node);
      continue;
    }

    // We are on a valid node, we can stop the iteration.
    return;
  }
}

Node* StateValuesAccess::iterator::node() {
  DCHECK(!done());
  return Top()->Get(nullptr);
}

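// Plain StateValues nodes report all inputs as tagged; TypedStateValues
// nodes carry an explicit machine type for each real input.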
MachineType StateValuesAccess::iterator::type() {
  Node* parent = Top()->parent();
  DCHECK(!Top()->IsEmpty());
  if (parent->opcode() == IrOpcode::kStateValues) {
    return MachineType::AnyTagged();
  } else {
    DCHECK_EQ(IrOpcode::kTypedStateValues, parent->opcode());

    ZoneVector<MachineType> const* types = MachineTypesOf(parent->op());
    return (*types)[Top()->real_index()];
  }
}

bool StateValuesAccess::iterator::operator!=(iterator const& other) const {
  // We only allow comparison with end().
  CHECK(other.done());
  return !done();
}

StateValuesAccess::iterator& StateValuesAccess::iterator::operator++() {
  DCHECK(!done());
  Advance();
  return *this;
}

StateValuesAccess::TypedNode StateValuesAccess::iterator::operator*() {
  return TypedNode(node(), type());
}

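// Counts value slots (live and optimized out alike) in the tree rooted at
// node_, recursing into nested StateValues and TypedStateValues children.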
size_t StateValuesAccess::size() const {
  size_t count = 0;
  SparseInputMask mask = SparseInputMaskOf(node_->op());

  SparseInputMask::InputIterator iterator = mask.IterateOverInputs(node_);

  for (; !iterator.IsEnd(); iterator.Advance()) {
    if (iterator.IsEmpty()) {
      count++;
    } else {
      Node* value = iterator.GetReal();
      if (value->opcode() == IrOpcode::kStateValues ||
          value->opcode() == IrOpcode::kTypedStateValues) {
        count += StateValuesAccess(value).size();
      } else {
        count++;
      }
    }
  }

  return count;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8