// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/debug/debug-coverage.h"

#include "src/ast/ast-source-ranges.h"
#include "src/ast/ast.h"
#include "src/base/hashmap.h"
#include "src/common/assert-scope.h"
#include "src/common/globals.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate.h"
#include "src/objects/debug-objects-inl.h"
#include "src/objects/objects.h"

namespace v8 {
namespace internal {

class SharedToCounterMap
    : public base::TemplateHashMapImpl<SharedFunctionInfo, uint32_t,
                                       base::KeyEqualityMatcher<Object>,
                                       base::DefaultAllocationPolicy> {
 public:
  using Entry = base::TemplateHashMapEntry<SharedFunctionInfo, uint32_t>;
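  // Adds |count| to the existing count for |key|, saturating at UINT32_MAX
  // instead of wrapping around on overflow.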
  inline void Add(SharedFunctionInfo key, uint32_t count) {
    Entry* entry = LookupOrInsert(key, Hash(key), []() { return 0; });
    uint32_t old_count = entry->value;
    if (UINT32_MAX - count < old_count) {
      entry->value = UINT32_MAX;
    } else {
      entry->value = old_count + count;
    }
  }

  inline uint32_t Get(SharedFunctionInfo key) {
    Entry* entry = Lookup(key, Hash(key));
    if (entry == nullptr) return 0;
    return entry->value;
  }

 private:
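  // Object addresses are only stable hash keys while the GC cannot move
  // objects; the DISALLOW_GARBAGE_COLLECTION scope below ensures this.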
  static uint32_t Hash(SharedFunctionInfo key) {
    return static_cast<uint32_t>(key.ptr());
  }

  DISALLOW_GARBAGE_COLLECTION(no_gc)
};

namespace {
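// Returns the function's start position, preferring the position of the
// 'function' token when available.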
int StartPosition(SharedFunctionInfo info) {
  int start = info.function_token_position();
  if (start == kNoSourcePosition) start = info.StartPosition();
  return start;
}

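// Orders blocks by ascending start position; ties are broken by descending
// end position so that enclosing blocks sort before the blocks they contain.
// For example, the ranges [0,10), [0,5), [5,10) are already in sorted order.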
bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
  DCHECK_NE(kNoSourcePosition, a.start);
  DCHECK_NE(kNoSourcePosition, b.start);
  if (a.start == b.start) return a.end > b.end;
  return a.start < b.start;
}

void SortBlockData(std::vector<CoverageBlock>& v) {
  // Sort according to the block nesting structure.
  std::sort(v.begin(), v.end(), CompareCoverageBlock);
}

std::vector<CoverageBlock> GetSortedBlockData(SharedFunctionInfo shared) {
  DCHECK(shared.HasCoverageInfo());

  CoverageInfo coverage_info =
      CoverageInfo::cast(shared.GetDebugInfo().coverage_info());

  std::vector<CoverageBlock> result;
  if (coverage_info.slot_count() == 0) return result;

  for (int i = 0; i < coverage_info.slot_count(); i++) {
    const int start_pos = coverage_info.slots_start_source_position(i);
    const int until_pos = coverage_info.slots_end_source_position(i);
    const int count = coverage_info.slots_block_count(i);

    DCHECK_NE(kNoSourcePosition, start_pos);
    result.emplace_back(start_pos, until_pos, count);
  }

  SortBlockData(result);

  return result;
}

// A utility class to simplify logic for performing passes over block coverage
// ranges. Provides access to the implicit tree structure of ranges (i.e.
// access to parent and sibling blocks), and supports efficient in-place
// editing and deletion. The underlying backing store is the array of
// CoverageBlocks stored on the CoverageFunction.
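//
// A typical filtering pass looks like this (ShouldDelete stands in for the
// pass-specific condition):
//
//   CoverageBlockIterator iter(function);
//   while (iter.Next()) {
//     CoverageBlock& block = iter.GetBlock();
//     if (ShouldDelete(block)) iter.DeleteBlock();
//   }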
class CoverageBlockIterator final {
 public:
  explicit CoverageBlockIterator(CoverageFunction* function)
      : function_(function) {
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  ~CoverageBlockIterator() {
    Finalize();
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  bool HasNext() const {
    return read_index_ + 1 < static_cast<int>(function_->blocks.size());
  }

  bool Next() {
    if (!HasNext()) {
      if (!ended_) MaybeWriteCurrent();
      ended_ = true;
      return false;
    }

    // If a block has been deleted, subsequent iteration moves trailing blocks
    // to their updated position within the array.
    MaybeWriteCurrent();

    if (read_index_ == -1) {
      // Initialize the nesting stack with the function range.
      nesting_stack_.emplace_back(function_->start, function_->end,
                                  function_->count);
    } else if (!delete_current_) {
      nesting_stack_.emplace_back(GetBlock());
    }

    delete_current_ = false;
    read_index_++;

    DCHECK(IsActive());

    CoverageBlock& block = GetBlock();
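    // Unwind the nesting stack until the stack top encloses the current
    // block. The function range at the bottom of the stack is never popped.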
    while (nesting_stack_.size() > 1 &&
           nesting_stack_.back().end <= block.start) {
      nesting_stack_.pop_back();
    }

    DCHECK_IMPLIES(block.start >= function_->end,
                   block.end == kNoSourcePosition);
    DCHECK_NE(block.start, kNoSourcePosition);
    DCHECK_LE(block.end, GetParent().end);

    return true;
  }

  CoverageBlock& GetBlock() {
    DCHECK(IsActive());
    return function_->blocks[read_index_];
  }

  CoverageBlock& GetNextBlock() {
    DCHECK(IsActive());
    DCHECK(HasNext());
    return function_->blocks[read_index_ + 1];
  }

  CoverageBlock& GetPreviousBlock() {
    DCHECK(IsActive());
    DCHECK_GT(read_index_, 0);
    return function_->blocks[read_index_ - 1];
  }

  CoverageBlock& GetParent() {
    DCHECK(IsActive());
    return nesting_stack_.back();
  }

  bool HasSiblingOrChild() {
    DCHECK(IsActive());
    return HasNext() && GetNextBlock().start < GetParent().end;
  }

  CoverageBlock& GetSiblingOrChild() {
    DCHECK(HasSiblingOrChild());
    DCHECK(IsActive());
    return GetNextBlock();
  }

  // A range is considered to be at top level if its parent range is the
  // function range.
  bool IsTopLevel() const { return nesting_stack_.size() == 1; }

  void DeleteBlock() {
    DCHECK(!delete_current_);
    DCHECK(IsActive());
    delete_current_ = true;
  }

 private:
  void MaybeWriteCurrent() {
    if (delete_current_) return;
    if (read_index_ >= 0 && write_index_ != read_index_) {
      function_->blocks[write_index_] = function_->blocks[read_index_];
    }
    write_index_++;
  }

  void Finalize() {
    while (Next()) {
      // Just iterate to the end.
    }
    function_->blocks.resize(write_index_);
  }

  bool IsActive() const { return read_index_ >= 0 && !ended_; }

  CoverageFunction* function_;
  std::vector<CoverageBlock> nesting_stack_;
  bool ended_ = false;
  bool delete_current_ = false;
  int read_index_ = -1;
  int write_index_ = -1;
};

bool HaveSameSourceRange(const CoverageBlock& lhs, const CoverageBlock& rhs) {
  return lhs.start == rhs.start && lhs.end == rhs.end;
}

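// Merges blocks with identical source ranges into one, keeping the maximum of
// the two counts.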
void MergeDuplicateRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next() && iter.HasNext()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& next_block = iter.GetNextBlock();

    if (!HaveSameSourceRange(block, next_block)) continue;

    DCHECK_NE(kNoSourcePosition, block.end);  // Non-singleton range.
    next_block.count = std::max(block.count, next_block.count);
    iter.DeleteBlock();
  }
}

// Rewrite position singletons (produced by unconditional control flow
// like return statements, and by continuation counters) into source
// ranges that end at the next sibling range or the end of the parent
// range, whichever comes first.
void RewritePositionSingletonsToRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (block.start >= function->end) {
      DCHECK_EQ(block.end, kNoSourcePosition);
      iter.DeleteBlock();
    } else if (block.end == kNoSourcePosition) {
      // The current block ends at the next sibling block (if it exists) or
      // the end of the parent block otherwise.
      if (iter.HasSiblingOrChild()) {
        block.end = iter.GetSiblingOrChild().start;
      } else if (iter.IsTopLevel()) {
        // See https://crbug.com/v8/6661. Functions are special-cased because
        // we never want the closing brace to be uncovered. This is mainly to
        // avoid a noisy UI.
        block.end = parent.end - 1;
      } else {
        block.end = parent.end;
      }
    }
  }
}

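// Merges a block into the immediately following sibling when their ranges
// touch and their counts are equal.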
void MergeConsecutiveRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();

    if (iter.HasSiblingOrChild()) {
      CoverageBlock& sibling = iter.GetSiblingOrChild();
      if (sibling.start == block.end && sibling.count == block.count) {
        // Best-effort: this pass may miss mergeable siblings in the presence
        // of child blocks.
        sibling.start = block.start;
        iter.DeleteBlock();
      }
    }
  }
}

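// Removes blocks whose count equals their parent's count; such blocks carry
// no additional information.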
void MergeNestedRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (parent.count == block.count) {
      // Transformation may not be valid if sibling blocks exist with a
      // differing count.
      iter.DeleteBlock();
    }
  }
}

void RewriteFunctionScopeCounter(CoverageFunction* function) {
  // Every function must have at least the top-level function counter.
  DCHECK(!function->blocks.empty());

  CoverageBlockIterator iter(function);
  if (iter.Next()) {
    DCHECK(iter.IsTopLevel());

    CoverageBlock& block = iter.GetBlock();
    if (block.start == SourceRange::kFunctionLiteralSourcePosition &&
        block.end == SourceRange::kFunctionLiteralSourcePosition) {
      // If a function-scope block exists, overwrite the function count. It
      // has a more reliable count than what we get from the FeedbackVector
      // (which is imprecise e.g. for generator functions and optimized code).
      function->count = block.count;

      // Then delete it; for compatibility with non-block coverage modes, the
      // function-scope block is expected in CoverageFunction, not as a
      // CoverageBlock.
      iter.DeleteBlock();
    }
  }
}

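// Removes singleton blocks (blocks without an end position) whose start
// position aliases the start of the preceding full range. See the call site
// in CollectBlockCoverageInternal for why such singletons must not be
// expanded into ranges.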
void FilterAliasedSingletons(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  iter.Next();  // Advance once since we reference the previous block later.

  while (iter.Next()) {
    CoverageBlock& previous_block = iter.GetPreviousBlock();
    CoverageBlock& block = iter.GetBlock();

    bool is_singleton = block.end == kNoSourcePosition;
    bool aliases_start = block.start == previous_block.start;

    if (is_singleton && aliases_start) {
      // The previous block must have a full range since duplicate singletons
      // have already been merged.
      DCHECK_NE(previous_block.end, kNoSourcePosition);
      // Likewise, the next block must have another start position since
      // singletons are sorted to the end.
      DCHECK_IMPLIES(iter.HasNext(), iter.GetNextBlock().start != block.start);
      iter.DeleteBlock();
    }
  }
}

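// Removes uncovered blocks nested directly inside uncovered parents; the
// parent's zero count already conveys that the range is uncovered.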
void FilterUncoveredRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();
    if (block.count == 0 && parent.count == 0) iter.DeleteBlock();
  }
}

void FilterEmptyRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.start == block.end) iter.DeleteBlock();
  }
}

void ClampToBinary(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.count > 0) block.count = 1;
  }
}

void ResetAllBlockCounts(SharedFunctionInfo shared) {
  DCHECK(shared.HasCoverageInfo());

  CoverageInfo coverage_info =
      CoverageInfo::cast(shared.GetDebugInfo().coverage_info());

  for (int i = 0; i < coverage_info.slot_count(); i++) {
    coverage_info.ResetBlockCount(i);
  }
}

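// Coverage modes vary along two axes: granularity (block-level for the
// kBlock* modes, function-level otherwise) and reporting (invocation counts
// for the k*Count modes, binary covered/uncovered for the k*Binary modes).
// kBestEffort reports function granularity without resetting counters.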
bool IsBlockMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
      return true;
    default:
      return false;
  }
}

bool IsBinaryMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kPreciseBinary:
      return true;
    default:
      return false;
  }
}

void CollectBlockCoverageInternal(CoverageFunction* function,
                                  SharedFunctionInfo info,
                                  debug::CoverageMode mode) {
  DCHECK(IsBlockMode(mode));

  // Functions with empty source ranges are not interesting to report. This
  // can happen e.g. for internally-generated functions like class
  // constructors.
  if (!function->HasNonEmptySourceRange()) return;

  function->has_block_coverage = true;
  function->blocks = GetSortedBlockData(info);

  // If in binary mode, only report counts of 0/1.
  if (mode == debug::CoverageMode::kBlockBinary) ClampToBinary(function);

  // To stay compatible with non-block coverage modes, the function-scope
  // count is expected to be in the CoverageFunction, not as part of its
  // blocks. This finds the function-scope counter, overwrites
  // CoverageFunction::count, and removes it from the block list.
  //
  // Important: Must be called before other transformation passes.
  RewriteFunctionScopeCounter(function);

  // Functions without blocks don't need to be processed further.
  if (!function->HasBlocks()) return;

  // Remove singleton ranges with the same start position as a full range and
  // throw away their counts.
  // Singleton ranges are only intended to split existing full ranges and
  // should never expand into a full range. Consider
  // 'if (cond) { ... } else { ... }' as a problematic example: if the
  // then-block produces a continuation singleton, it would incorrectly
  // expand into the else range.
  // For more context, see https://crbug.com/v8/8237.
  FilterAliasedSingletons(function);

  // Rewrite all singletons (created e.g. by continuations and unconditional
  // control flow) to ranges.
  RewritePositionSingletonsToRanges(function);

  // Merge nested and consecutive ranges with identical counts.
  // Note that it's necessary to merge duplicate ranges prior to merging
  // nested ranges in order to avoid invalid transformations. See
  // crbug.com/827530.
  MergeConsecutiveRanges(function);

  SortBlockData(function->blocks);
  MergeDuplicateRanges(function);
  MergeNestedRanges(function);

  MergeConsecutiveRanges(function);

  // Filter out ranges with count == 0 unless the immediate parent range has
  // a count != 0.
  FilterUncoveredRanges(function);

  // Filter out ranges of zero length.
  FilterEmptyRanges(function);
}

void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
                          debug::CoverageMode mode) {
  CollectBlockCoverageInternal(function, info, mode);

  // Reset all counters on the DebugInfo to zero.
  ResetAllBlockCounts(info);
}

void PrintBlockCoverage(const CoverageFunction* function,
                        SharedFunctionInfo info,
                        bool has_nonempty_source_range,
                        bool function_is_relevant) {
  DCHECK(FLAG_trace_block_coverage);
  std::unique_ptr<char[]> function_name =
      function->name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
  i::PrintF(
      "Coverage for function='%s', SFI=%p, has_nonempty_source_range=%d, "
      "function_is_relevant=%d\n",
      function_name.get(), reinterpret_cast<void*>(info.ptr()),
      has_nonempty_source_range, function_is_relevant);
  i::PrintF("{start: %d, end: %d, count: %d}\n", function->start,
            function->end, function->count);
  for (const auto& block : function->blocks) {
    i::PrintF("{start: %d, end: %d, count: %d}\n", block.start, block.end,
              block.count);
  }
}

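// Collects invocation counts for all relevant functions into |counter_map|
// and, unless collecting best-effort coverage, resets the counters.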
void CollectAndMaybeResetCounts(Isolate* isolate,
                                SharedToCounterMap* counter_map,
                                v8::debug::CoverageMode coverage_mode) {
  const bool reset_count =
      coverage_mode != v8::debug::CoverageMode::kBestEffort;

  switch (isolate->code_coverage_mode()) {
    case v8::debug::CoverageMode::kBlockBinary:
    case v8::debug::CoverageMode::kBlockCount:
    case v8::debug::CoverageMode::kPreciseBinary:
    case v8::debug::CoverageMode::kPreciseCount: {
      // Feedback vectors are already listed to prevent losing them to GC.
      DCHECK(isolate->factory()
                 ->feedback_vectors_for_profiling_tools()
                 ->IsArrayList());
      Handle<ArrayList> list = Handle<ArrayList>::cast(
          isolate->factory()->feedback_vectors_for_profiling_tools());
      for (int i = 0; i < list->Length(); i++) {
        FeedbackVector vector = FeedbackVector::cast(list->Get(i));
        SharedFunctionInfo shared = vector.shared_function_info();
        DCHECK(shared.IsSubjectToDebugging());
        uint32_t count = static_cast<uint32_t>(vector.invocation_count());
        if (reset_count) vector.clear_invocation_count(kRelaxedStore);
        counter_map->Add(shared, count);
      }
      break;
    }
    case v8::debug::CoverageMode::kBestEffort: {
      DCHECK(!isolate->factory()
                  ->feedback_vectors_for_profiling_tools()
                  ->IsArrayList());
      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
      AllowGarbageCollection allow_gc;
      HeapObjectIterator heap_iterator(isolate->heap());
      for (HeapObject current_obj = heap_iterator.Next();
           !current_obj.is_null(); current_obj = heap_iterator.Next()) {
        if (!current_obj.IsJSFunction()) continue;
        JSFunction func = JSFunction::cast(current_obj);
        SharedFunctionInfo shared = func.shared();
        if (!shared.IsSubjectToDebugging()) continue;
        if (!(func.has_feedback_vector() ||
              func.has_closure_feedback_cell_array())) {
          continue;
        }
        uint32_t count = 0;
        if (func.has_feedback_vector()) {
          count = static_cast<uint32_t>(
              func.feedback_vector().invocation_count());
        } else if (func.raw_feedback_cell().interrupt_budget() <
                   FLAG_interrupt_budget_for_feedback_allocation) {
          // TODO(jgruber): The condition above is no longer precise since we
          // may use either the fixed interrupt_budget or
          // FLAG_interrupt_budget_factor_for_feedback_allocation. If the
          // latter, we may incorrectly set a count of 1.
          //
          // We haven't allocated a feedback vector, but the function has
          // executed at least once. We don't have a precise invocation count
          // here.
          count = 1;
        }
        counter_map->Add(shared, count);
      }

      // Also check functions on the stack to collect the count map. With
      // lazy feedback allocation we may miss counting functions if the
      // feedback vector wasn't allocated yet and the function's interrupt
      // budget wasn't updated (i.e. it didn't execute return / jump).
      for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
        SharedFunctionInfo shared = it.frame()->function().shared();
        if (counter_map->Get(shared) != 0) continue;
        counter_map->Add(shared, 1);
      }
      break;
    }
  }
}

// A {SFI, count} tuple is used to sort by source range (stored on
// the SFI) and call count (in the counter map).
struct SharedFunctionInfoAndCount {
  SharedFunctionInfoAndCount(Handle<SharedFunctionInfo> info, uint32_t count)
      : info(info),
        count(count),
        start(StartPosition(*info)),
        end(info->EndPosition()) {}

  // Sort by:
  // - start, ascending.
  // - end, descending.
  // - info.is_toplevel() first
  // - count, descending.
  bool operator<(const SharedFunctionInfoAndCount& that) const {
    if (this->start != that.start) return this->start < that.start;
    if (this->end != that.end) return this->end > that.end;
    if (this->info->is_toplevel() != that.info->is_toplevel()) {
      return this->info->is_toplevel();
    }
    return this->count > that.count;
  }

  Handle<SharedFunctionInfo> info;
  uint32_t count;
  int start;
  int end;
};

}  // anonymous namespace

std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
  DCHECK(!isolate->is_best_effort_code_coverage());
  std::unique_ptr<Coverage> result =
      Collect(isolate, isolate->code_coverage_mode());
  if (!isolate->is_collecting_type_profile() &&
      (isolate->is_precise_binary_code_coverage() ||
       isolate->is_block_binary_code_coverage())) {
    // We do not have to hold onto feedback vectors for invocations we already
    // reported. So we can reset the list.
    isolate->SetFeedbackVectorsForProfilingTools(
        ReadOnlyRoots(isolate).empty_array_list());
  }
  return result;
}

std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
}

std::unique_ptr<Coverage> Coverage::Collect(
    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
  // Collect call counts for all functions.
  SharedToCounterMap counter_map;
  CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);

  // Iterate shared function infos of every script and build a mapping
  // between source ranges and invocation counts.
  std::unique_ptr<Coverage> result(new Coverage());

  std::vector<Handle<Script>> scripts;
  Script::Iterator scriptIt(isolate);
  for (Script script = scriptIt.Next(); !script.is_null();
       script = scriptIt.Next()) {
    if (script.IsUserJavaScript()) scripts.push_back(handle(script, isolate));
  }

  for (Handle<Script> script : scripts) {
    // Create and add new script data.
    result->emplace_back(script);
    std::vector<CoverageFunction>* functions = &result->back().functions;

    std::vector<SharedFunctionInfoAndCount> sorted;

    {
      // Sort functions by start position, from outer to inner functions.
      SharedFunctionInfo::ScriptIterator infos(isolate, *script);
      for (SharedFunctionInfo info = infos.Next(); !info.is_null();
           info = infos.Next()) {
        sorted.emplace_back(handle(info, isolate), counter_map.Get(info));
      }
      std::sort(sorted.begin(), sorted.end());
    }

    // Stack to track nested functions, referring function by index.
    std::vector<size_t> nesting;

    // Use sorted list to reconstruct function nesting.
    for (const SharedFunctionInfoAndCount& v : sorted) {
      Handle<SharedFunctionInfo> info = v.info;
      int start = v.start;
      int end = v.end;
      uint32_t count = v.count;

      // Find the correct outer function based on start position.
      //
      // This is, in general, not robust when considering two functions with
      // identical source ranges; then the notion of inner and outer is
      // unclear. Identical source ranges arise when the source range of the
      // top-most entity (e.g. function) in the script is identical to the
      // whole script, e.g. <script>function foo() {}</script>. The script has
      // its own shared function info, which has the same source range as the
      // SFI for `foo`.
      // Node.js creates an additional wrapper for scripts (again with
      // identical source range) and those wrappers will have a call count of
      // zero even if the wrapped script was executed (see v8:9212). We
      // mitigate this issue by sorting top-level SFIs first among SFIs with
      // the same source range: This ensures top-level SFIs are processed
      // first. If a top-level SFI has a non-zero call count, it gets recorded
      // due to `function_is_relevant` below (e.g. script wrappers), while
      // top-level SFIs with zero call count do not get reported (this ensures
      // node's extra wrappers do not get reported). If two SFIs with
      // identical source ranges get reported, we report them in decreasing
      // order of call count, as in all known cases this corresponds to the
      // nesting order. In the case of the script tag example above, we report
      // the zero call count of `foo` last. As it turns out, embedders started
      // to rely on functions being reported in nesting order.
      // TODO(jgruber): Investigate whether it is possible to remove node's
      // extra top-level wrapper script, or change its source range, or ensure
      // that it follows the invariant that nesting order is descending count
      // order for SFIs with identical source ranges.
      while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
        nesting.pop_back();
      }

      if (count != 0) {
        switch (collectionMode) {
          case v8::debug::CoverageMode::kBlockCount:
          case v8::debug::CoverageMode::kPreciseCount:
            break;
          case v8::debug::CoverageMode::kBlockBinary:
          case v8::debug::CoverageMode::kPreciseBinary:
            count = info->has_reported_binary_coverage() ? 0 : 1;
            info->set_has_reported_binary_coverage(true);
            break;
          case v8::debug::CoverageMode::kBestEffort:
            count = 1;
            break;
        }
      }

      Handle<String> name = SharedFunctionInfo::DebugName(info);
      CoverageFunction function(start, end, count, name);

      if (IsBlockMode(collectionMode) && info->HasCoverageInfo()) {
        CollectBlockCoverage(&function, *info, collectionMode);
      }
      // Only include a function range if the function itself or its parent
      // is covered, or if it contains non-trivial block coverage.
      bool is_covered = (count != 0);
      bool parent_is_covered =
          (!nesting.empty() && functions->at(nesting.back()).count != 0);
      bool has_block_coverage = !function.blocks.empty();
      bool function_is_relevant =
          (is_covered || parent_is_covered || has_block_coverage);

      // It must also have a non-empty source range (otherwise it is not
      // interesting to report).
      bool has_nonempty_source_range = function.HasNonEmptySourceRange();

      if (has_nonempty_source_range && function_is_relevant) {
        nesting.push_back(functions->size());
        functions->emplace_back(function);
      }

      if (FLAG_trace_block_coverage) {
        PrintBlockCoverage(&function, *info, has_nonempty_source_range,
                           function_is_relevant);
      }
    }

    // Remove entries for scripts that have no coverage.
    if (functions->empty()) result->pop_back();
  }
  return result;
}

void Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) {
  if (mode != isolate->code_coverage_mode()) {
    // Changing the coverage mode can change the bytecode that would be
    // generated for a function, which can interfere with lazy source
    // positions, so just force source position collection whenever there's
    // such a change.
    isolate->CollectSourcePositionsForAllBytecodeArrays();
    // Changing the coverage mode changes the generated bytecode and hence it
    // is not safe to flush bytecode. Set a flag here, so we can disable
    // bytecode flushing.
    isolate->set_disable_bytecode_flushing(true);
  }

  switch (mode) {
    case debug::CoverageMode::kBestEffort:
      // Note that DevTools switches back to best-effort coverage once the
      // recording is stopped. Since we delete coverage infos at that point,
      // any following coverage recording (without reloads) will be at
      // function granularity.
      isolate->debug()->RemoveAllCoverageInfos();
      if (!isolate->is_collecting_type_profile()) {
        isolate->SetFeedbackVectorsForProfilingTools(
            ReadOnlyRoots(isolate).undefined_value());
      }
      break;
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
    case debug::CoverageMode::kPreciseBinary:
    case debug::CoverageMode::kPreciseCount: {
      HandleScope scope(isolate);

      // Remove all optimized code, since optimized and inlined functions do
      // not increment the invocation count.
      Deoptimizer::DeoptimizeAll(isolate);

      std::vector<Handle<JSFunction>> funcs_needing_feedback_vector;
      {
        HeapObjectIterator heap_iterator(isolate->heap());
        for (HeapObject o = heap_iterator.Next(); !o.is_null();
             o = heap_iterator.Next()) {
          if (o.IsJSFunction()) {
            JSFunction func = JSFunction::cast(o);
            if (func.has_closure_feedback_cell_array()) {
              funcs_needing_feedback_vector.push_back(
                  Handle<JSFunction>(func, isolate));
            }
          } else if (IsBinaryMode(mode) && o.IsSharedFunctionInfo()) {
            // If collecting binary coverage, reset
            // SFI::has_reported_binary_coverage to avoid optimizing /
            // inlining functions before they have reported coverage.
            SharedFunctionInfo shared = SharedFunctionInfo::cast(o);
            shared.set_has_reported_binary_coverage(false);
          } else if (o.IsFeedbackVector()) {
            // In any case, clear any collected invocation counts.
            FeedbackVector::cast(o).clear_invocation_count(kRelaxedStore);
          }
        }
      }

      for (Handle<JSFunction> func : funcs_needing_feedback_vector) {
        IsCompiledScope is_compiled_scope(
            func->shared().is_compiled_scope(isolate));
        CHECK(is_compiled_scope.is_compiled());
        JSFunction::EnsureFeedbackVector(isolate, func, &is_compiled_scope);
      }

      // Root all feedback vectors to avoid early collection.
      isolate->MaybeInitializeVectorListFromHeap();

      break;
    }
  }
  isolate->set_code_coverage_mode(mode);
}

}  // namespace internal
}  // namespace v8
828