// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/debug/debug-coverage.h"

#include "src/ast/ast-source-ranges.h"
#include "src/ast/ast.h"
#include "src/base/hashmap.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate.h"
#include "src/objects/debug-objects-inl.h"
#include "src/objects/objects.h"

namespace v8 {
namespace internal {

class SharedToCounterMap
    : public base::TemplateHashMapImpl<SharedFunctionInfo, uint32_t,
                                       base::KeyEqualityMatcher<Object>,
                                       base::DefaultAllocationPolicy> {
 public:
  using Entry = base::TemplateHashMapEntry<SharedFunctionInfo, uint32_t>;
  inline void Add(SharedFunctionInfo key, uint32_t count) {
    Entry* entry = LookupOrInsert(key, Hash(key), []() { return 0; });
    uint32_t old_count = entry->value;
    if (UINT32_MAX - count < old_count) {
      // Saturate on overflow instead of wrapping around.
      entry->value = UINT32_MAX;
    } else {
      entry->value = old_count + count;
    }
  }

  inline uint32_t Get(SharedFunctionInfo key) {
    Entry* entry = Lookup(key, Hash(key));
    if (entry == nullptr) return 0;
    return entry->value;
  }

 private:
  static uint32_t Hash(SharedFunctionInfo key) {
    return static_cast<uint32_t>(key.ptr());
  }

  DisallowHeapAllocation no_gc;
};

namespace {

int StartPosition(SharedFunctionInfo info) {
  int start = info.function_token_position();
  if (start == kNoSourcePosition) start = info.StartPosition();
  return start;
}

bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
  DCHECK_NE(kNoSourcePosition, a.start);
  DCHECK_NE(kNoSourcePosition, b.start);
  if (a.start == b.start) return a.end > b.end;
  return a.start < b.start;
}

void SortBlockData(
    std::vector<CoverageBlock>& v) {  // NOLINT(runtime/references)
  // Sort according to the block nesting structure.
  std::sort(v.begin(), v.end(), CompareCoverageBlock);
}

std::vector<CoverageBlock> GetSortedBlockData(SharedFunctionInfo shared) {
  DCHECK(shared.HasCoverageInfo());

  CoverageInfo coverage_info =
      CoverageInfo::cast(shared.GetDebugInfo().coverage_info());

  std::vector<CoverageBlock> result;
  if (coverage_info.slot_count() == 0) return result;

  for (int i = 0; i < coverage_info.slot_count(); i++) {
    const int start_pos = coverage_info.StartSourcePosition(i);
    const int until_pos = coverage_info.EndSourcePosition(i);
    const int count = coverage_info.BlockCount(i);

    DCHECK_NE(kNoSourcePosition, start_pos);
    result.emplace_back(start_pos, until_pos, count);
  }

  SortBlockData(result);

  return result;
}
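// Illustrative sketch (not part of the original source; positions and counts
// are hypothetical): CompareCoverageBlock orders blocks by the implicit
// nesting structure, so GetSortedBlockData might return
//
//   {start:  0, end: 100, count: 1}  // Outer range first.
//   {start: 10, end:  60, count: 1}  // Nested range.
//   {start: 10, end:  20, count: 0}  // Equal start: larger end sorts first.
//   {start: 70, end:  -1, count: 0}  // Singleton (end == kNoSourcePosition).
//
// Parents therefore precede their children, and a singleton sharing a start
// position with a full range sorts after that range.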
// A utility class to simplify logic for performing passes over block coverage
// ranges. Provides access to the implicit tree structure of ranges (i.e.
// access to parent and sibling blocks), and supports efficient in-place
// editing and deletion. The underlying backing store is the array of
// CoverageBlocks stored on the CoverageFunction.
class CoverageBlockIterator final {
 public:
  explicit CoverageBlockIterator(CoverageFunction* function)
      : function_(function) {
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  ~CoverageBlockIterator() {
    Finalize();
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  bool HasNext() const {
    return read_index_ + 1 < static_cast<int>(function_->blocks.size());
  }

  bool Next() {
    if (!HasNext()) {
      if (!ended_) MaybeWriteCurrent();
      ended_ = true;
      return false;
    }

    // If a block has been deleted, subsequent iteration moves trailing blocks
    // to their updated position within the array.
    MaybeWriteCurrent();

    if (read_index_ == -1) {
      // Initialize the nesting stack with the function range.
      nesting_stack_.emplace_back(function_->start, function_->end,
                                  function_->count);
    } else if (!delete_current_) {
      nesting_stack_.emplace_back(GetBlock());
    }

    delete_current_ = false;
    read_index_++;

    DCHECK(IsActive());

    CoverageBlock& block = GetBlock();
    while (nesting_stack_.size() > 1 &&
           nesting_stack_.back().end <= block.start) {
      nesting_stack_.pop_back();
    }

    DCHECK_IMPLIES(block.start >= function_->end,
                   block.end == kNoSourcePosition);
    DCHECK_NE(block.start, kNoSourcePosition);
    DCHECK_LE(block.end, GetParent().end);

    return true;
  }

  CoverageBlock& GetBlock() {
    DCHECK(IsActive());
    return function_->blocks[read_index_];
  }

  CoverageBlock& GetNextBlock() {
    DCHECK(IsActive());
    DCHECK(HasNext());
    return function_->blocks[read_index_ + 1];
  }

  CoverageBlock& GetPreviousBlock() {
    DCHECK(IsActive());
    DCHECK_GT(read_index_, 0);
    return function_->blocks[read_index_ - 1];
  }

  CoverageBlock& GetParent() {
    DCHECK(IsActive());
    return nesting_stack_.back();
  }

  bool HasSiblingOrChild() {
    DCHECK(IsActive());
    return HasNext() && GetNextBlock().start < GetParent().end;
  }

  CoverageBlock& GetSiblingOrChild() {
    DCHECK(HasSiblingOrChild());
    DCHECK(IsActive());
    return GetNextBlock();
  }

  // A range is considered to be at top level if its parent range is the
  // function range.
  bool IsTopLevel() const { return nesting_stack_.size() == 1; }

  void DeleteBlock() {
    DCHECK(!delete_current_);
    DCHECK(IsActive());
    delete_current_ = true;
  }

 private:
  void MaybeWriteCurrent() {
    if (delete_current_) return;
    if (read_index_ >= 0 && write_index_ != read_index_) {
      function_->blocks[write_index_] = function_->blocks[read_index_];
    }
    write_index_++;
  }

  void Finalize() {
    while (Next()) {
      // Just iterate to the end.
    }
    function_->blocks.resize(write_index_);
  }

  bool IsActive() const { return read_index_ >= 0 && !ended_; }

  CoverageFunction* function_;
  std::vector<CoverageBlock> nesting_stack_;
  bool ended_ = false;
  bool delete_current_ = false;
  int read_index_ = -1;
  int write_index_ = -1;
};
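// A minimal usage sketch of the iterator above (illustration only; assumes
// `function` points to a CoverageFunction whose blocks are already sorted):
//
//   CoverageBlockIterator iter(function);
//   while (iter.Next()) {
//     CoverageBlock& block = iter.GetBlock();
//     // GetParent() is the innermost enclosing range (or the function range).
//     if (block.count == iter.GetParent().count) iter.DeleteBlock();
//   }
//   // Deleted blocks are compacted out of function->blocks when the iterator
//   // is destroyed (via Finalize()).
//
// The transformation passes below all follow this shape.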
bool HaveSameSourceRange(const CoverageBlock& lhs, const CoverageBlock& rhs) {
  return lhs.start == rhs.start && lhs.end == rhs.end;
}

void MergeDuplicateRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next() && iter.HasNext()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& next_block = iter.GetNextBlock();

    if (!HaveSameSourceRange(block, next_block)) continue;

    DCHECK_NE(kNoSourcePosition, block.end);  // Non-singleton range.
    next_block.count = std::max(block.count, next_block.count);
    iter.DeleteBlock();
  }
}

// Rewrite position singletons (produced by unconditional control flow
// like return statements, and by continuation counters) into source
// ranges that end at the next sibling range or the end of the parent
// range, whichever comes first.
void RewritePositionSingletonsToRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (block.start >= function->end) {
      DCHECK_EQ(block.end, kNoSourcePosition);
      iter.DeleteBlock();
    } else if (block.end == kNoSourcePosition) {
      // The current block ends at the next sibling block (if it exists) or the
      // end of the parent block otherwise.
      if (iter.HasSiblingOrChild()) {
        block.end = iter.GetSiblingOrChild().start;
      } else if (iter.IsTopLevel()) {
        // See https://crbug.com/v8/6661. Functions are special-cased because
        // we never want the closing brace to be uncovered. This is mainly to
        // avoid a noisy UI.
        block.end = parent.end - 1;
      } else {
        block.end = parent.end;
      }
    }
  }
}
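// Hypothetical example of the rewrite above: an early `return` inside a block
// yields a continuation singleton such as {start: 42, end: -1}. If a sibling
// range begins at position 50, the singleton becomes {start: 42, end: 50};
// otherwise it is extended to the parent's end (minus one at the top level,
// so the function's closing brace is never reported as uncovered).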
void MergeConsecutiveRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();

    if (iter.HasSiblingOrChild()) {
      CoverageBlock& sibling = iter.GetSiblingOrChild();
      if (sibling.start == block.end && sibling.count == block.count) {
        // Best-effort: this pass may miss mergeable siblings in the presence
        // of child blocks.
        sibling.start = block.start;
        iter.DeleteBlock();
      }
    }
  }
}

void MergeNestedRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (parent.count == block.count) {
      // Transformation may not be valid if sibling blocks exist with a
      // differing count.
      iter.DeleteBlock();
    }
  }
}
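// Hypothetical before/after for the two merge passes above:
//
//   MergeConsecutiveRanges: adjacent siblings {start: 0, end: 10, count: 5}
//   and {start: 10, end: 20, count: 5} are collapsed into a single
//   {start: 0, end: 20, count: 5}.
//
//   MergeNestedRanges: a child {start: 5, end: 15, count: 5} nested inside a
//   parent {start: 0, end: 20, count: 5} adds no information and is deleted.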
void RewriteFunctionScopeCounter(CoverageFunction* function) {
  // Every function must have at least the top-level function counter.
  DCHECK(!function->blocks.empty());

  CoverageBlockIterator iter(function);
  if (iter.Next()) {
    DCHECK(iter.IsTopLevel());

    CoverageBlock& block = iter.GetBlock();
    if (block.start == SourceRange::kFunctionLiteralSourcePosition &&
        block.end == SourceRange::kFunctionLiteralSourcePosition) {
      // If a function-scope block exists, overwrite the function count. It has
      // a more reliable count than what we get from the FeedbackVector (which
      // is imprecise e.g. for generator functions and optimized code).
      function->count = block.count;

      // Then delete it; for compatibility with non-block coverage modes, the
      // function-scope block is expected in CoverageFunction, not as a
      // CoverageBlock.
      iter.DeleteBlock();
    }
  }
}

void FilterAliasedSingletons(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  iter.Next();  // Advance once since we reference the previous block later.

  while (iter.Next()) {
    CoverageBlock& previous_block = iter.GetPreviousBlock();
    CoverageBlock& block = iter.GetBlock();

    bool is_singleton = block.end == kNoSourcePosition;
    bool aliases_start = block.start == previous_block.start;

    if (is_singleton && aliases_start) {
      // The previous block must have a full range since duplicate singletons
      // have already been merged.
      DCHECK_NE(previous_block.end, kNoSourcePosition);
      // Likewise, the next block must have another start position since
      // singletons are sorted to the end.
      DCHECK_IMPLIES(iter.HasNext(), iter.GetNextBlock().start != block.start);
      iter.DeleteBlock();
    }
  }
}

void FilterUncoveredRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();
    if (block.count == 0 && parent.count == 0) iter.DeleteBlock();
  }
}

void FilterEmptyRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.start == block.end) iter.DeleteBlock();
  }
}

void ClampToBinary(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.count > 0) block.count = 1;
  }
}

void ResetAllBlockCounts(SharedFunctionInfo shared) {
  DCHECK(shared.HasCoverageInfo());

  CoverageInfo coverage_info =
      CoverageInfo::cast(shared.GetDebugInfo().coverage_info());

  for (int i = 0; i < coverage_info.slot_count(); i++) {
    coverage_info.ResetBlockCount(i);
  }
}

bool IsBlockMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
      return true;
    default:
      return false;
  }
}

bool IsBinaryMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kPreciseBinary:
      return true;
    default:
      return false;
  }
}

void CollectBlockCoverageInternal(CoverageFunction* function,
                                  SharedFunctionInfo info,
                                  debug::CoverageMode mode) {
  DCHECK(IsBlockMode(mode));

  // Functions with empty source ranges are not interesting to report. This
  // can happen e.g. for internally-generated functions like class
  // constructors.
  if (!function->HasNonEmptySourceRange()) return;

  function->has_block_coverage = true;
  function->blocks = GetSortedBlockData(info);

  // If in binary mode, only report counts of 0/1.
  if (mode == debug::CoverageMode::kBlockBinary) ClampToBinary(function);

  // To stay compatible with non-block coverage modes, the function-scope count
  // is expected to be in the CoverageFunction, not as part of its blocks.
  // This finds the function-scope counter, overwrites CoverageFunction::count,
  // and removes it from the block list.
  //
  // Important: Must be called before other transformation passes.
  RewriteFunctionScopeCounter(function);

  // Functions without blocks don't need to be processed further.
  if (!function->HasBlocks()) return;

  // Remove singleton ranges with the same start position as a full range and
  // throw away their counts.
  // Singleton ranges are only intended to split existing full ranges and
  // should never expand into a full range. Consider
  // 'if (cond) { ... } else { ... }' as a problematic example; if the
  // then-block produces a continuation singleton, it would incorrectly expand
  // into the else range.
  // For more context, see https://crbug.com/v8/8237.
  FilterAliasedSingletons(function);

  // Rewrite all singletons (created e.g. by continuations and unconditional
  // control flow) to ranges.
  RewritePositionSingletonsToRanges(function);

  // Merge nested and consecutive ranges with identical counts.
  // Note that it's necessary to merge duplicate ranges prior to merging nested
  // ranges in order to avoid invalid transformations. See crbug.com/827530.
  MergeConsecutiveRanges(function);

  SortBlockData(function->blocks);
  MergeDuplicateRanges(function);
  MergeNestedRanges(function);

  MergeConsecutiveRanges(function);

  // Filter out ranges with count == 0 unless the immediate parent range has
  // a count != 0.
  FilterUncoveredRanges(function);

  // Filter out ranges of zero length.
  FilterEmptyRanges(function);
}

void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
                          debug::CoverageMode mode) {
  CollectBlockCoverageInternal(function, info, mode);

  // Reset all counters on the DebugInfo to zero.
  ResetAllBlockCounts(info);
}

void PrintBlockCoverage(const CoverageFunction* function,
                        SharedFunctionInfo info,
                        bool has_nonempty_source_range,
                        bool function_is_relevant) {
  DCHECK(FLAG_trace_block_coverage);
  std::unique_ptr<char[]> function_name =
      function->name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
  i::PrintF(
      "Coverage for function='%s', SFI=%p, has_nonempty_source_range=%d, "
      "function_is_relevant=%d\n",
      function_name.get(), reinterpret_cast<void*>(info.ptr()),
      has_nonempty_source_range, function_is_relevant);
  i::PrintF("{start: %d, end: %d, count: %d}\n", function->start,
            function->end, function->count);
  for (const auto& block : function->blocks) {
    i::PrintF("{start: %d, end: %d, count: %d}\n", block.start, block.end,
              block.count);
  }
}
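// With block coverage tracing enabled (FLAG_trace_block_coverage), the output
// of PrintBlockCoverage looks roughly like the following; the concrete values
// here are hypothetical:
//
//   Coverage for function='foo', SFI=0x..., has_nonempty_source_range=1, function_is_relevant=1
//   {start: 0, end: 120, count: 3}
//   {start: 25, end: 50, count: 0}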
void CollectAndMaybeResetCounts(Isolate* isolate,
                                SharedToCounterMap* counter_map,
                                v8::debug::CoverageMode coverage_mode) {
  const bool reset_count =
      coverage_mode != v8::debug::CoverageMode::kBestEffort;

  switch (isolate->code_coverage_mode()) {
    case v8::debug::CoverageMode::kBlockBinary:
    case v8::debug::CoverageMode::kBlockCount:
    case v8::debug::CoverageMode::kPreciseBinary:
    case v8::debug::CoverageMode::kPreciseCount: {
      // Feedback vectors are already listed to prevent losing them to GC.
      DCHECK(isolate->factory()
                 ->feedback_vectors_for_profiling_tools()
                 ->IsArrayList());
      Handle<ArrayList> list = Handle<ArrayList>::cast(
          isolate->factory()->feedback_vectors_for_profiling_tools());
      for (int i = 0; i < list->Length(); i++) {
        FeedbackVector vector = FeedbackVector::cast(list->Get(i));
        SharedFunctionInfo shared = vector.shared_function_info();
        DCHECK(shared.IsSubjectToDebugging());
        uint32_t count = static_cast<uint32_t>(vector.invocation_count());
        if (reset_count) vector.clear_invocation_count();
        counter_map->Add(shared, count);
      }
      break;
    }
    case v8::debug::CoverageMode::kBestEffort: {
      DCHECK(!isolate->factory()
                  ->feedback_vectors_for_profiling_tools()
                  ->IsArrayList());
      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
      HeapObjectIterator heap_iterator(isolate->heap());
      for (HeapObject current_obj = heap_iterator.Next();
           !current_obj.is_null(); current_obj = heap_iterator.Next()) {
        if (!current_obj.IsJSFunction()) continue;
        JSFunction func = JSFunction::cast(current_obj);
        SharedFunctionInfo shared = func.shared();
        if (!shared.IsSubjectToDebugging()) continue;
        if (!(func.has_feedback_vector() ||
              func.has_closure_feedback_cell_array())) {
          continue;
        }
        uint32_t count = 0;
        if (func.has_feedback_vector()) {
          count =
              static_cast<uint32_t>(func.feedback_vector().invocation_count());
        } else if (func.raw_feedback_cell().interrupt_budget() <
                   FLAG_budget_for_feedback_vector_allocation) {
          // We haven't allocated a feedback vector, but have executed the
          // function at least once. We don't have a precise invocation count
          // here.
          count = 1;
        }
        counter_map->Add(shared, count);
      }

      // Also check functions on the stack to collect the count map. With lazy
      // feedback allocation we may miss counting functions if the feedback
      // vector wasn't allocated yet and the function's interrupt budget wasn't
      // updated (i.e. it didn't execute return / jump).
      for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
        SharedFunctionInfo shared = it.frame()->function().shared();
        if (counter_map->Get(shared) != 0) continue;
        counter_map->Add(shared, 1);
      }
      break;
    }
  }
}

// A {SFI, count} tuple is used to sort by source range (stored on
// the SFI) and call count (in the counter map).
struct SharedFunctionInfoAndCount {
  SharedFunctionInfoAndCount(SharedFunctionInfo info, uint32_t count)
      : info(info),
        count(count),
        start(StartPosition(info)),
        end(info.EndPosition()) {}

  // Sort by:
  // - start, ascending.
  // - end, descending.
  // - info.is_toplevel() first.
  // - count, descending.
  bool operator<(const SharedFunctionInfoAndCount& that) const {
    if (this->start != that.start) return this->start < that.start;
    if (this->end != that.end) return this->end > that.end;
    if (this->info.is_toplevel() != that.info.is_toplevel()) {
      return this->info.is_toplevel();
    }
    return this->count > that.count;
  }

  SharedFunctionInfo info;
  uint32_t count;
  int start;
  int end;
};

}  // anonymous namespace

std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
  DCHECK(!isolate->is_best_effort_code_coverage());
  std::unique_ptr<Coverage> result =
      Collect(isolate, isolate->code_coverage_mode());
  if (!isolate->is_collecting_type_profile() &&
      (isolate->is_precise_binary_code_coverage() ||
       isolate->is_block_binary_code_coverage())) {
    // We do not have to hold onto feedback vectors for invocations we already
    // reported. So we can reset the list.
    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
  }
  return result;
}

std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
}

std::unique_ptr<Coverage> Coverage::Collect(
    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
  // Collect call counts for all functions.
  SharedToCounterMap counter_map;
  CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);

  // Iterate shared function infos of every script and build a mapping
  // between source ranges and invocation counts.
  std::unique_ptr<Coverage> result(new Coverage());
  Script::Iterator scripts(isolate);
  for (Script script = scripts.Next(); !script.is_null();
       script = scripts.Next()) {
    if (!script.IsUserJavaScript()) continue;

    // Create and add new script data.
    Handle