/art/compiler/optimizing/ |
D | register_allocator_linear_scan.h |
      73    static void AddSorted(ScopedArenaVector<LiveInterval*>* array, LiveInterval* interval);
      110   ScopedArenaVector<LiveInterval*> unhandled_core_intervals_;
      113   ScopedArenaVector<LiveInterval*> unhandled_fp_intervals_;
      117   ScopedArenaVector<LiveInterval*>* unhandled_;
      120   ScopedArenaVector<LiveInterval*> handled_;
      124   ScopedArenaVector<LiveInterval*> active_;
      128   ScopedArenaVector<LiveInterval*> inactive_;
      132   ScopedArenaVector<LiveInterval*> physical_core_register_intervals_;
      133   ScopedArenaVector<LiveInterval*> physical_fp_register_intervals_;
      137   ScopedArenaVector<LiveInterval*> temp_intervals_;
      [all …]
|
D | ssa_builder.h |
      113   bool TypeInputsOfPhi(HPhi* phi, ScopedArenaVector<HPhi*>* worklist);
      114   bool UpdatePrimitiveType(HPhi* phi, ScopedArenaVector<HPhi*>* worklist);
      115   void ProcessPrimitiveTypePropagationWorklist(ScopedArenaVector<HPhi*>* worklist);
      134   ScopedArenaVector<HArrayGet*> ambiguous_agets_;
      135   ScopedArenaVector<HArraySet*> ambiguous_asets_;
      136   ScopedArenaVector<HNewInstance*> uninitialized_strings_;
      137   ScopedArenaVector<HInvoke*> uninitialized_string_phis_;
|
D | register_allocator_graph_color.h |
      157   ScopedArenaVector<LiveInterval*> core_intervals_;
      158   ScopedArenaVector<LiveInterval*> fp_intervals_;
      161   ScopedArenaVector<LiveInterval*> temp_intervals_;
      164   ScopedArenaVector<HInstruction*> safepoints_;
      168   ScopedArenaVector<InterferenceNode*> physical_core_nodes_;
      169   ScopedArenaVector<InterferenceNode*> physical_fp_nodes_;
|
D | stack_map_stream.h |
      95    ScopedArenaVector<uint8_t> Encode();
      134   ScopedArenaVector<BitVector*> lazy_stack_masks_;
      141   ScopedArenaVector<BitTableBuilder<InlineInfo>::Entry> current_inline_infos_;
      142   ScopedArenaVector<DexRegisterLocation> current_dex_registers_;
      143   ScopedArenaVector<DexRegisterLocation> previous_dex_registers_;
      144   ScopedArenaVector<uint32_t> dex_register_timestamp_;  // Stack map index of last change.
      150   ScopedArenaVector<BitTableBuilder<DexRegisterMapInfo>::Entry> temp_dex_register_map_;
|
D | scheduler.h |
      180   const ScopedArenaVector<SchedulingNode*>& GetDataPredecessors() const { in GetDataPredecessors()
      195   const ScopedArenaVector<SchedulingNode*>& GetOtherPredecessors() const { in GetOtherPredecessors()
      256   ScopedArenaVector<SchedulingNode*> data_predecessors_;
      257   ScopedArenaVector<SchedulingNode*> other_predecessors_;
      356   const ScopedArenaVector<SchedulingNode*>& initial_candidates);
      441   virtual SchedulingNode* PopHighestPriorityNode(ScopedArenaVector<SchedulingNode*>* nodes,
      445   static void DeleteNodeAtIndex(ScopedArenaVector<SchedulingNode*>* nodes, size_t index) { in DeleteNodeAtIndex()
      461   SchedulingNode* PopHighestPriorityNode(ScopedArenaVector<SchedulingNode*>* nodes, in PopHighestPriorityNode()
      483   SchedulingNode* PopHighestPriorityNode(ScopedArenaVector<SchedulingNode*>* nodes,
      490   SchedulingNode* SelectMaterializedCondition(ScopedArenaVector<SchedulingNode*>* nodes,
      [all …]
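The PopHighestPriorityNode/DeleteNodeAtIndex pair above suggests the usual pattern for treating an unsorted vector as a priority pool: scan for the best candidate, then delete it in O(1) by swapping it with the back element. Below is a generic sketch of that idiom; the ToyNode type and its priority field are invented for illustration, and ART's schedulers apply their own selection heuristics.

    #include <cstddef>
    #include <vector>

    struct ToyNode { int priority = 0; };  // Stand-in for SchedulingNode.

    // Scan for the best candidate, then swap-remove it. The O(n) scan is fine
    // for the small candidate sets a scheduler sees, and swap-remove keeps
    // deletion O(1) because the vector is a pool, not an ordered sequence.
    ToyNode* PopHighestPriorityNode(std::vector<ToyNode*>* nodes) {
      if (nodes->empty()) return nullptr;
      size_t best = 0;
      for (size_t i = 1; i < nodes->size(); ++i) {
        if ((*nodes)[i]->priority > (*nodes)[best]->priority) best = i;
      }
      ToyNode* result = (*nodes)[best];
      (*nodes)[best] = nodes->back();  // One plausible DeleteNodeAtIndex shape.
      nodes->pop_back();
      return result;
    }

    int main() {
      ToyNode a{1}, b{3}, c{2};
      std::vector<ToyNode*> nodes = {&a, &b, &c};
      ToyNode* top = PopHighestPriorityNode(&nodes);
      return (top == &b && nodes.size() == 2) ? 0 : 1;
    }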
|
D | block_builder.h |
      81    ScopedArenaVector<HBasicBlock*> branch_targets_;
      82    ScopedArenaVector<HBasicBlock*> throwing_blocks_;
|
D | instruction_builder.h |
      76    ScopedArenaVector<HInstruction*>* GetLocalsFor(HBasicBlock* block);
      79    ScopedArenaVector<HInstruction*>* GetLocalsForWithAllocation(
      80        HBasicBlock* block, ScopedArenaVector<HInstruction*>* locals, const size_t vregs);
      325   ScopedArenaVector<ScopedArenaVector<HInstruction*>> locals_for_;
      327   ScopedArenaVector<HInstruction*>* current_locals_;
      335   ScopedArenaVector<HBasicBlock*> loop_headers_;
|
D | linear_order.cc |
      40    static void AddToListForLinearization(ScopedArenaVector<HBasicBlock*>* worklist, in AddToListForLinearization()
      98    ScopedArenaVector<uint32_t> forward_predecessors(graph->GetBlocks().size(), in LinearizeGraphInternal()
      111   ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocLinearOrder)); in LinearizeGraphInternal()
|
D | load_store_elimination.cc |
      834    void KeepStoresIfAliasedToLocation(ScopedArenaVector<ValueRecord>& heap_values, in KeepStoresIfAliasedToLocation()
      959    bool MaterializeLoopPhis(const ScopedArenaVector<size_t>& phi_placeholder_indexes,
      1038   ScopedArenaVector<ValueRecord>& heap_values = in VisitDeoptimize()
      1073   ScopedArenaVector<ValueRecord>& heap_values = heap_values_for_[block->GetBlockId()]; in HandleExit()
      1098   ScopedArenaVector<ValueRecord>& heap_values = in HandleInvoke()
      1180   ScopedArenaVector<ValueRecord>& heap_values = in VisitNewInstance()
      1217   ScopedArenaVector<ValueRecord>& heap_values = in VisitNewArray()
      1245   ScopedArenaVector<ScopedArenaVector<ValueRecord>> heap_values_for_;
      1254   ScopedArenaVector<LoadStoreRecord> loads_and_stores_;
      1259   ScopedArenaVector<HInstruction*> substitute_instructions_for_loads_;
      [all …]
|
D | register_allocator_graph_color.cc |
      236   ScopedArenaDeque<ScopedArenaVector<InterferenceNode*>>* storage) { in AddInterference()
      243   ScopedArenaVector<InterferenceNode*>::allocator_type adapter(storage->get_allocator()); in AddInterference()
      294   ScopedArenaDeque<ScopedArenaVector<CoalesceOpportunity*>>* storage) { in AddCoalesceOpportunity()
      296   ScopedArenaVector<CoalesceOpportunity*>::allocator_type adapter(storage->get_allocator()); in AddCoalesceOpportunity()
      379   ScopedArenaVector<InterferenceNode*>* adjacent_nodes_;  // Owned by ColoringIteration.
      382   ScopedArenaVector<CoalesceOpportunity*>* coalesce_opportunities_;  // Owned by ColoringIteration.
      457   void BuildInterferenceGraph(const ScopedArenaVector<LiveInterval*>& intervals,
      458       const ScopedArenaVector<InterferenceNode*>& physical_nodes);
      534   ScopedArenaVector<InterferenceNode*> prunable_nodes_;
      558   ScopedArenaDeque<ScopedArenaVector<InterferenceNode*>> adjacent_nodes_links_;
      [all …]
|
D | code_sinking.cc |
      126   ScopedArenaVector<HInstruction*>* worklist) { in AddInstruction()
      139   ScopedArenaVector<HInstruction*>* worklist) { in AddInputs()
      148   ScopedArenaVector<HInstruction*>* worklist) { in AddInputs()
      262   ScopedArenaVector<HInstruction*> worklist(allocator.Adapter(kArenaAllocMisc)); in SinkCodeToUncommonBranch()
      270   ScopedArenaVector<HInstruction*> move_in_order(allocator.Adapter(kArenaAllocMisc)); in SinkCodeToUncommonBranch()
|
D | ssa_phi_elimination.cc |
      37    ScopedArenaVector<HPhi*> worklist(allocator.Adapter(kArenaAllocSsaPhiElimination)); in MarkDeadPhis()
      131   ScopedArenaVector<HPhi*> worklist(allocator.Adapter(kArenaAllocSsaPhiElimination)); in Run()
      147   ScopedArenaVector<HPhi*> cycle_worklist(allocator.Adapter(kArenaAllocSsaPhiElimination)); in Run()
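The worklist vectors above, like those in ssa_builder.cc and code_sinking.cc, are all used the same way: as a LIFO stack driving a pass to a fixpoint, filled with push_back and drained with back/pop_back. Here is a minimal generic sketch of that idiom; the Node type and the marking rule are invented for illustration and stand in for HPhi and the pass-specific logic.

    #include <vector>

    struct Node {
      bool processed = false;
      std::vector<Node*> users;  // Stand-in for an instruction's uses.
    };

    // Drain the worklist LIFO, visiting each node once and feeding its users
    // back in. ART seeds such a worklist the same way, but with an arena
    // adapter, e.g.:
    //   ScopedArenaVector<HPhi*> worklist(allocator.Adapter(kArenaAllocSsaPhiElimination));
    void ProcessWorklist(std::vector<Node*>* worklist) {
      while (!worklist->empty()) {
        Node* node = worklist->back();
        worklist->pop_back();
        if (node->processed) continue;  // Guards against revisiting cycles.
        node->processed = true;
        for (Node* user : node->users) {
          worklist->push_back(user);
        }
      }
    }

    int main() {
      Node a, b;
      a.users.push_back(&b);
      std::vector<Node*> worklist = {&a};  // Seed, then run to fixpoint.
      ProcessWorklist(&worklist);
      return b.processed ? 0 : 1;
    }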
|
D | execution_subgraph.cc |
      86    ScopedArenaVector<std::bitset<kMaxFilterableSuccessors>> results( in Prune()
      110   ScopedArenaVector<ssize_t> last_succ_seen( in Prune()
      115   ScopedArenaVector<uint32_t> current_path(temporaries.Adapter(kArenaAllocLSA)); in Prune()
      268   ScopedArenaVector<ExcludedCohort>& res = excluded_list_.value(); in RecalculateExcludedCohort()
|
D | scheduler.cc |
      449   const ScopedArenaVector<SchedulingNode*>& initial_candidates) { in DumpAsDotGraph()
      473   ScopedArenaVector<SchedulingNode*>* nodes, const SchedulingGraph& graph) const { in SelectMaterializedCondition()
      513   ScopedArenaVector<SchedulingNode*>* nodes, const SchedulingGraph& graph) { in PopHighestPriorityNode()
      573   ScopedArenaVector<SchedulingNode*> scheduling_nodes(allocator.Adapter(kArenaAllocScheduler)); in Schedule()
      596   ScopedArenaVector<SchedulingNode*> candidates(allocator.Adapter(kArenaAllocScheduler)); in Schedule()
      606   ScopedArenaVector<SchedulingNode*> initial_candidates(allocator.Adapter(kArenaAllocScheduler)); in Schedule()
      630   /*inout*/ ScopedArenaVector<SchedulingNode*>* candidates) { in Schedule()
|
D | ssa_builder.cc |
      111   ScopedArenaVector<HPhi*>* worklist) { in AddDependentInstructionsToWorklist()
      173   bool SsaBuilder::TypeInputsOfPhi(HPhi* phi, ScopedArenaVector<HPhi*>* worklist) { in TypeInputsOfPhi()
      219   bool SsaBuilder::UpdatePrimitiveType(HPhi* phi, ScopedArenaVector<HPhi*>* worklist) { in UpdatePrimitiveType()
      239   ScopedArenaVector<HPhi*> worklist(local_allocator_->Adapter(kArenaAllocGraphBuilder)); in RunPrimitiveTypePropagation()
      268   void SsaBuilder::ProcessPrimitiveTypePropagationWorklist(ScopedArenaVector<HPhi*>* worklist) { in ProcessPrimitiveTypePropagationWorklist()
      325   ScopedArenaVector<HPhi*> worklist(local_allocator_->Adapter(kArenaAllocGraphBuilder)); in FixAmbiguousArrayOps()
|
D | stack_map_test.cc |
      65    ScopedArenaVector<uint8_t> memory = stream.Encode(); in TEST()
      151   ScopedArenaVector<uint8_t> memory = stream.Encode(); in TEST()
      321   ScopedArenaVector<uint8_t> memory = stream.Encode(); in TEST()
      376   ScopedArenaVector<uint8_t> memory = stream.Encode(); in TEST()
      435   ScopedArenaVector<uint8_t> memory = stream.Encode(); in TEST()
      483   ScopedArenaVector<uint8_t> memory = stream.Encode(); in TEST()
      582   ScopedArenaVector<uint8_t> memory = stream.Encode(); in TEST()
      718   ScopedArenaVector<uint8_t> memory = stream.Encode(); in TEST()
      742   ScopedArenaVector<uint8_t> memory = stream.Encode(); in TEST()
|
D | execution_subgraph.h |
      338   ScopedArenaVector<std::bitset<kMaxFilterableSuccessors>> allowed_successors_;
      344   std::optional<ScopedArenaVector<ExcludedCohort>> excluded_list_;
|
D | stack_map_stream.cc |
      313   ScopedArenaVector<uint8_t> StackMapStream::Encode() { in Encode()
      327   ScopedArenaVector<uint8_t> buffer(allocator_->Adapter(kArenaAllocStackMapStream)); in Encode()
      328   BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&buffer); in Encode()
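Encode() above drives a BitMemoryWriter over a growable byte vector. As a rough illustration of why a plain uint8_t vector makes a good backing store for a bit stream, here is a toy bit writer; the class and its WriteBits interface are invented for this sketch and do not match ART's actual BitMemoryWriter API.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Toy bit writer over a growable byte buffer: the vector grows on demand
    // and the writer packs bits into its tail byte.
    class ToyBitWriter {
     public:
      explicit ToyBitWriter(std::vector<uint8_t>* buffer) : buffer_(buffer) {}

      // Append the low `count` bits of `value`, least significant bit first.
      void WriteBits(uint32_t value, size_t count) {
        for (size_t i = 0; i < count; ++i) {
          if (bit_offset_ == 0) buffer_->push_back(0);  // Start a fresh byte.
          if ((value >> i) & 1u) {
            buffer_->back() |= static_cast<uint8_t>(1u << bit_offset_);
          }
          bit_offset_ = (bit_offset_ + 1) % 8;
        }
      }

     private:
      std::vector<uint8_t>* buffer_;
      size_t bit_offset_ = 0;  // Next free bit within the last byte.
    };

    int main() {
      std::vector<uint8_t> buffer;  // ART uses ScopedArenaVector<uint8_t> here.
      ToyBitWriter out(&buffer);
      out.WriteBits(0b101, 3);  // Three bits into the first byte.
      out.WriteBits(0x1F, 7);   // Crosses the byte boundary transparently.
      return buffer.size() == 2 ? 0 : 1;  // 10 bits written, 2 bytes used.
    }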
|
D | register_allocator_linear_scan.cc |
      303    ScopedArenaVector<LiveInterval*>& unhandled = core_register in ProcessInstruction()
      431    ScopedArenaVector<LiveInterval*> intervals( in ValidateInternal()
      440    const ScopedArenaVector<LiveInterval*>* physical_register_intervals = processing_core_registers_ in ValidateInternal()
      824    ScopedArenaVector<LiveInterval*>* intervals, ScopedArenaVector<LiveInterval*>::iterator pos) { in RemoveIntervalAndPotentialOtherHalf()
      1055   void RegisterAllocatorLinearScan::AddSorted(ScopedArenaVector<LiveInterval*>* array, in AddSorted()
      1114   ScopedArenaVector<size_t>* spill_slots = nullptr; in AllocateSpillSlotFor()
|
D | constructor_fence_redundancy_elimination.cc |
      241   ScopedArenaVector<HConstructorFence*> candidate_fences_;
|
D | optimizing_compiler.cc |
      721    ScopedArenaVector<uint8_t> stack_map = codegen->BuildStackMaps(code_item_for_osr_check); in Emit()
      1129   static ScopedArenaVector<uint8_t> CreateJniStackMap(ScopedArenaAllocator* allocator, in CreateJniStackMap()
      1203   ScopedArenaVector<uint8_t> stack_map = CreateJniStackMap( in JniCompile()
      1262   ScopedArenaVector<uint8_t> stack_map = CreateJniStackMap( in JitCompile()
      1362   ScopedArenaVector<uint8_t> stack_map = codegen->BuildStackMaps(code_item); in JitCompile()
|
D | loop_optimization.h |
      241   void SetAlignmentStrategy(const ScopedArenaVector<uint32_t>& peeling_votes,
|
D | load_store_analysis.h |
      645   ScopedArenaVector<ReferenceInfo*> ref_info_array_;  // All references used for heap accesses.
      646   ScopedArenaVector<HeapLocation*> heap_locations_;  // All heap locations.
|
/art/runtime/verifier/ |
D | reg_type_cache.h |
      209   ScopedArenaVector<const RegType*> entries_;
      212   ScopedArenaVector<std::pair<GcRoot<mirror::Class>, const RegType*>> klass_entries_;
|
/art/libartbase/base/ |
D | scoped_arena_containers.h |
      54    using ScopedArenaVector = dchecked_vector<T, ScopedArenaAllocatorAdapter<T>>; variable
      57    using ScopedArenaPriorityQueue = std::priority_queue<T, ScopedArenaVector<T>, Comparator>;
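Line 54 is the heart of the whole listing: ScopedArenaVector is a std-style vector whose allocator adapter draws from a ScopedArenaAllocator, so every container above is released wholesale when its arena scope unwinds rather than element by element. A standalone sketch of the shape follows; the Toy* types mimic these declarations but forward to std::allocator, whereas the real adapter carves memory out of an arena and tags it with an ArenaAllocKind for allocation accounting.

    #include <cstddef>
    #include <memory>
    #include <vector>

    enum ArenaAllocKind { kArenaAllocMisc, kArenaAllocGraphBuilder };  // Subset, for the sketch.

    // Stand-in for ScopedArenaAllocatorAdapter<T>. The tag is carried around the
    // way the real adapter carries its ArenaAllocKind; allocation itself is
    // plain heap allocation here rather than arena bump allocation.
    template <typename T>
    struct ToyArenaAdapter {
      using value_type = T;
      explicit ToyArenaAdapter(ArenaAllocKind kind) : kind_(kind) {}
      template <typename U>
      ToyArenaAdapter(const ToyArenaAdapter<U>& other) : kind_(other.kind_) {}
      T* allocate(std::size_t n) { return std::allocator<T>().allocate(n); }
      void deallocate(T* p, std::size_t n) { std::allocator<T>().deallocate(p, n); }
      ArenaAllocKind kind_;
    };
    template <typename T, typename U>
    bool operator==(const ToyArenaAdapter<T>&, const ToyArenaAdapter<U>&) { return true; }
    template <typename T, typename U>
    bool operator!=(const ToyArenaAdapter<T>&, const ToyArenaAdapter<U>&) { return false; }

    // Mirrors line 54 above (minus the dchecked_vector bounds-checking wrapper).
    template <typename T>
    using ToyArenaVector = std::vector<T, ToyArenaAdapter<T>>;

    // Stand-in for ScopedArenaAllocator: Adapter(kind) hands out a tagged
    // adapter that a container converts to its own element type.
    struct ToyScopedArenaAllocator {
      ToyArenaAdapter<char> Adapter(ArenaAllocKind kind) { return ToyArenaAdapter<char>(kind); }
    };

    int main() {
      ToyScopedArenaAllocator allocator;
      // Same construction idiom as the .cc hits above, e.g. code_sinking.cc:262:
      //   ScopedArenaVector<HInstruction*> worklist(allocator.Adapter(kArenaAllocMisc));
      ToyArenaVector<int> worklist(allocator.Adapter(kArenaAllocMisc));
      worklist.push_back(42);
      return worklist.back() == 42 ? 0 : 1;
    }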
|