/art/compiler/optimizing/

  register_allocator_linear_scan.h
      73: static void AddSorted(ScopedArenaVector<LiveInterval*>* array, LiveInterval* interval);
     110: ScopedArenaVector<LiveInterval*> unhandled_core_intervals_;
     113: ScopedArenaVector<LiveInterval*> unhandled_fp_intervals_;
     117: ScopedArenaVector<LiveInterval*>* unhandled_;
     120: ScopedArenaVector<LiveInterval*> handled_;
     124: ScopedArenaVector<LiveInterval*> active_;
     128: ScopedArenaVector<LiveInterval*> inactive_;
     132: ScopedArenaVector<LiveInterval*> physical_core_register_intervals_;
     133: ScopedArenaVector<LiveInterval*> physical_fp_register_intervals_;
     137: ScopedArenaVector<LiveInterval*> temp_intervals_;
     [all …]

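The `unhandled_`, `active_`, `inactive_`, and `handled_` members above are the textbook working sets of linear-scan register allocation (Poletto & Sarkar). For orientation, here is a deliberately small, self-contained sketch of that classic algorithm; it is not ART's allocator (which also splits intervals and uses `inactive_` for lifetime holes), and `Interval`/`LinearScan` are hypothetical names:

```cpp
#include <algorithm>
#include <vector>

struct Interval {
  int start;
  int end;
  int reg = -1;  // Assigned register, or -1 when spilled.
};

void LinearScan(std::vector<Interval*>& intervals, int num_regs) {
  std::sort(intervals.begin(), intervals.end(),
            [](const Interval* a, const Interval* b) { return a->start < b->start; });
  std::vector<Interval*> active;  // Intervals live "now", kept sorted by end point.
  std::vector<bool> in_use(static_cast<size_t>(num_regs), false);

  for (Interval* current : intervals) {
    // Expire: anything that ended before 'current' starts releases its register.
    while (!active.empty() && active.front()->end < current->start) {
      in_use[static_cast<size_t>(active.front()->reg)] = false;
      active.erase(active.begin());
    }
    auto free_reg = std::find(in_use.begin(), in_use.end(), false);
    if (free_reg != in_use.end()) {
      current->reg = static_cast<int>(free_reg - in_use.begin());
      in_use[static_cast<size_t>(current->reg)] = true;
    } else {
      // No free register: spill whichever of 'current' and the furthest-ending
      // active interval lives longer; the other keeps (or steals) the register.
      Interval* furthest = active.back();
      if (furthest->end > current->end) {
        current->reg = furthest->reg;  // Steal; 'furthest' is spilled.
        furthest->reg = -1;
        active.pop_back();
      } else {
        current->reg = -1;  // 'current' itself is spilled.
        continue;
      }
    }
    active.insert(std::upper_bound(active.begin(), active.end(), current,
                                   [](const Interval* a, const Interval* b) {
                                     return a->end < b->end;
                                   }),
                  current);
  }
}
```

Keeping `active` sorted by end point makes both expiry and the spill decision cheap operations at the ends of the vector.
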
  register_allocator_graph_color.h
     157: ScopedArenaVector<LiveInterval*> core_intervals_;
     158: ScopedArenaVector<LiveInterval*> fp_intervals_;
     161: ScopedArenaVector<LiveInterval*> temp_intervals_;
     164: ScopedArenaVector<HInstruction*> safepoints_;
     168: ScopedArenaVector<InterferenceNode*> physical_core_nodes_;
     169: ScopedArenaVector<InterferenceNode*> physical_fp_nodes_;

  ssa_builder.h
     116: bool TypeInputsOfPhi(HPhi* phi, ScopedArenaVector<HPhi*>* worklist);
     117: bool UpdatePrimitiveType(HPhi* phi, ScopedArenaVector<HPhi*>* worklist);
     118: void ProcessPrimitiveTypePropagationWorklist(ScopedArenaVector<HPhi*>* worklist);
     138: ScopedArenaVector<HArrayGet*> ambiguous_agets_;
     139: ScopedArenaVector<HArraySet*> ambiguous_asets_;
     140: ScopedArenaVector<HNewInstance*> uninitialized_strings_;
     141: ScopedArenaVector<HInvoke*> uninitialized_string_phis_;

  stack_map_stream.h
      93: ScopedArenaVector<uint8_t> Encode();
     115: ScopedArenaVector<BitVector*> lazy_stack_masks_;
     122: ScopedArenaVector<BitTableBuilder<InlineInfo>::Entry> current_inline_infos_;
     123: ScopedArenaVector<DexRegisterLocation> current_dex_registers_;
     124: ScopedArenaVector<DexRegisterLocation> previous_dex_registers_;
     125: ScopedArenaVector<uint32_t> dex_register_timestamp_;  // Stack map index of last change.
     131: ScopedArenaVector<BitTableBuilder<DexRegisterMapInfo>::Entry> temp_dex_register_map_;

  scheduler.h
     175: const ScopedArenaVector<SchedulingNode*>& GetDataPredecessors() const {  // in GetDataPredecessors()
     184: const ScopedArenaVector<SchedulingNode*>& GetOtherPredecessors() const {  // in GetOtherPredecessors()
     237: ScopedArenaVector<SchedulingNode*> data_predecessors_;
     238: ScopedArenaVector<SchedulingNode*> other_predecessors_;
     295: const ScopedArenaVector<SchedulingNode*>& initial_candidates);
     373: virtual SchedulingNode* PopHighestPriorityNode(ScopedArenaVector<SchedulingNode*>* nodes,
     377: static void DeleteNodeAtIndex(ScopedArenaVector<SchedulingNode*>* nodes, size_t index) {  // in DeleteNodeAtIndex()
     393: SchedulingNode* PopHighestPriorityNode(ScopedArenaVector<SchedulingNode*>* nodes,  // in PopHighestPriorityNode()
     415: SchedulingNode* PopHighestPriorityNode(ScopedArenaVector<SchedulingNode*>* nodes,
     422: SchedulingNode* SelectMaterializedCondition(ScopedArenaVector<SchedulingNode*>* nodes,
     [all …]

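`PopHighestPriorityNode()` and `DeleteNodeAtIndex()` above point at a common idiom for an unordered candidate vector: select by linear scan, then remove in O(1) by swapping with the last element. A generic sketch of that idiom, with `std::vector` standing in for `ScopedArenaVector` and hypothetical helper names:

```cpp
#include <cassert>
#include <cstddef>
#include <vector>

// O(1) removal from an unordered vector: overwrite the slot with the last
// element and shrink. Order is not preserved, which is fine for a candidate set.
template <typename T>
void DeleteAtIndex(std::vector<T>* nodes, size_t index) {
  assert(index < nodes->size());
  (*nodes)[index] = nodes->back();
  nodes->pop_back();
}

// Pop the best candidate by a linear scan: O(n) per pop, with no heap to keep
// balanced and no extra allocation.
template <typename T, typename Priority>
T PopHighestPriority(std::vector<T>* nodes, Priority priority) {
  assert(!nodes->empty());
  size_t best = 0;
  for (size_t i = 1; i < nodes->size(); ++i) {
    if (priority((*nodes)[i]) > priority((*nodes)[best])) {
      best = i;
    }
  }
  T result = (*nodes)[best];
  DeleteAtIndex(nodes, best);
  return result;
}
```

For the small per-block candidate sets a scheduler sees, the O(n) scan is usually cheaper in practice than maintaining a `std::priority_queue`.
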
  load_store_elimination.cc
      99: ScopedArenaVector<HInstruction*>(heap_locations_collector.  // in LSEVisitor()
     328: void KeepStoresIfAliasedToLocation(ScopedArenaVector<HInstruction*>& heap_values,  // in KeepStoresIfAliasedToLocation()
     340: ScopedArenaVector<HInstruction*>& heap_values = heap_values_for_[block_id];  // in HandleLoopSideEffects()
     342: ScopedArenaVector<HInstruction*>& pre_header_heap_values =  // in HandleLoopSideEffects()
     396: ScopedArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];  // in MergePredecessorValues()
     472: ScopedArenaVector<HInstruction*>& pred_values =  // in MergePredecessorValues()
     552: ScopedArenaVector<HInstruction*>& heap_values =  // in VisitGetLocation()
     598: ScopedArenaVector<HInstruction*>& heap_values =  // in VisitSetLocation()
     696: const ScopedArenaVector<HInstruction*>& heap_values =  // in VisitDeoptimize()
     735: const ScopedArenaVector<HInstruction*>& heap_values =  // in HandleExit()
     [all …]

  block_builder.h
      76: ScopedArenaVector<HBasicBlock*> branch_targets_;
      77: ScopedArenaVector<HBasicBlock*> throwing_blocks_;

  instruction_builder.h
      83: ScopedArenaVector<HInstruction*>* GetLocalsFor(HBasicBlock* block);
      86: ScopedArenaVector<HInstruction*>* GetLocalsForWithAllocation(
      87:     HBasicBlock* block, ScopedArenaVector<HInstruction*>* locals, const size_t vregs);
     325: ScopedArenaVector<ScopedArenaVector<HInstruction*>> locals_for_;
     327: ScopedArenaVector<HInstruction*>* current_locals_;
     335: ScopedArenaVector<HBasicBlock*> loop_headers_;

  linear_order.cc
      40: static void AddToListForLinearization(ScopedArenaVector<HBasicBlock*>* worklist,  // in AddToListForLinearization()
      98: ScopedArenaVector<uint32_t> forward_predecessors(graph->GetBlocks().size(),  // in LinearizeGraphInternal()
     111: ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocLinearOrder));  // in LinearizeGraphInternal()

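`LinearizeGraphInternal()` seeds a per-block `forward_predecessors` count and a block worklist, which is the shape of a Kahn-style topological ordering. A self-contained sketch of that general pattern, assuming blocks are plain indices; this is only the skeleton, since ART's real linearization must also special-case loop back edges, which a plain topological sort cannot order:

```cpp
#include <cstdint>
#include <vector>

// Blocks are indices 0..n-1; successors[b] lists the forward edges out of b.
std::vector<uint32_t> LinearizeKahn(const std::vector<std::vector<uint32_t>>& successors) {
  const uint32_t num_blocks = static_cast<uint32_t>(successors.size());
  std::vector<uint32_t> forward_predecessors(num_blocks, 0u);
  for (const std::vector<uint32_t>& succs : successors) {
    for (uint32_t s : succs) {
      ++forward_predecessors[s];
    }
  }
  std::vector<uint32_t> worklist;
  std::vector<uint32_t> order;
  for (uint32_t b = 0; b < num_blocks; ++b) {
    if (forward_predecessors[b] == 0u) {
      worklist.push_back(b);  // Ready: no predecessors left to emit.
    }
  }
  while (!worklist.empty()) {
    uint32_t block = worklist.back();
    worklist.pop_back();
    order.push_back(block);
    for (uint32_t s : successors[block]) {
      if (--forward_predecessors[s] == 0u) {
        worklist.push_back(s);  // All of s's predecessors are now emitted.
      }
    }
  }
  return order;  // Contains every block exactly once iff the edge set is acyclic.
}
```
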
  register_allocator_graph_color.cc
     236: ScopedArenaDeque<ScopedArenaVector<InterferenceNode*>>* storage) {  // in AddInterference()
     243: ScopedArenaVector<InterferenceNode*>::allocator_type adapter(storage->get_allocator());  // in AddInterference()
     294: ScopedArenaDeque<ScopedArenaVector<CoalesceOpportunity*>>* storage) {  // in AddCoalesceOpportunity()
     296: ScopedArenaVector<CoalesceOpportunity*>::allocator_type adapter(storage->get_allocator());  // in AddCoalesceOpportunity()
     379: ScopedArenaVector<InterferenceNode*>* adjacent_nodes_;  // Owned by ColoringIteration.
     382: ScopedArenaVector<CoalesceOpportunity*>* coalesce_opportunities_;  // Owned by ColoringIteration.
     457: void BuildInterferenceGraph(const ScopedArenaVector<LiveInterval*>& intervals,
     458:     const ScopedArenaVector<InterferenceNode*>& physical_nodes);
     534: ScopedArenaVector<InterferenceNode*> prunable_nodes_;
     558: ScopedArenaDeque<ScopedArenaVector<InterferenceNode*>> adjacent_nodes_links_;
     [all …]

  code_sinking.cc
     125: ScopedArenaVector<HInstruction*>* worklist) {  // in AddInstruction()
     138: ScopedArenaVector<HInstruction*>* worklist) {  // in AddInputs()
     147: ScopedArenaVector<HInstruction*>* worklist) {  // in AddInputs()
     261: ScopedArenaVector<HInstruction*> worklist(allocator.Adapter(kArenaAllocMisc));  // in SinkCodeToUncommonBranch()
     269: ScopedArenaVector<HInstruction*> move_in_order(allocator.Adapter(kArenaAllocMisc));  // in SinkCodeToUncommonBranch()

  ssa_phi_elimination.cc
      37: ScopedArenaVector<HPhi*> worklist(allocator.Adapter(kArenaAllocSsaPhiElimination));  // in MarkDeadPhis()
     131: ScopedArenaVector<HPhi*> worklist(allocator.Adapter(kArenaAllocSsaPhiElimination));  // in Run()
     147: ScopedArenaVector<HPhi*> cycle_worklist(allocator.Adapter(kArenaAllocSsaPhiElimination));  // in Run()

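The `worklist` vectors here and in ssa_builder.cc drive fixed-point passes: pop a node, update it, and push anything the update may have affected. A minimal sketch of that LIFO-worklist pattern on a hypothetical `Node` graph, with `std::vector` in place of `ScopedArenaVector` (this is the pattern, not ART's actual phi analysis):

```cpp
#include <vector>

struct Node {
  std::vector<Node*> users;
  bool live = false;
};

// Seed the worklist with roots, then drain it. Each pop may enqueue more work;
// the 'live' flag ensures each node is processed once, so a fixed point is reached.
void MarkLive(std::vector<Node*> worklist) {
  while (!worklist.empty()) {
    Node* node = worklist.back();  // LIFO: push_back/pop_back are O(1) on a vector.
    worklist.pop_back();
    if (node->live) {
      continue;  // Already handled; nothing new to propagate.
    }
    node->live = true;
    for (Node* user : node->users) {
      worklist.push_back(user);  // A new fact may enable further updates.
    }
  }
}
```
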
  scheduler.cc
     471: const ScopedArenaVector<SchedulingNode*>& initial_candidates) {  // in DumpAsDotGraph()
     495: ScopedArenaVector<SchedulingNode*>* nodes, const SchedulingGraph& graph) const {  // in SelectMaterializedCondition()
     535: ScopedArenaVector<SchedulingNode*>* nodes, const SchedulingGraph& graph) {  // in PopHighestPriorityNode()
     594: ScopedArenaVector<SchedulingNode*> scheduling_nodes(allocator.Adapter(kArenaAllocScheduler));  // in Schedule()
     617: ScopedArenaVector<SchedulingNode*> candidates(allocator.Adapter(kArenaAllocScheduler));  // in Schedule()
     627: ScopedArenaVector<SchedulingNode*> initial_candidates(allocator.Adapter(kArenaAllocScheduler));  // in Schedule()
     651: /*inout*/ ScopedArenaVector<SchedulingNode*>* candidates) {  // in Schedule()

  ssa_builder.cc
     111: ScopedArenaVector<HPhi*>* worklist) {  // in AddDependentInstructionsToWorklist()
     173: bool SsaBuilder::TypeInputsOfPhi(HPhi* phi, ScopedArenaVector<HPhi*>* worklist) {  // in TypeInputsOfPhi()
     219: bool SsaBuilder::UpdatePrimitiveType(HPhi* phi, ScopedArenaVector<HPhi*>* worklist) {  // in UpdatePrimitiveType()
     239: ScopedArenaVector<HPhi*> worklist(local_allocator_->Adapter(kArenaAllocGraphBuilder));  // in RunPrimitiveTypePropagation()
     268: void SsaBuilder::ProcessPrimitiveTypePropagationWorklist(ScopedArenaVector<HPhi*>* worklist) {  // in ProcessPrimitiveTypePropagationWorklist()
     325: ScopedArenaVector<HPhi*> worklist(local_allocator_->Adapter(kArenaAllocGraphBuilder));  // in FixAmbiguousArrayOps()

  stack_map_test.cc
      65: ScopedArenaVector<uint8_t> memory = stream.Encode();  // in TEST()
     151: ScopedArenaVector<uint8_t> memory = stream.Encode();  // in TEST()
     321: ScopedArenaVector<uint8_t> memory = stream.Encode();  // in TEST()
     376: ScopedArenaVector<uint8_t> memory = stream.Encode();  // in TEST()
     435: ScopedArenaVector<uint8_t> memory = stream.Encode();  // in TEST()
     483: ScopedArenaVector<uint8_t> memory = stream.Encode();  // in TEST()
     582: ScopedArenaVector<uint8_t> memory = stream.Encode();  // in TEST()
     726: ScopedArenaVector<uint8_t> memory = stream.Encode();  // in TEST()
     750: ScopedArenaVector<uint8_t> memory = stream.Encode();  // in TEST()

  stack_map_stream.cc
     293: ScopedArenaVector<uint8_t> StackMapStream::Encode() {  // in Encode()
     297: ScopedArenaVector<uint8_t> buffer(allocator_->Adapter(kArenaAllocStackMapStream));  // in Encode()
     298: BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&buffer);  // in Encode()

  register_allocator_linear_scan.cc
      303: ScopedArenaVector<LiveInterval*>& unhandled = core_register  // in ProcessInstruction()
      431: ScopedArenaVector<LiveInterval*> intervals(  // in ValidateInternal()
      440: const ScopedArenaVector<LiveInterval*>* physical_register_intervals = processing_core_registers_  // in ValidateInternal()
      824: ScopedArenaVector<LiveInterval*>* intervals, ScopedArenaVector<LiveInterval*>::iterator pos) {  // in RemoveIntervalAndPotentialOtherHalf()
     1055: void RegisterAllocatorLinearScan::AddSorted(ScopedArenaVector<LiveInterval*>* array,  // in AddSorted()
     1114: ScopedArenaVector<size_t>* spill_slots = nullptr;  // in AllocateSpillSlotFor()

  constructor_fence_redundancy_elimination.cc
     241: ScopedArenaVector<HConstructorFence*> candidate_fences_;

  optimizing_compiler.cc
      755: ScopedArenaVector<uint8_t> stack_map = codegen->BuildStackMaps(code_item_for_osr_check);  // in Emit()
     1159: static ScopedArenaVector<uint8_t> CreateJniStackMap(ScopedArenaAllocator* allocator,  // in CreateJniStackMap()
     1228: ScopedArenaVector<uint8_t> stack_map = CreateJniStackMap(&stack_map_allocator,  // in JniCompile()
     1280: ScopedArenaVector<uint8_t> stack_map = CreateJniStackMap(&stack_map_allocator,  // in JitCompile()
     1377: ScopedArenaVector<uint8_t> stack_map = codegen->BuildStackMaps(code_item);  // in JitCompile()

  register_allocator.cc
     119: ScopedArenaVector<ArenaBitVector*> liveness_of_values(  // in ValidateIntervals()

/art/runtime/verifier/

  reg_type_cache.h
     198: ScopedArenaVector<const RegType*> entries_;
     201: ScopedArenaVector<std::pair<GcRoot<mirror::Class>, const RegType*>> klass_entries_;

  method_verifier.h
     101: ScopedArenaVector<RegisterLineArenaUniquePtr> register_lines_;

/art/libartbase/base/

  scoped_arena_containers.h
      52: using ScopedArenaVector = dchecked_vector<T, ScopedArenaAllocatorAdapter<T>>;
      55: using ScopedArenaPriorityQueue = std::priority_queue<T, ScopedArenaVector<T>, Comparator>;

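The alias on line 52 is the heart of the matter: `ScopedArenaVector` is a `std::vector` variant (`dchecked_vector`) whose allocator draws from an arena, so every container built during a pass is released in one shot when the arena scope unwinds. A minimal sketch of that technique under stated assumptions: `MiniArena` and `MiniArenaAllocator` are hypothetical stand-ins for `ScopedArenaAllocator` and `ScopedArenaAllocatorAdapter`, whose real counterparts additionally support nested scopes and debug fill:

```cpp
#include <cstddef>
#include <cstdint>
#include <memory>
#include <new>
#include <vector>

// A fixed-capacity bump allocator: allocation is a pointer increment, and
// everything is "freed" at once when the arena is destroyed.
class MiniArena {
 public:
  MiniArena() : storage_(new uint8_t[kCapacity]), used_(0) {}

  void* Alloc(size_t bytes, size_t alignment) {
    size_t offset = (used_ + alignment - 1) & ~(alignment - 1);
    if (offset + bytes > kCapacity) throw std::bad_alloc();
    used_ = offset + bytes;
    return storage_.get() + offset;
  }

 private:
  static constexpr size_t kCapacity = 1u << 20;  // 1 MiB is plenty for a sketch.
  std::unique_ptr<uint8_t[]> storage_;
  size_t used_;
};

// The adapter satisfies the C++ Allocator requirements; deallocate is a no-op.
template <typename T>
class MiniArenaAllocator {
 public:
  using value_type = T;

  explicit MiniArenaAllocator(MiniArena* arena) : arena_(arena) {}
  template <typename U>
  MiniArenaAllocator(const MiniArenaAllocator<U>& other) : arena_(other.arena_) {}

  T* allocate(size_t n) {
    return static_cast<T*>(arena_->Alloc(n * sizeof(T), alignof(T)));
  }
  void deallocate(T*, size_t) {}  // Reclaimed by the arena, not per object.

  MiniArena* arena_;
};

template <typename T, typename U>
bool operator==(const MiniArenaAllocator<T>& a, const MiniArenaAllocator<U>& b) {
  return a.arena_ == b.arena_;
}
template <typename T, typename U>
bool operator!=(const MiniArenaAllocator<T>& a, const MiniArenaAllocator<U>& b) {
  return !(a == b);
}

// Mirrors: using ScopedArenaVector = dchecked_vector<T, ScopedArenaAllocatorAdapter<T>>;
template <typename T>
using MiniArenaVector = std::vector<T, MiniArenaAllocator<T>>;

int main() {
  MiniArena arena;
  // Compare the call sites above: ScopedArenaVector<HPhi*> w(allocator.Adapter(...));
  MiniArenaVector<int> worklist(MiniArenaAllocator<int>(&arena));
  worklist.push_back(42);
}  // Arena goes out of scope; all vector storage vanishes with it.
```

The no-op `deallocate()` is the design point: per-object frees are skipped entirely, which is what makes the `allocator.Adapter(kArenaAlloc...)` call sites throughout this listing cheap for short-lived compiler-pass data.
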
/art/runtime/jit/

  profile_saver.cc
     286: ScopedArenaVector<ObjPtr<mirror::Class>>* out)  // in GetClassesVisitor()
     305: ScopedArenaVector<ObjPtr<mirror::Class>>* const out_;
     356: ScopedArenaVector<ObjPtr<mirror::Class>> classes(allocator->Adapter());  // in SampleClassesAndExecutedMethods()

/art/compiler/

  exception_test.cc
      84: ScopedArenaVector<uint8_t> stack_map = stack_maps.Encode();  // in SetUp()