/art/compiler/optimizing/

D | graph_test.cc
     28  static HBasicBlock* createIfBlock(HGraph* graph, ArenaAllocator* allocator) {      in createIfBlock()
     40  static HBasicBlock* createGotoBlock(HGraph* graph, ArenaAllocator* allocator) {    in createGotoBlock()
     48  static HBasicBlock* createReturnBlock(HGraph* graph, ArenaAllocator* allocator) {  in createReturnBlock()
     56  static HBasicBlock* createExitBlock(HGraph* graph, ArenaAllocator* allocator) {    in createExitBlock()
     69  ArenaAllocator allocator(&pool);  in TEST() (local)
    104  ArenaAllocator allocator(&pool);  in TEST() (local)
    139  ArenaAllocator allocator(&pool);  in TEST() (local)
    175  ArenaAllocator allocator(&pool);  in TEST() (local)
    211  ArenaAllocator allocator(&pool);  in TEST() (local)
    251  ArenaAllocator allocator(&pool);  in TEST() (local)
    [all …]
D | live_ranges_test.cc
     30  static HGraph* BuildGraph(const uint16_t* data, ArenaAllocator* allocator) {  in BuildGraph()
     58  ArenaAllocator allocator(&pool);  in TEST() (local)
    105  ArenaAllocator allocator(&pool);  in TEST() (local)
    154  ArenaAllocator allocator(&pool);  in TEST() (local)
    230  ArenaAllocator allocator(&pool);  in TEST() (local)
D | parallel_move_test.cc
     27  explicit TestParallelMoveResolver(ArenaAllocator* allocator) : ParallelMoveResolver(allocator) {}  in TestParallelMoveResolver()
     67  static HParallelMove* BuildParallelMove(ArenaAllocator* allocator,  in BuildParallelMove()
     81  ArenaAllocator allocator(&pool);  in TEST() (local)
    100  ArenaAllocator allocator(&pool);  in TEST() (local)
D | register_allocator_test.cc
     36  ArenaAllocator allocator(&pool);  in Check() (local)
     57  ArenaAllocator allocator(&pool);  in TEST() (local)
    250  static HGraph* BuildSSAGraph(const uint16_t* data, ArenaAllocator* allocator) {  in BuildSSAGraph()
    298  ArenaAllocator allocator(&pool);  in TEST() (local)
    330  ArenaAllocator allocator(&pool);  in TEST() (local)
D | live_interval_test.cc
     27  ArenaAllocator allocator(&pool);  in TEST() (local)
     44  ArenaAllocator allocator(&pool);  in TEST() (local)
     72  ArenaAllocator allocator(&pool);  in TEST() (local)
    100  ArenaAllocator allocator(&pool);  in TEST() (local)
    192  ArenaAllocator allocator(&pool);  in TEST() (local)
D | parallel_move_resolver.h
     36  explicit ParallelMoveResolver(ArenaAllocator* allocator) : moves_(allocator, 32) {}  in ParallelMoveResolver()
D | code_generator.cc
     33  void CodeGenerator::CompileBaseline(CodeAllocator* allocator, bool is_leaf) {  in CompileBaseline()
     69  void CodeGenerator::CompileOptimized(CodeAllocator* allocator) {  in CompileOptimized()
    254  CodeGenerator* CodeGenerator::Create(ArenaAllocator* allocator,  in Create()
D | codegen_test.cc
     51  static void Run(const InternalCodeAllocator& allocator,  in Run()
     76  InternalCodeAllocator allocator;  in TestCode() (local)
D | code_generator_x86_64.h
     61  ParallelMoveResolverX86_64(ArenaAllocator* allocator, CodeGeneratorX86_64* codegen)  in ParallelMoveResolverX86_64()
D | ssa_liveness_analysis.h
     28  BlockInfo(ArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)  in BlockInfo()
    137  : allocator_(allocator),  in allocator_() (argument)
    149  static LiveInterval* MakeFixedInterval(ArenaAllocator* allocator, int reg, Primitive::Type type) {  in MakeFixedInterval()
D | code_generator_x86.h
     65  ParallelMoveResolverX86(ArenaAllocator* allocator, CodeGeneratorX86* codegen)  in ParallelMoveResolverX86()
D | code_generator_arm.h
     65  ParallelMoveResolverARM(ArenaAllocator* allocator, CodeGeneratorARM* codegen)  in ParallelMoveResolverARM()
D | linearize_test.cc
     38  ArenaAllocator allocator(&pool);  in TestCode() (local)
D | optimizing_compiler.cc
    128  CodeVectorAllocator allocator;  in TryCompile() (local)
D | pretty_printer_test.cc
     32  ArenaAllocator allocator(&pool);  in TestCode() (local)
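The optimizing-compiler tests above share one setup idiom: an ArenaPool on the stack, an ArenaAllocator constructed over it (ArenaAllocator allocator(&pool);), and the test's graph objects carved out of that arena. Below is a minimal, self-contained sketch of that pooled-arena pattern; Pool and Arena are simplified stand-ins written for illustration, not the real ART classes.

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Simplified stand-in for ArenaPool: owns large blocks of memory.
class Pool {
 public:
  uint8_t* AllocBlock(size_t size) {
    blocks_.emplace_back(size);
    return blocks_.back().data();
  }
 private:
  std::vector<std::vector<uint8_t>> blocks_;
};

// Simplified stand-in for ArenaAllocator: bump-allocates out of pool blocks.
class Arena {
 public:
  explicit Arena(Pool* pool) : pool_(pool), block_(nullptr), pos_(0), end_(0) {}

  void* Alloc(size_t bytes) {
    bytes = (bytes + 7) & ~size_t{7};  // keep 8-byte alignment
    if (pos_ + bytes > end_) {
      size_t block_size = bytes > kBlockSize ? bytes : kBlockSize;
      block_ = pool_->AllocBlock(block_size);
      pos_ = 0;
      end_ = block_size;
    }
    void* result = block_ + pos_;
    pos_ += bytes;
    return result;
  }

 private:
  static constexpr size_t kBlockSize = 4096;
  Pool* pool_;
  uint8_t* block_;
  size_t pos_;
  size_t end_;
};

int main() {
  Pool pool;
  Arena allocator(&pool);  // mirrors: ArenaAllocator allocator(&pool);
  int* values = static_cast<int*>(allocator.Alloc(16 * sizeof(int)));
  for (int i = 0; i < 16; ++i) values[i] = i;
  std::cout << values[15] << "\n";  // prints 15
  return 0;
}

The property the tests lean on is that nothing is freed piecemeal: when the allocator and pool go out of scope at the end of a test, all of the graph memory is released at once.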
/art/compiler/utils/

D | allocation.h
     28  void* operator new(size_t size, ArenaAllocator* allocator) {  in new()
D | scoped_arena_allocator.h
    108  ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);  in Create() (local)
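allocation.h (line 28 above) overloads operator new to take an ArenaAllocator*, which is what lets compiler objects be created with new (allocator) SomeNode(...) and land in the arena rather than on the heap; the scoped_arena_allocator.h hit at line 108 is ordinary placement new into memory the allocator has already reserved. A self-contained sketch of the first idiom, with a toy Arena standing in for ArenaAllocator:

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Toy arena standing in for ArenaAllocator (assumption, not the ART class).
class Arena {
 public:
  void* Alloc(size_t bytes) {
    storage_.emplace_back(bytes);
    return storage_.back().data();
  }
 private:
  std::vector<std::vector<uint8_t>> storage_;
};

// Mirrors the allocation.h idiom: a base class whose operator new takes the
// arena, so "new (&arena) Node(...)" draws from the arena instead of the heap.
class ArenaObject {
 public:
  void* operator new(size_t size, Arena* arena) { return arena->Alloc(size); }
  void operator delete(void*, Arena*) {}  // only used if a constructor throws
  void operator delete(void*) {}          // never called; the arena is dropped wholesale
};

class Node : public ArenaObject {
 public:
  explicit Node(int id) : id_(id) {}
  int id() const { return id_; }
 private:
  int id_;
};

int main() {
  Arena arena;
  Node* n = new (&arena) Node(42);  // arena-placed, no individual delete needed
  std::cout << n->id() << "\n";     // prints 42
  return 0;
}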
/art/compiler/dex/

D | global_value_numbering.cc
     23  GlobalValueNumbering::GlobalValueNumbering(CompilationUnit* cu, ScopedArenaAllocator* allocator)  in GlobalValueNumbering()
     47  ScopedArenaAllocator* allocator) {  in PrepareBasicBlock()
D | mir_analysis.cc
   1112  ScopedArenaAllocator allocator(&cu_->arena_stack);  in DoCacheFieldLoweringInfo() (local)
   1221  ScopedArenaAllocator allocator(&cu_->arena_stack);  in DoCacheMethodLoweringInfo() (local)
D | mir_optimization.cc
    326  std::unique_ptr<ScopedArenaAllocator> allocator;  in BasicBlockOpt() (local)
   1007  ScopedArenaAllocator allocator(&cu_->arena_stack);  in EliminateClassInitChecksGate() (local)
   1188  ScopedArenaAllocator allocator(&cu_->arena_stack);  // Reclaim memory after each LVN.  in ApplyGlobalValueNumberingEnd() (local)
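The dex front end above creates a fresh ScopedArenaAllocator over cu_->arena_stack for each analysis pass, and the comment captured at mir_optimization.cc line 1188 states the intent: reclaim memory after each LVN. A minimal sketch of that scope-based reclamation, assuming a simplified stack that only records and rewinds a high-water mark (ArenaStack and ScopedArena here are illustrative, not the ART types):

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Simplified stand-in for ArenaStack: one buffer plus a top-of-stack offset.
class ArenaStack {
 public:
  explicit ArenaStack(size_t capacity) : buffer_(capacity), top_(0) {}
  size_t Top() const { return top_; }
  void Rewind(size_t mark) { top_ = mark; }
  void* Push(size_t bytes) {
    bytes = (bytes + 7) & ~size_t{7};
    assert(top_ + bytes <= buffer_.size());
    void* result = buffer_.data() + top_;
    top_ += bytes;
    return result;
  }
 private:
  std::vector<uint8_t> buffer_;
  size_t top_;
};

// Mirrors the ScopedArenaAllocator idiom: remember the stack position on
// construction, hand out allocations, rewind on destruction so the next
// pass starts with the previous pass's scratch memory reclaimed.
class ScopedArena {
 public:
  explicit ScopedArena(ArenaStack* stack) : stack_(stack), mark_(stack->Top()) {}
  ~ScopedArena() { stack_->Rewind(mark_); }
  void* Alloc(size_t bytes) { return stack_->Push(bytes); }
 private:
  ArenaStack* stack_;
  size_t mark_;
};

int main() {
  ArenaStack stack(1 << 16);
  for (int pass = 0; pass < 3; ++pass) {
    ScopedArena allocator(&stack);  // like: ScopedArenaAllocator allocator(&cu_->arena_stack);
    allocator.Alloc(1024);          // per-pass scratch data
    std::cout << "pass " << pass << ": top = " << stack.Top() << "\n";
  }                                 // destructor rewinds after each pass
  std::cout << "after passes: top = " << stack.Top() << "\n";  // back to 0
  return 0;
}

Because the rewind happens in the destructor, scratch memory from one pass never accumulates into the next, which is what the "reclaim memory after each LVN" comment is after.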
/art/runtime/entrypoints/quick/

D | quick_alloc_entrypoints.cc
    167  void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) {  in SetQuickAllocEntryPointsAllocator()
/art/runtime/gc/space/

D | valgrind_malloc_space-inl.h
     96  A allocator, byte* begin,  in ValgrindMallocSpace()
/art/compiler/dex/quick/

D | resource_mask.h
    153  explicit ResourceMaskCache(ArenaAllocator* allocator)  in ResourceMaskCache()
/art/runtime/gc/

D | heap-inl.h
     40  size_t byte_count, AllocatorType allocator,  in AllocObjectWithAllocator()
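On the runtime side, "allocator" usually names a gc::AllocatorType value rather than an object: quick_alloc_entrypoints.cc swaps the fast-path allocation entrypoints when SetQuickAllocEntryPointsAllocator is called, and heap-inl.h's AllocObjectWithAllocator takes the type as a parameter and branches on it. The sketch below only mirrors that dispatch shape; the enumerator names and branches are invented for illustration.

#include <cstddef>
#include <iostream>

// Hypothetical allocator kinds, echoing gc::AllocatorType (these enumerator
// names are made up for this sketch).
enum class AllocatorType { kBumpPointer, kFreeList, kLargeObject };

// A single global selection, in the spirit of SetQuickAllocEntryPointsAllocator.
AllocatorType g_allocator = AllocatorType::kBumpPointer;
void SetAllocator(AllocatorType allocator) { g_allocator = allocator; }

// Dispatch on the allocator type, the way AllocObjectWithAllocator picks an
// allocation path; the branches here are placeholders, not the real paths.
void* AllocObject(size_t byte_count, AllocatorType allocator) {
  switch (allocator) {
    case AllocatorType::kBumpPointer:
      std::cout << "bump-pointer alloc, " << byte_count << " bytes\n";
      break;
    case AllocatorType::kFreeList:
      std::cout << "free-list alloc, " << byte_count << " bytes\n";
      break;
    case AllocatorType::kLargeObject:
      std::cout << "large-object alloc, " << byte_count << " bytes\n";
      break;
  }
  return ::operator new(byte_count);  // placeholder backing allocation
}

int main() {
  SetAllocator(AllocatorType::kFreeList);
  void* obj = AllocObject(64, g_allocator);
  ::operator delete(obj);
  return 0;
}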
/art/runtime/

D | safe_map.h
     48  : map_(cmp, allocator) {  in map_() (argument)
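safe_map.h forwards a comparator and an allocator straight through to the std::map it wraps (the map_(cmp, allocator) initializer above). The same construction shape, shown with std::map directly:

#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <string>

int main() {
  using Cmp = std::less<int>;
  using Alloc = std::allocator<std::pair<const int, std::string>>;

  Cmp cmp;
  Alloc allocator;
  // Same construction shape as the "map_(cmp, allocator)" initializer above:
  // the wrapped std::map receives both the comparator and the allocator.
  std::map<int, std::string, Cmp, Alloc> numbers(cmp, allocator);

  numbers.emplace(1, "one");
  numbers.emplace(2, "two");
  std::cout << numbers.at(2) << "\n";  // prints "two"
  return 0;
}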
|