/art/libartbase/base/ |
D | memory_region_test.cc |
      26  MemoryRegion region(&data, n);  in TEST() local
      28  ASSERT_EQ(0, region.LoadUnaligned<char>(0));  in TEST()
      33  region.LoadUnaligned<uint32_t>(1));  in TEST()
      34  ASSERT_EQ(5 + (6 << kBitsPerByte), region.LoadUnaligned<int16_t>(5));  in TEST()
      35  ASSERT_EQ(7u, region.LoadUnaligned<unsigned char>(7));  in TEST()
      41  MemoryRegion region(&data, n);  in TEST() local
      43  region.StoreUnaligned<unsigned char>(0u, 7);  in TEST()
      44  region.StoreUnaligned<int16_t>(1, 6 + (5 << kBitsPerByte));  in TEST()
      45  region.StoreUnaligned<uint32_t>(3,  in TEST()
      50  region.StoreUnaligned<char>(7, 0);  in TEST()
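The memory_region_test.cc hits above exercise MemoryRegion::LoadUnaligned and StoreUnaligned at byte offsets that need not be naturally aligned. As a minimal, self-contained sketch of that technique (not ART's MemoryRegion; the helper names here are invented), a memcpy-based accessor keeps unaligned access well-defined:

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

// Sketch only: read a T from an arbitrary byte offset without requiring
// that the address be aligned for T. memcpy makes this well-defined.
template <typename T>
T LoadUnaligned(const uint8_t* base, size_t offset) {
  T value;
  std::memcpy(&value, base + offset, sizeof(T));
  return value;
}

template <typename T>
void StoreUnaligned(uint8_t* base, size_t offset, T value) {
  std::memcpy(base + offset, &value, sizeof(T));
}

int main() {
  uint8_t buffer[8] = {};
  StoreUnaligned<uint32_t>(buffer, 3, 0x11223344u);  // offset 3 is not 4-byte aligned
  assert(LoadUnaligned<uint32_t>(buffer, 3) == 0x11223344u);
  return 0;
}
```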
|
D | data_hash.h |
      86  size_t operator()(BMR region) const {  in operator()
      88  size_t num_full_blocks = region.size_in_bits() / kMurmur3BlockBits;  in operator()
      89  size_t num_end_bits = region.size_in_bits() % kMurmur3BlockBits;  in operator()
      92  uint32_t block = region.LoadBits(i * kMurmur3BlockBits, kMurmur3BlockBits);  in operator()
      96  uint32_t end_bits = region.LoadBits(num_full_blocks * kMurmur3BlockBits, num_end_bits);  in operator()
     101  size_t num_full_bytes = region.size_in_bits() / kBitsPerByte;  in operator()
     102  size_t num_end_bits = region.size_in_bits() % kBitsPerByte;  in operator()
     105  uint8_t byte = region.LoadBits(i * kBitsPerByte, kBitsPerByte);  in operator()
     109  uint32_t end_bits = region.LoadBits(num_full_bytes * kBitsPerByte, num_end_bits);  in operator()
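The data_hash.h hits show the usual split of a bit region into full 32-bit Murmur3 blocks plus a leftover tail. A simplified sketch of that block-plus-tail structure over a plain byte buffer follows; it borrows the public MurmurHash3 x86_32 constants but is not ART's DataHash, and its tail handling only approximates the reference algorithm:

```cpp
#include <cstddef>
#include <cstdint>
#include <cstring>

inline uint32_t RotL(uint32_t x, int r) { return (x << r) | (x >> (32 - r)); }

// Sketch of "hash full 32-bit blocks, then mix in the leftover tail".
uint32_t Murmur3Sketch(const uint8_t* data, size_t size_in_bytes, uint32_t seed = 0) {
  uint32_t h = seed;
  size_t num_full_blocks = size_in_bytes / 4;  // complete 32-bit blocks
  size_t num_tail_bytes = size_in_bytes % 4;   // leftover bytes

  for (size_t i = 0; i < num_full_blocks; ++i) {
    uint32_t k;
    std::memcpy(&k, data + i * 4, 4);
    k *= 0xcc9e2d51u; k = RotL(k, 15); k *= 0x1b873593u;
    h ^= k; h = RotL(h, 13); h = h * 5u + 0xe6546b64u;
  }

  // Mix in the bytes that do not fill a whole block.
  uint32_t k = 0;
  std::memcpy(&k, data + num_full_blocks * 4, num_tail_bytes);
  k *= 0xcc9e2d51u; k = RotL(k, 15); k *= 0x1b873593u;
  h ^= k;

  // Finalization (avalanche).
  h ^= static_cast<uint32_t>(size_in_bytes);
  h ^= h >> 16; h *= 0x85ebca6bu; h ^= h >> 13; h *= 0xc2b2ae35u; h ^= h >> 16;
  return h;
}
```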
|
D | bit_memory_region.h |
      40  ALWAYS_INLINE explicit BitMemoryRegion(MemoryRegion region)  in BitMemoryRegion() argument
      41  : BitMemoryRegion(region.begin(), /* bit_start */ 0, region.size_in_bits()) {  in BitMemoryRegion()
      43  ALWAYS_INLINE BitMemoryRegion(MemoryRegion region, size_t bit_offset, size_t bit_length)  in BitMemoryRegion() argument
      44  : BitMemoryRegion(region) {  in BitMemoryRegion()
     454  BitMemoryRegion region(out_->data(), bit_offset_, bit_length);  in Allocate()
     457  return region;  in Allocate()
     460  ALWAYS_INLINE void WriteRegion(const BitMemoryRegion& region) {  in WriteRegion() argument
     461  Allocate(region.size_in_bits()).CopyBits(region);  in WriteRegion()
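The bit_memory_region.h hits construct a bit-granularity view (begin pointer, bit offset, bit length) over ordinary bytes. A hypothetical sketch of such a view, with LoadBit/LoadBits in least-significant-bit-first order, might look like this (names invented, not ART's BitMemoryRegion):

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>

// Sketch of a bit-level window into a byte buffer.
class BitRegionSketch {
 public:
  BitRegionSketch(const uint8_t* data, size_t bit_start, size_t bit_length)
      : data_(data), bit_start_(bit_start), bit_length_(bit_length) {}

  bool LoadBit(size_t bit_offset) const {
    assert(bit_offset < bit_length_);
    size_t index = bit_start_ + bit_offset;
    return (data_[index / 8] >> (index % 8)) & 1u;
  }

  // Loads up to 32 bits starting at bit_offset, least-significant bit first.
  uint32_t LoadBits(size_t bit_offset, size_t count) const {
    assert(count <= 32 && bit_offset + count <= bit_length_);
    uint32_t value = 0;
    for (size_t i = 0; i < count; ++i) {
      value |= static_cast<uint32_t>(LoadBit(bit_offset + i)) << i;
    }
    return value;
  }

  size_t size_in_bits() const { return bit_length_; }

 private:
  const uint8_t* data_;
  size_t bit_start_;
  size_t bit_length_;
};
```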
|
D | bit_table.h |
     415  MemoryRegion region(const_cast<void*>(bitmap), BitsToBytesRoundUp(num_bits));  in Dedup()
     416  DCHECK(num_bits == 0 || BitMemoryRegion(region).LoadBit(num_bits - 1) == 1);  in Dedup()
     417  DCHECK_EQ(BitMemoryRegion(region).LoadBits(num_bits, region.size_in_bits() - num_bits), 0u);  in Dedup()
     419  uint32_t hash = hasher(region);  in Dedup()
     424  if (MemoryRegion::ContentEquals()(region, rows_[it->second])) {  in Dedup()
     431  void* copy = allocator_->Alloc(region.size(), kArenaAllocBitTableBuilder);  in Dedup()
     432  memcpy(copy, region.pointer(), region.size());  in Dedup()
     433  rows_.push_back(MemoryRegion(copy, region.size()));  in Dedup()
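The bit_table.h Dedup() hits follow a common pattern: hash the candidate region, probe a hash-to-row-index map, confirm with a full content comparison, and only copy the data when it is genuinely new. A rough sketch of that pattern using standard containers instead of ART's arena allocator and ContentEquals:

```cpp
#include <cstddef>
#include <cstdint>
#include <string>
#include <unordered_map>
#include <vector>

// Sketch of hash-then-compare deduplication: identical blobs are stored once
// and callers get back the index of the canonical copy.
class BlobDedupSketch {
 public:
  size_t Dedup(const void* data, size_t size) {
    std::string blob(static_cast<const char*>(data), size);
    uint32_t hash = static_cast<uint32_t>(std::hash<std::string>()(blob));
    auto range = index_.equal_range(hash);
    for (auto it = range.first; it != range.second; ++it) {
      if (rows_[it->second] == blob) {
        return it->second;  // identical content already stored
      }
    }
    rows_.push_back(std::move(blob));  // first occurrence: keep a copy
    size_t row = rows_.size() - 1;
    index_.emplace(hash, row);
    return row;
  }

 private:
  std::vector<std::string> rows_;
  std::unordered_multimap<uint32_t, size_t> index_;
};
```

Hashing first keeps the expensive byte-by-byte comparison limited to rows that already collide on the cheap 32-bit hash.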
|
D | memory_region.h |
     129  ALWAYS_INLINE void Extend(const MemoryRegion& region, uintptr_t extra) {  in Extend() argument
     130  pointer_ = region.pointer();  in Extend()
     131  size_ = (region.size() + extra);  in Extend()
|
/art/runtime/jit/ |
D | jit_scoped_code_cache_write.h |
      38  explicit ScopedCodeCacheWrite(const JitMemoryRegion& region)  in ScopedCodeCacheWrite() argument
      40  region_(region) {  in ScopedCodeCacheWrite()
      41  if (kIsDebugBuild || !region.HasDualCodeMapping()) {  in ScopedCodeCacheWrite()
      43  const MemMap* const updatable_pages = region.GetUpdatableCodeMapping();  in ScopedCodeCacheWrite()
      45  int prot = region.HasDualCodeMapping() ? kProtRW : kProtRWX;  in ScopedCodeCacheWrite()
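ScopedCodeCacheWrite is an RAII guard that makes the JIT code mapping writable for the duration of a scope, choosing RW or RWX depending on whether a dual code mapping exists. A bare-bones sketch of the same idea with plain POSIX mprotect() (assuming a page-aligned range; this is not ART's implementation) could be:

```cpp
#include <sys/mman.h>
#include <cstddef>

// Sketch: open a code range for writing on construction, restore an
// executable mapping on destruction. Assumes start is page-aligned.
class ScopedWritableSketch {
 public:
  ScopedWritableSketch(void* start, size_t size) : start_(start), size_(size) {
    mprotect(start_, size_, PROT_READ | PROT_WRITE);  // writable while in scope
  }
  ~ScopedWritableSketch() {
    mprotect(start_, size_, PROT_READ | PROT_EXEC);   // back to executable
  }

  ScopedWritableSketch(const ScopedWritableSketch&) = delete;
  ScopedWritableSketch& operator=(const ScopedWritableSketch&) = delete;

 private:
  void* start_;
  size_t size_;
};
```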
|
D | jit_code_cache.h |
     117  explicit ZygoteMap(JitMemoryRegion* region)  in ZygoteMap() argument
     118  : map_(), region_(region), compilation_state_(nullptr) {}  in ZygoteMap()
     243  JitMemoryRegion* region,
     261  JitMemoryRegion* region,
     277  void Free(Thread* self, JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
     280  void FreeLocked(JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
     391  bool IsSharedRegion(const JitMemoryRegion& region) const { return &region == &shared_region_; }  in IsSharedRegion() argument
     396  JitMemoryRegion* region = GetCurrentRegion();  in CanAllocateProfilingInfo() local
     397  return region->IsValid() && !IsSharedRegion(*region);  in CanAllocateProfilingInfo()
|
D | jit_code_cache.cc |
     227  JitMemoryRegion region;  in Create() local
     228  if (!region.Initialize(initial_capacity,  in Create()
     236  if (region.HasCodeMapping()) {  in Create()
     237  const MemMap* exec_pages = region.GetExecPages();  in Create()
     245  jit_code_cache->shared_region_ = std::move(region);  in Create()
     247  jit_code_cache->private_region_ = std::move(region);  in Create()
     656  JitMemoryRegion* region,  in Commit() argument
     673  DCheckRootsAreValid(roots, IsSharedRegion(*region));  in Commit()
     684  const uint8_t* code_ptr = region->CommitCode(  in Commit()
     692  if (!region->CommitData(reserved_data, roots, stack_map)) {  in Commit()
     [all …]
|
/art/test/1000-non-moving-space-stress/ |
D | info.txt |
       4  reference to an object cleared or moved from a newly allocated region
       5  of the region space.
|
/art/compiler/utils/ |
D | assembler.cc |
      54  void AssemblerBuffer::ProcessFixups(const MemoryRegion& region) {  in ProcessFixups() argument
      57  fixup->Process(region, fixup->position());  in ProcessFixups()
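ProcessFixups() iterates the fixups recorded while assembling and lets each one patch the finalized buffer at its recorded position. A sketch of that virtual-Process pattern with invented types (not ART's AssemblerFixup or MemoryRegion):

```cpp
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <memory>
#include <vector>

// Sketch: each fixup remembers where it applies and patches the final buffer.
class FixupSketch {
 public:
  explicit FixupSketch(size_t position) : position_(position) {}
  virtual ~FixupSketch() = default;
  virtual void Process(uint8_t* buffer, size_t position) = 0;
  size_t position() const { return position_; }

 private:
  size_t position_;
};

// Example fixup: writes a resolved 32-bit value into the emitted code.
class PatchWord : public FixupSketch {
 public:
  PatchWord(size_t position, uint32_t value) : FixupSketch(position), value_(value) {}
  void Process(uint8_t* buffer, size_t position) override {
    std::memcpy(buffer + position, &value_, sizeof(value_));
  }

 private:
  uint32_t value_;
};

void ProcessFixups(uint8_t* buffer, const std::vector<std::unique_ptr<FixupSketch>>& fixups) {
  for (const auto& fixup : fixups) {
    fixup->Process(buffer, fixup->position());
  }
}
```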
|
D | assembler.h |
      50  virtual void Process(const MemoryRegion& region, int position) = 0;
     168  void FinalizeInstructions(const MemoryRegion& region);
     272  void ProcessFixups(const MemoryRegion& region);
     378  virtual void FinalizeInstructions(const MemoryRegion& region) {  in FinalizeInstructions() argument
     379  buffer_.FinalizeInstructions(region);  in FinalizeInstructions()
|
D | jni_macro_assembler.h |
      95  virtual void FinalizeInstructions(const MemoryRegion& region) = 0;
     269  void FinalizeInstructions(const MemoryRegion& region) override {  in FinalizeInstructions() argument
     270  asm_.FinalizeInstructions(region);  in FinalizeInstructions()
|
/art/runtime/gc/space/ |
D | region_space-inl.h |
     329  mirror::Object* region = nullptr;  in AllocLarge() local
     334  region = AllocLargeInRange<kForEvac>(cyclic_alloc_region_index_,  in AllocLarge()
     342  if (region == nullptr) {  in AllocLarge()
     346  region = AllocLargeInRange<kForEvac>(  in AllocLarge()
     356  if (region != nullptr) {  in AllocLarge()
     365  region = AllocLargeInRange<kForEvac>(0,  in AllocLarge()
     372  if (kForEvac && region != nullptr) {  in AllocLarge()
     375  return region;  in AllocLarge()
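AllocLarge() searches for free regions starting at a rotating cursor and, on failure, retries from the beginning of the region table. A toy sketch of that cyclic search with wrap-around (free regions modeled as booleans, assuming at least one region exists; not ART's allocator) is shown below:

```cpp
#include <cstddef>
#include <vector>

// Sketch: try from the rotating cursor to the end of the region table, then
// fall back to the range before the cursor.
class CyclicAllocatorSketch {
 public:
  explicit CyclicAllocatorSketch(size_t num_regions)
      : free_(num_regions, true), cursor_(0) {}

  // Returns the index of a free region, or -1 if none is available.
  ptrdiff_t Allocate() {
    ptrdiff_t found = FindFreeInRange(cursor_, free_.size());
    if (found < 0) {
      found = FindFreeInRange(0, cursor_);  // wrap around to the front
    }
    if (found >= 0) {
      free_[found] = false;
      cursor_ = static_cast<size_t>(found + 1) % free_.size();  // advance cursor
    }
    return found;
  }

 private:
  ptrdiff_t FindFreeInRange(size_t begin, size_t end) const {
    for (size_t i = begin; i < end; ++i) {
      if (free_[i]) return static_cast<ptrdiff_t>(i);
    }
    return -1;
  }

  std::vector<bool> free_;
  size_t cursor_;
};
```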
|
D | region_space.cc |
     294  Region* region = RefToRegionLocked(reinterpret_cast<mirror::Object*>(addr));  in ZeroLiveBytesForLargeObject() local
     296  DCHECK(region->IsLarge());  in ZeroLiveBytesForLargeObject()
     298  DCHECK(region->IsLargeTail());  in ZeroLiveBytesForLargeObject()
     300  region->ZeroLiveBytes();  in ZeroLiveBytesForLargeObject()
|
/art/test/659-unpadded-array/ |
D | info.txt | 1 Regression test for the concurrent GC whose region space had
|
/art/test/090-loop-formation/ |
D | info.txt | 3 in the loop region, and the JIT compiler won't choke on unresolved fields.
|
/art/compiler/jit/ |
D | jit_compiler.h | 45 Thread* self, JitMemoryRegion* region, ArtMethod* method, CompilationKind kind)
|
D | jit_compiler.cc |
     176  Thread* self, JitMemoryRegion* region, ArtMethod* method, CompilationKind compilation_kind) {  in CompileMethod() argument
     201  self, code_cache, region, method, compilation_kind, jit_logger_.get());  in CompileMethod()
|
/art/runtime/ |
D | stack_map.cc |
     171  CodeInfo code_info(code_info_data, &num_bits, [&](size_t i, auto* table, BitMemoryRegion region) {  in CollectSizeStats() argument
     174  table_stats.AddBits(region.size_in_bits());  in CollectSizeStats()
     175  table_stats["Header"].AddBits(region.size_in_bits() - table->DataBitSize());  in CollectSizeStats()
|
/art/compiler/utils/arm64/ |
D | assembler_arm64.cc |
      82  void Arm64Assembler::FinalizeInstructions(const MemoryRegion& region) {  in FinalizeInstructions() argument
      85  region.CopyFrom(0, from);  in FinalizeInstructions()
|
D | assembler_arm64.h | 94 void FinalizeInstructions(const MemoryRegion& region) override;
|
/art/compiler/ |
D | compiler.h | 78 jit::JitMemoryRegion* region ATTRIBUTE_UNUSED, in JitCompile()
|
/art/compiler/optimizing/ |
D | optimizing_compiler.cc |
     299  jit::JitMemoryRegion* region,
    1231  jit::JitMemoryRegion* region,  in JitCompile() argument
    1237  DCHECK_EQ(compiler_options.IsJitCompilerForSharedCode(), code_cache->IsSharedRegion(*region));  in JitCompile()
    1284  region,  in JitCompile()
    1321  region,  in JitCompile()
    1333  code_cache->Free(self, region, reserved_code.data(), reserved_data.data());  in JitCompile()
    1383  region,  in JitCompile()
    1429  region,  in JitCompile()
    1441  code_cache->Free(self, region, reserved_code.data(), reserved_data.data());  in JitCompile()
|
/art/dex2oat/linker/ |
D | code_info_table_deduper.cc | 87 BitMemoryRegion region( in Dedupe() local
|
/art/compiler/utils/arm/ |
D | assembler_arm_vixl.cc |
      55  void ArmVIXLAssembler::FinalizeInstructions(const MemoryRegion& region) {  in FinalizeInstructions() argument
      58  region.CopyFrom(0, from);  in FinalizeInstructions()
|