Searched refs:AllocatedSize (Results 1 – 25 of 33) sorted by relevance

/external/swiftshader/third_party/llvm-10.0/llvm/include/llvm/Support/
Memory.h
34 MemoryBlock() : Address(nullptr), AllocatedSize(0) {} in MemoryBlock()
36 : Address(addr), AllocatedSize(allocatedSize) {} in MemoryBlock()
40 size_t allocatedSize() const { return AllocatedSize; } in allocatedSize()
44 size_t AllocatedSize; ///< Size, in bytes of the memory area variable
/external/llvm-project/llvm/include/llvm/Support/
Memory.h
34 MemoryBlock() : Address(nullptr), AllocatedSize(0) {} in MemoryBlock()
36 : Address(addr), AllocatedSize(allocatedSize) {} in MemoryBlock()
40 size_t allocatedSize() const { return AllocatedSize; } in allocatedSize()
44 size_t AllocatedSize; ///< Size, in bytes of the memory area variable
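
Taken together, these hits show that AllocatedSize is the byte count the OS actually reserved for the block, exposed read-only through allocatedSize(). A minimal sketch of how that is typically consumed through the llvm::sys::Memory API (this example is not from either tree; the function name is invented):

#include "llvm/Support/Memory.h"
#include <system_error>

// Sketch: allocate a page-aligned block and query how much was really mapped.
size_t mappedBytesExample() {
  std::error_code EC;
  llvm::sys::MemoryBlock MB = llvm::sys::Memory::allocateMappedMemory(
      4096, /*NearBlock=*/nullptr,
      llvm::sys::Memory::MF_READ | llvm::sys::Memory::MF_WRITE, EC);
  if (EC)
    return 0;
  // allocatedSize() returns AllocatedSize: the size the OS actually mapped,
  // rounded up to whole pages, so it can exceed the 4096 bytes requested.
  size_t Mapped = MB.allocatedSize();
  llvm::sys::Memory::releaseMappedMemory(MB);
  return Mapped;
}
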
/external/tensorflow/tensorflow/core/framework/
tracking_allocator.cc
42 size_t allocated_bytes = allocator_->AllocatedSize(ptr); in AllocateRaw()
86 allocated_bytes = allocator_->AllocatedSize(ptr); in DeallocateRaw()
129 size_t TrackingAllocator::AllocatedSize(const void* ptr) const { in AllocatedSize() function in tensorflow::TrackingAllocator
138 return allocator_->AllocatedSize(ptr); in AllocatedSize()
allocator.h
254 virtual size_t AllocatedSize(const void* ptr) const { in AllocatedSize() function
277 return AllocatedSize(ptr); in AllocatedSizeSlow()
329 size_t AllocatedSize(const void* ptr) const override { in AllocatedSize() function
330 return wrapped_->AllocatedSize(ptr); in AllocatedSize()
tracking_allocator_test.cc
93 EXPECT_LE(4, ta->AllocatedSize(p1)); in TEST()
99 EXPECT_LE(12, ta->AllocatedSize(p2)); in TEST()
tracking_allocator.h
66 size_t AllocatedSize(const void* ptr) const override;
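
On the TensorFlow side, Allocator::AllocatedSize() is a virtual hook with a default implementation, and TrackingAllocator simply forwards it to the allocator it wraps. A hedged sketch of that forwarding shape, assuming the tensorflow::Allocator interface shown in allocator.h above (ForwardingAllocator and its members are invented names; exact virtual signatures can differ between TensorFlow versions):

#include <string>
#include "tensorflow/core/framework/allocator.h"

// Hypothetical wrapper that forwards everything, including AllocatedSize(),
// to an underlying tensorflow::Allocator -- the same shape as
// TrackingAllocator and the ProcessState wrapper in the results below.
class ForwardingAllocator : public tensorflow::Allocator {
 public:
  explicit ForwardingAllocator(tensorflow::Allocator* wrapped)
      : wrapped_(wrapped) {}

  std::string Name() override { return wrapped_->Name(); }

  void* AllocateRaw(size_t alignment, size_t num_bytes) override {
    return wrapped_->AllocateRaw(alignment, num_bytes);
  }

  void DeallocateRaw(void* ptr) override { wrapped_->DeallocateRaw(ptr); }

  bool TracksAllocationSizes() const override {
    return wrapped_->TracksAllocationSizes();
  }

  // Size the underlying allocator actually reserved for ptr; only meaningful
  // when TracksAllocationSizes() is true.
  size_t AllocatedSize(const void* ptr) const override {
    return wrapped_->AllocatedSize(ptr);
  }

 private:
  tensorflow::Allocator* wrapped_;  // not owned
};
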
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Support/Unix/
Memory.inc
146 Result.AllocatedSize = PageSize*NumPages;
161 if (M.Address == nullptr || M.AllocatedSize == 0)
164 if (0 != ::munmap(M.Address, M.AllocatedSize))
168 M.AllocatedSize = 0;
176 if (M.Address == nullptr || M.AllocatedSize == 0)
184 uintptr_t End = alignAddr((const uint8_t *)M.Address + M.AllocatedSize, PageSize);
197 Memory::InvalidateInstructionCache(M.Address, M.AllocatedSize);
208 Memory::InvalidateInstructionCache(M.Address, M.AllocatedSize);
/external/llvm-project/llvm/lib/Support/Windows/
Memory.inc
148 Result.AllocatedSize = AllocSize;
158 if (M.Address == 0 || M.AllocatedSize == 0)
165 M.AllocatedSize = 0;
172 if (M.Address == 0 || M.AllocatedSize == 0)
178 if (!VirtualProtect(M.Address, M.AllocatedSize, Protect, &OldFlags))
182 Memory::InvalidateInstructionCache(M.Address, M.AllocatedSize);
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Support/Windows/
Memory.inc
148 Result.AllocatedSize = AllocSize;
158 if (M.Address == 0 || M.AllocatedSize == 0)
165 M.AllocatedSize = 0;
172 if (M.Address == 0 || M.AllocatedSize == 0)
178 if (!VirtualProtect(M.Address, M.AllocatedSize, Protect, &OldFlags))
182 Memory::InvalidateInstructionCache(M.Address, M.AllocatedSize);
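
In both the Unix and Windows Memory.inc implementations, AllocatedSize is the length passed to the protection change and to InvalidateInstructionCache. A small sketch of the usual publish-code sequence on top of the same cross-platform API (not code from either tree; publishCode is an invented name, and block release/ownership handling is omitted):

#include <cstring>
#include <system_error>
#include "llvm/Support/Memory.h"

// Sketch: copy machine code into a block, flip it to read+execute, and flush
// the instruction cache over the whole AllocatedSize, as Memory.inc does.
std::error_code publishCode(const void* Code, size_t Len) {
  std::error_code EC;
  llvm::sys::MemoryBlock MB = llvm::sys::Memory::allocateMappedMemory(
      Len, /*NearBlock=*/nullptr,
      llvm::sys::Memory::MF_READ | llvm::sys::Memory::MF_WRITE, EC);
  if (EC)
    return EC;
  std::memcpy(MB.base(), Code, Len);
  // protectMappedMemory and InvalidateInstructionCache both operate on the
  // full mapped range, i.e. MB.allocatedSize(), not on the requested Len.
  if (std::error_code PEC = llvm::sys::Memory::protectMappedMemory(
          MB, llvm::sys::Memory::MF_READ | llvm::sys::Memory::MF_EXEC))
    return PEC;
  llvm::sys::Memory::InvalidateInstructionCache(MB.base(), MB.allocatedSize());
  return std::error_code();
}
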
/external/tensorflow/tensorflow/core/common_runtime/gpu/
gpu_debug_allocator.cc
126 size_t GPUDebugAllocator::AllocatedSize(const void* ptr) const { in AllocatedSize() function in tensorflow::GPUDebugAllocator
127 return base_allocator_->AllocatedSize(static_cast<const char*>(ptr) - in AllocatedSize()
209 size_t GPUNanResetAllocator::AllocatedSize(const void* ptr) const { in AllocatedSize() function in tensorflow::GPUNanResetAllocator
210 return base_allocator_->AllocatedSize(ptr); in AllocatedSize()
gpu_debug_allocator.h
44 size_t AllocatedSize(const void* ptr) const override;
73 size_t AllocatedSize(const void* ptr) const override;
gpu_debug_allocator_test.cc
251 EXPECT_EQ(256, a.AllocatedSize(t1)); in TEST()
gpu_bfc_allocator_test.cc
242 EXPECT_EQ(256, a.AllocatedSize(t1)); in TEST_P()
452 initial_ptrs_allocated_sizes.push_back(a.AllocatedSize(raw)); in TestBinDebugInfo()
/external/llvm-project/llvm/lib/Support/Unix/
Memory.inc
147 Result.AllocatedSize = PageSize*NumPages;
162 if (M.Address == nullptr || M.AllocatedSize == 0)
165 if (0 != ::munmap(M.Address, M.AllocatedSize))
169 M.AllocatedSize = 0;
177 if (M.Address == nullptr || M.AllocatedSize == 0)
185 uintptr_t End = alignAddr((const uint8_t *)M.Address + M.AllocatedSize, PageSize);
198 Memory::InvalidateInstructionCache(M.Address, M.AllocatedSize);
209 Memory::InvalidateInstructionCache(M.Address, M.AllocatedSize);
/external/tensorflow/tensorflow/core/common_runtime/
process_state.h
144 size_t AllocatedSize(const void* p) const override { in AllocatedSize() function
145 return a_->AllocatedSize(p); in AllocatedSize()
scoped_allocator.h
111 size_t AllocatedSize(const void* ptr) const override { return 0; } in AllocatedSize() function
bfc_allocator.h
72 size_t AllocatedSize(const void* ptr) const override;
/external/llvm-project/llvm/include/llvm/ExecutionEngine/Orc/
OrcRPCTargetProcessControl.h
37 uint64_t AllocatedSize = 0; member
84 TA.Address, TA.AllocatedSize}); in finalizeAsync()
136 KV.second.Address, KV.second.AllocatedSize}); in deallocate()
198 E.Address, E.AllocatedSize}; in allocate()
/external/tensorflow/tensorflow/compiler/xla/service/
hlo_rematerialization.cc
589 int64 AllocatedSize(Item* item) const { in AllocatedSize() function in xla::__anon95ba67740111::MemoryUsageTracker
592 size += AllocatedSize(buffer_id); in AllocatedSize()
684 int64 AllocatedSize(BufferId buffer_id) const { in AllocatedSize() function in xla::__anon95ba67740111::MemoryUsageTracker
873 memory_usage_ += AllocatedSize(buffer_id); in BeginInstruction()
900 memory_usage_ -= AllocatedSize(buffer_id); in EndInstruction()
912 memory_usage_ -= AllocatedSize(buffer_id); in EndInstruction()
986 memory_reduced += AllocatedSize(buffer_id); in MemoryReducedIfRematerialized()
1001 memory_reduced -= AllocatedSize(buffer_id); in MemoryReducedIfRematerialized()
1086 memory_usage_ += AllocatedSize(buffer.id); in AddRematerializedInstruction()
1131 memory_usage_ -= AllocatedSize(old_buffer.id); in AddRematerializedInstruction()
[all …]
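
Within hlo_rematerialization.cc, AllocatedSize() is the per-buffer byte count that MemoryUsageTracker adds to memory_usage_ in BeginInstruction() and subtracts in EndInstruction(). A deliberately simplified, self-contained illustration of that bookkeeping pattern (every type and name below is invented, not XLA's):

#include <cstdint>
#include <unordered_map>

// Toy tracker mirroring the add-on-begin / subtract-on-end pattern visible in
// hlo_rematerialization.cc; BufferId and the size table are stand-ins.
class ToyMemoryUsageTracker {
 public:
  using BufferId = int64_t;

  void DefineBuffer(BufferId id, int64_t bytes) { allocated_size_[id] = bytes; }

  int64_t AllocatedSize(BufferId id) const {
    auto it = allocated_size_.find(id);
    return it == allocated_size_.end() ? 0 : it->second;
  }

  // Called when the instruction defining `id` starts executing.
  void BeginBuffer(BufferId id) { memory_usage_ += AllocatedSize(id); }

  // Called once the last use of `id` has finished.
  void EndBuffer(BufferId id) { memory_usage_ -= AllocatedSize(id); }

  int64_t memory_usage() const { return memory_usage_; }

 private:
  std::unordered_map<BufferId, int64_t> allocated_size_;
  int64_t memory_usage_ = 0;
};
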
/external/llvm/lib/Analysis/
InlineCost.cpp
105 uint64_t AllocatedSize; member in __anoned94fc440111::CallAnalyzer
214 AllocatedSize(0), NumInstructions(0), NumVectorInstructions(0), in CallAnalyzer()
344 AllocatedSize = SaturatingMultiplyAdd( in visitAlloca()
345 AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize); in visitAlloca()
354 AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize); in visitAlloca()
1135 AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller) in analyzeBlock()
/external/angle/third_party/abseil-cpp/absl/strings/internal/
cord_rep_flat.h
128 size_t AllocatedSize() const { return TagToAllocatedSize(tag); } in AllocatedSize() function
cordz_info.cc
172 size_t size = rep.rep->flat()->AllocatedSize(); in CountLinearReps()
/external/llvm-project/llvm/include/llvm/ExecutionEngine/Orc/TargetProcess/
OrcRPCTPCServer.h
80 uint64_t AllocatedSize = 0; member
209 E.AllocatedSize); in serialize()
214 E.AllocatedSize); in deserialize()
/external/llvm-project/llvm/lib/Analysis/
InlineCost.cpp
281 uint64_t AllocatedSize = 0; member in __anonae1cb0f20111::CallAnalyzer
869 AllocatedSize = SaturatingMultiplyAdd( in visitAlloca()
871 AllocatedSize); in visitAlloca()
872 if (AllocatedSize > InlineConstants::MaxSimplifiedDynamicAllocaToInline) { in visitAlloca()
883 AllocatedSize = in visitAlloca()
884 SaturatingAdd(DL.getTypeAllocSize(Ty).getKnownMinSize(), AllocatedSize); in visitAlloca()
1970 AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller) { in analyzeBlock()
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Analysis/
InlineCost.cpp
221 uint64_t AllocatedSize = 0; member in __anon18c107e60111::CallAnalyzer
724 AllocatedSize = SaturatingMultiplyAdd( in visitAlloca()
726 AllocatedSize); in visitAlloca()
734 AllocatedSize = in visitAlloca()
735 SaturatingAdd(DL.getTypeAllocSize(Ty).getFixedSize(), AllocatedSize); in visitAlloca()
1800 AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller) { in analyzeBlock()
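
All three copies of InlineCost.cpp grow AllocatedSize with the saturating helpers from llvm/Support/MathExtras.h so that oversized allocas clamp at UINT64_MAX instead of wrapping around. A short sketch of that accumulation using the same helpers (the free functions below are invented wrappers for illustration):

#include <cstdint>
#include "llvm/Support/MathExtras.h"

// Dynamic alloca of ElemCount elements: add ElemCount * ElemSize to the
// running total, saturating instead of overflowing.
uint64_t accumulateDynamicAlloca(uint64_t AllocatedSize, uint64_t ElemCount,
                                 uint64_t ElemSize) {
  return llvm::SaturatingMultiplyAdd(ElemCount, ElemSize, AllocatedSize);
}

// Static alloca: add the type's alloc size with the same saturation.
uint64_t accumulateStaticAlloca(uint64_t AllocatedSize, uint64_t TypeSize) {
  return llvm::SaturatingAdd(TypeSize, AllocatedSize);
}
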
