/external/tensorflow/tensorflow/core/framework/

tracking_allocator.cc
     42  size_t allocated_bytes = allocator_->AllocatedSize(ptr);   in AllocateRaw()
     86  allocated_bytes = allocator_->AllocatedSize(ptr);   in DeallocateRaw()
    129  size_t TrackingAllocator::AllocatedSize(const void* ptr) {   AllocatedSize() function in tensorflow::TrackingAllocator
    138  return allocator_->AllocatedSize(ptr);   in AllocatedSize()

allocator.h
    180  virtual size_t AllocatedSize(const void* ptr) { return RequestedSize(ptr); }   AllocatedSize() function
    201  return AllocatedSize(ptr);   in AllocatedSizeSlow()
    323  size_t AllocatedSize(const void* ptr) override {   AllocatedSize() function
    324  return wrapped_->AllocatedSize(ptr);   in AllocatedSize()

tracking_allocator_test.cc
     93  EXPECT_LE(4, ta->AllocatedSize(p1));   in TEST()
     99  EXPECT_LE(12, ta->AllocatedSize(p2));   in TEST()

tracking_allocator.h
     67  size_t AllocatedSize(const void* ptr) override;

op_kernel.cc
    751  int64 alloc_size = a->AllocatedSize(out_temp->tensor_data().data());   in allocate_temp()
    774  int64 alloc_size = a->AllocatedSize(t->tensor_data().data());   in allocate_persistent()

tensor.cc
     83  int64 ab = alloc_->AllocatedSize(data_ptr);   in FillAllocationDescription()

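Taken together, the framework hits sketch the contract of AllocatedSize in TensorFlow's allocator interface: the base-class default in allocator.h (line 180) simply falls back to RequestedSize, while wrappers such as TrackingAllocator forward the call to the allocator they wrap (tracking_allocator.cc, line 138). Below is a minimal, self-contained sketch of that default-plus-forwarding pattern; CpuAllocator, TrackingWrapper, and the bookkeeping map are invented for illustration and are not the real tensorflow::Allocator hierarchy.

#include <cstddef>
#include <cstdlib>
#include <unordered_map>

// Minimal stand-in for the allocator interface implied by the hits above.
// The real tensorflow::Allocator has many more members; only the names
// visible in allocator.h (RequestedSize, AllocatedSize) are mirrored here.
class Allocator {
 public:
  virtual ~Allocator() = default;
  virtual void* AllocateRaw(size_t num_bytes) = 0;
  virtual void DeallocateRaw(void* ptr) = 0;
  virtual size_t RequestedSize(const void* ptr) const = 0;
  // Default: report the requested size when the backend has no better answer,
  // matching the default seen at allocator.h:180.
  virtual size_t AllocatedSize(const void* ptr) const { return RequestedSize(ptr); }
};

// A simple malloc-backed allocator that remembers each request size.
class CpuAllocator : public Allocator {
 public:
  void* AllocateRaw(size_t num_bytes) override {
    void* p = std::malloc(num_bytes);
    sizes_[p] = num_bytes;
    return p;
  }
  void DeallocateRaw(void* ptr) override {
    sizes_.erase(ptr);
    std::free(ptr);
  }
  size_t RequestedSize(const void* ptr) const override { return sizes_.at(ptr); }

 private:
  std::unordered_map<const void*, size_t> sizes_;
};

// Tracking wrapper in the spirit of TrackingAllocator: it accumulates
// statistics on allocation and forwards AllocatedSize to the wrapped
// allocator, as the hit at tracking_allocator.cc:138 does.
class TrackingWrapper : public Allocator {
 public:
  explicit TrackingWrapper(Allocator* wrapped) : wrapped_(wrapped) {}

  void* AllocateRaw(size_t num_bytes) override {
    void* ptr = wrapped_->AllocateRaw(num_bytes);
    total_bytes_ += wrapped_->AllocatedSize(ptr);  // cf. tracking_allocator.cc:42
    return ptr;
  }
  void DeallocateRaw(void* ptr) override { wrapped_->DeallocateRaw(ptr); }
  size_t RequestedSize(const void* ptr) const override {
    return wrapped_->RequestedSize(ptr);
  }
  size_t AllocatedSize(const void* ptr) const override {
    return wrapped_->AllocatedSize(ptr);  // forward to the wrapped allocator
  }
  size_t total_bytes() const { return total_bytes_; }

 private:
  Allocator* wrapped_;
  size_t total_bytes_ = 0;
};

The tracking_allocator_test.cc hits only assert lower bounds (EXPECT_LE) because a backend is free to round a request up, so AllocatedSize may legitimately exceed the requested size.
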
/external/tensorflow/tensorflow/core/common_runtime/gpu/

gpu_debug_allocator.cc
    125  size_t GPUDebugAllocator::AllocatedSize(const void* ptr) {   AllocatedSize() function in tensorflow::GPUDebugAllocator
    126  return base_allocator_->AllocatedSize(static_cast<const char*>(ptr) -   in AllocatedSize()
    207  size_t GPUNanResetAllocator::AllocatedSize(const void* ptr) {   AllocatedSize() function in tensorflow::GPUNanResetAllocator
    208  return base_allocator_->AllocatedSize(ptr);   in AllocatedSize()

gpu_debug_allocator.h
     44  size_t AllocatedSize(const void* ptr) override;
     73  size_t AllocatedSize(const void* ptr) override;

gpu_bfc_allocator_test.cc
    213  EXPECT_EQ(256, a.AllocatedSize(t1));   in TEST()
    438  initial_ptrs_allocated_sizes.push_back(a.AllocatedSize(raw));   in TestBinDebugInfo()

gpu_debug_allocator_test.cc
    245  EXPECT_EQ(256, a.AllocatedSize(t1));   in TEST()

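The gpu_debug_allocator.cc hit at line 126 is notable because it does not forward the pointer unchanged: GPUDebugAllocator appears to hand out pointers that sit past a guard region it places in front of each allocation, so AllocatedSize first steps the pointer back before delegating to the base allocator (the listing truncates the exact offset expression). The sketch below reproduces only that pointer-adjustment idea; kGuardBytes, BaseAllocator, and DebugAllocator are invented names, and the real class works on GPU memory with its own mask layout and size arithmetic.

#include <cstddef>
#include <cstdlib>
#include <cstring>
#include <unordered_map>

// Hypothetical guard size; the real GPUDebugAllocator has its own mask layout.
constexpr size_t kGuardBytes = 64;

// Toy base allocator that tracks the sizes of the raw regions it hands out.
class BaseAllocator {
 public:
  void* AllocateRaw(size_t n) {
    void* p = std::malloc(n);
    sizes_[p] = n;
    return p;
  }
  void DeallocateRaw(void* p) {
    sizes_.erase(p);
    std::free(p);
  }
  size_t AllocatedSize(const void* p) const { return sizes_.at(p); }

 private:
  std::unordered_map<const void*, size_t> sizes_;
};

// Debug wrapper: allocates extra space, fills guard bytes before and after the
// user region, and returns a pointer *past* the front guard. AllocatedSize must
// therefore step back to the base pointer before delegating, which is the shape
// of the expression at gpu_debug_allocator.cc:126.
class DebugAllocator {
 public:
  explicit DebugAllocator(BaseAllocator* base) : base_(base) {}

  void* AllocateRaw(size_t n) {
    char* raw = static_cast<char*>(base_->AllocateRaw(n + 2 * kGuardBytes));
    std::memset(raw, 0xAB, kGuardBytes);                    // front guard
    std::memset(raw + kGuardBytes + n, 0xAB, kGuardBytes);  // back guard
    return raw + kGuardBytes;                               // user pointer
  }
  void DeallocateRaw(void* ptr) {
    base_->DeallocateRaw(static_cast<char*>(ptr) - kGuardBytes);
  }
  size_t AllocatedSize(const void* ptr) const {
    // Translate the user pointer back to the base allocation before asking.
    return base_->AllocatedSize(static_cast<const char*>(ptr) - kGuardBytes);
  }

 private:
  BaseAllocator* base_;
};

Note that this sketch reports the base allocation size including both guards; whether the real implementation subtracts the guard bytes from the result is not visible in the truncated snippet.
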
/external/tensorflow/tensorflow/core/common_runtime/

process_state.h
    132  size_t AllocatedSize(const void* p) override { return a_->AllocatedSize(p); }   AllocatedSize() function
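Within common_runtime the hits show three different answers to the same query: process_state.h forwards to a wrapped allocator, scoped_allocator.h simply returns 0, and BFCAllocator provides a real implementation (bfc_allocator.cc, line 536), which conceptually maps a pointer back to the chunk it was carved from and reports that chunk's size. The following is only a rough illustration of that pointer-to-chunk lookup under an assumed 256-byte rounding rule; BFC's actual bins, chunk handles, and region management are far more involved.

#include <cstddef>
#include <cstdlib>
#include <map>

// Very rough stand-in for BFC-style bookkeeping: every pointer handed out is
// backed by a "chunk" whose size may be larger than what the caller asked for,
// because requests are rounded up and chunks are split or merged.
class ChunkedAllocator {
 public:
  void* AllocateRaw(size_t num_bytes) {
    // Assumed rounding rule: round up to a 256-byte multiple.
    size_t rounded = ((num_bytes + 255) / 256) * 256;
    void* p = std::malloc(rounded);
    chunks_[p] = Chunk{num_bytes, rounded};
    return p;
  }
  void DeallocateRaw(void* ptr) {
    chunks_.erase(ptr);
    std::free(ptr);
  }
  size_t RequestedSize(const void* ptr) const { return chunks_.at(ptr).requested; }
  // The answer a BFC-style allocator gives: the size of the backing chunk.
  size_t AllocatedSize(const void* ptr) const { return chunks_.at(ptr).allocated; }

 private:
  struct Chunk {
    size_t requested;
    size_t allocated;
  };
  std::map<const void*, Chunk> chunks_;
};

The 256-byte rounding is an assumption chosen to echo the EXPECT_EQ(256, a.AllocatedSize(t1)) checks in the GPU tests above; it is not taken from the BFC implementation itself.
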
/external/tensorflow/tensorflow/compiler/xla/service/

hlo_rematerialization.cc
    446  int64 AllocatedSize(BufferId buffer_id) const {   AllocatedSize() function in xla::(anonymous namespace)::MemoryUsageTracker
    582  memory_usage_ += AllocatedSize(buffer_id);   in BeginInstruction()
    609  memory_usage_ -= AllocatedSize(buffer_id);   in EndInstruction()
    620  memory_usage_ -= AllocatedSize(buffer_id);   in EndInstruction()
    663  memory_reduced += AllocatedSize(buffer_id);   in MemoryReducedIfRematerialized()
    674  memory_reduced -= AllocatedSize(buffer_id);   in MemoryReducedIfRematerialized()
    701  memory_usage_ += AllocatedSize(buffer.id);   in AddRematerializedInstruction()
    728  memory_usage_ -= AllocatedSize(old_buffer.id);   in AddRematerializedInstruction()

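The hlo_rematerialization.cc hits are bookkeeping rather than allocation: MemoryUsageTracker charges AllocatedSize(buffer_id) to memory_usage_ for the buffers an instruction defines (BeginInstruction) and credits it back when buffers die (EndInstruction), and MemoryReducedIfRematerialized runs the same arithmetic hypothetically. Here is a stripped-down sketch of that charge/credit scheme; the Buffer struct, BufferId alias, and liveness rule are simplifications, not XLA's actual buffer analysis.

#include <cstdint>
#include <utility>
#include <vector>

// Hypothetical buffer record; the real MemoryUsageTracker works over XLA's
// buffer analysis, not this struct.
struct Buffer {
  int64_t size = 0;         // bytes this buffer occupies while live
  int unfinished_uses = 0;  // not-yet-executed instructions that read it
  bool live = false;
};

using BufferId = int;

class MemoryUsageTracker {
 public:
  explicit MemoryUsageTracker(std::vector<Buffer> buffers)
      : buffers_(std::move(buffers)) {}

  // Mirrors the pattern at hlo_rematerialization.cc:582: buffers defined by the
  // instruction start counting against memory_usage_ when it begins.
  void BeginInstruction(const std::vector<BufferId>& defined) {
    for (BufferId id : defined) {
      buffers_[id].live = true;
      memory_usage_ += AllocatedSize(id);
    }
  }

  // Mirrors hlo_rematerialization.cc:609/620: buffers whose last use just
  // executed stop counting against memory_usage_.
  void EndInstruction(const std::vector<BufferId>& used) {
    for (BufferId id : used) {
      if (--buffers_[id].unfinished_uses == 0) {
        buffers_[id].live = false;
        memory_usage_ -= AllocatedSize(id);
      }
    }
  }

  int64_t memory_usage() const { return memory_usage_; }

 private:
  // In the real tracker this consults buffer sizes from HLO analysis; here it
  // is just a field lookup.
  int64_t AllocatedSize(BufferId id) const { return buffers_[id].size; }

  std::vector<Buffer> buffers_;
  int64_t memory_usage_ = 0;
};
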
/external/llvm/lib/Analysis/

InlineCost.cpp
    105  uint64_t AllocatedSize;   member of (anonymous namespace)::CallAnalyzer
    214  AllocatedSize(0), NumInstructions(0), NumVectorInstructions(0),   in CallAnalyzer()
    344  AllocatedSize = SaturatingMultiplyAdd(   in visitAlloca()
    345  AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);   in visitAlloca()
    354  AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);   in visitAlloca()
   1135  AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)   in analyzeBlock()

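Both copies of InlineCost.cpp (the one above and the swiftshader LLVM 7.0 copy below) accumulate alloca bytes into CallAnalyzer::AllocatedSize with LLVM's saturating helpers: SaturatingMultiplyAdd for a constant array count times the element's allocation size, SaturatingAdd for a plain alloca, and analyzeBlock later refuses to keep analyzing a recursive caller once the total exceeds InlineConstants::TotalAllocaSizeRecursiveCaller. The sketch below imitates that accumulation with hand-rolled saturating helpers standing in for llvm/Support/MathExtras.h; the threshold value and the visit function names are placeholders, not LLVM's API.

#include <cstdint>
#include <limits>

// Stand-ins for llvm::SaturatingAdd / llvm::SaturatingMultiplyAdd from
// llvm/Support/MathExtras.h: overflow clamps to UINT64_MAX instead of wrapping.
static uint64_t SatAdd(uint64_t a, uint64_t b) {
  uint64_t r = a + b;
  return r < a ? std::numeric_limits<uint64_t>::max() : r;
}
static uint64_t SatMulAdd(uint64_t x, uint64_t y, uint64_t acc) {
  if (y != 0 && x > std::numeric_limits<uint64_t>::max() / y)
    return std::numeric_limits<uint64_t>::max();
  return SatAdd(x * y, acc);
}

// Placeholder threshold playing the role of
// InlineConstants::TotalAllocaSizeRecursiveCaller (the real constant is
// defined in LLVM's inline-cost headers).
constexpr uint64_t kTotalAllocaSizeRecursiveCaller = 1024;

struct CallAnalyzerSketch {
  uint64_t AllocatedSize = 0;

  // Constant-sized array alloca: count * element size, saturating, echoing the
  // SaturatingMultiplyAdd call in visitAlloca.
  void visitArrayAlloca(uint64_t count, uint64_t elem_size) {
    AllocatedSize = SatMulAdd(count, elem_size, AllocatedSize);
  }

  // Plain alloca of a single object: add its type's allocation size, echoing
  // the SaturatingAdd call in visitAlloca.
  void visitScalarAlloca(uint64_t type_alloc_size) {
    AllocatedSize = SatAdd(type_alloc_size, AllocatedSize);
  }

  // The check analyzeBlock applies for recursive callers: stop considering the
  // call for inlining once the accumulated alloca bytes pass the threshold.
  bool exceedsRecursiveCallerBudget() const {
    return AllocatedSize > kTotalAllocaSizeRecursiveCaller;
  }
};
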
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Analysis/

InlineCost.cpp
    143  uint64_t AllocatedSize;   member of (anonymous namespace)::CallAnalyzer
    285  HasUninlineableIntrinsic(false), UsesVarArgs(false), AllocatedSize(0),   in CallAnalyzer()
    426  AllocatedSize = SaturatingMultiplyAdd(   in visitAlloca()
    427  AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);   in visitAlloca()
    435  AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);   in visitAlloca()
   1594  AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller) {   in analyzeBlock()