
Searched refs: device_memory (Results 1 – 25 of 37), sorted by relevance


/external/tensorflow/tensorflow/stream_executor/
temporary_memory_manager.cc
31 DeviceMemoryBase device_memory = it->first; in ForceDeallocateAll() local
32 stream_->parent()->Deallocate(&device_memory); in ForceDeallocateAll()
37 const DeviceMemoryBase& device_memory, uint64 generation, bool must_exist) { in MarkFinalized() argument
39 auto it = records_.find(device_memory); in MarkFinalized()
55 DeviceMemoryBase device_memory = it->first; in DeallocateFinalizedTemporaries() local
56 stream_->parent()->Deallocate(&device_memory); in DeallocateFinalizedTemporaries()
66 bool TemporaryMemoryManager::IsFinalized(const DeviceMemoryBase& device_memory, in IsFinalized() argument
69 auto it = records_.find(device_memory); in IsFinalized()
82 bool TemporaryMemoryManager::HasAllocated(const DeviceMemoryBase& device_memory, in HasAllocated() argument
85 auto it = records_.find(device_memory); in HasAllocated()
[all …]
temporary_device_memory.h
65 const DeviceMemoryBase& device_memory() const;
93 TemporaryDeviceMemoryBase(Stream* parent, DeviceMemoryBase device_memory,
119 const DeviceMemory<T>& device_memory() const { in device_memory() function
122 TemporaryDeviceMemoryBase::device_memory()); in device_memory()
BUILD
77 "device_memory.h",
132 ":device_memory",
243 ":device_memory",
257 ":device_memory",
292 name = "device_memory",
293 hdrs = ["device_memory.h"],
302 ":device_memory",
319 "device_memory.h",
409 ":device_memory",
439 ":device_memory",
[all …]
temporary_device_memory.cc
34 const DeviceMemoryBase& TemporaryDeviceMemoryBase::device_memory() const { in device_memory() function in stream_executor::TemporaryDeviceMemoryBase
58 Stream* parent, DeviceMemoryBase device_memory, in TemporaryDeviceMemoryBase() argument
60 : device_memory_(device_memory), in TemporaryDeviceMemoryBase()
temporary_memory_manager.h
77 void MarkFinalized(const DeviceMemoryBase& device_memory, uint64 generation,
91 bool IsFinalized(const DeviceMemoryBase& device_memory,
99 bool HasAllocated(const DeviceMemoryBase& device_memory,
scratch_allocator.cc
36 return temporary_->device_memory(); in AllocateBytes()
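
For orientation, the sketch below condenses the bookkeeping pattern the stream_executor hits above outline: temporary buffers are tracked in a record map keyed by the device buffer, marked finalized, and later freed through the stream's parent executor. All names here (DeviceMemory, Executor, Record, TemporaryTracker) are simplified stand-ins for illustration, not the real StreamExecutor classes.

// Standalone sketch (C++14 or later); simplified stand-ins for
// DeviceMemoryBase, StreamExecutor, and TemporaryMemoryManager::records_.
#include <cstdint>
#include <iostream>
#include <map>

struct DeviceMemory {
  void* opaque = nullptr;  // base pointer of the device allocation
  uint64_t size = 0;       // allocation size in bytes
  bool operator<(const DeviceMemory& other) const { return opaque < other.opaque; }
};

struct Executor {
  void Deallocate(DeviceMemory* mem) {
    std::cout << "freeing " << mem->size << " bytes\n";
    *mem = DeviceMemory{};  // reset after free, as the real API does
  }
};

// Per-allocation record, analogous to the generation/finalized bookkeeping
// that MarkFinalized()/IsFinalized() consult in the hits above.
struct Record {
  uint64_t generation = 0;
  bool finalized = false;
};

class TemporaryTracker {
 public:
  explicit TemporaryTracker(Executor* parent) : parent_(parent) {}

  void Track(DeviceMemory mem, uint64_t generation) {
    records_[mem] = Record{generation, /*finalized=*/false};
  }

  void MarkFinalized(const DeviceMemory& mem) {
    auto it = records_.find(mem);
    if (it != records_.end()) it->second.finalized = true;
  }

  // Mirrors DeallocateFinalizedTemporaries(): free only finalized entries.
  void DeallocateFinalized() {
    for (auto it = records_.begin(); it != records_.end();) {
      if (it->second.finalized) {
        DeviceMemory mem = it->first;  // copy the key, as in the hits
        parent_->Deallocate(&mem);
        it = records_.erase(it);
      } else {
        ++it;
      }
    }
  }

 private:
  Executor* parent_;
  std::map<DeviceMemory, Record> records_;
};

int main() {
  Executor exec;
  TemporaryTracker tracker(&exec);
  int dummy = 0;
  tracker.Track(DeviceMemory{&dummy, 128}, /*generation=*/1);
  tracker.MarkFinalized(DeviceMemory{&dummy, 128});
  tracker.DeallocateFinalized();  // prints "freeing 128 bytes"
}
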
/external/tensorflow/tensorflow/compiler/xla/pjrt/
tracked_device_buffer_test.cc
37 se::OwningDeviceMemory device_memory, in MakeArray()
42 device_buffers.push_back(device_memory.Release()); in MakeArray()
61 ASSERT_EQ(a_buffer->device_memory().size(), 1); in TEST()
62 ASSERT_EQ(b_buffer->device_memory().size(), 1); in TEST()
63 ASSERT_EQ(c_buffer->device_memory().size(), 1); in TEST()
65 a_buffer->device_memory()[0], b_buffer->device_memory()[0], in TEST()
66 c_buffer->device_memory()[0]}; in TEST()
108 EXPECT_EQ(device_buffer->device_memory().size(), in TEST()
tracked_device_buffer.h
168 absl::InlinedVector<se::DeviceMemoryBase, 1>& device_memory() { in device_memory() function
171 const absl::InlinedVector<se::DeviceMemoryBase, 1>& device_memory() const { in device_memory() function
208 absl::Span<se::DeviceMemoryBase const> device_memory,
tracked_device_buffer.cc
165 absl::Span<se::DeviceMemoryBase const> device_memory, in TrackedDeviceBuffer() argument
170 device_memory_(device_memory.begin(), device_memory.end()), in TrackedDeviceBuffer()
BUILD
86 "//tensorflow/stream_executor:device_memory",
106 "//tensorflow/stream_executor:device_memory",
252 "//tensorflow/stream_executor:device_memory",
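
As a rough illustration of the accessor shape in the tracked_device_buffer.h hits above, here is a minimal sketch; std::vector stands in for absl::InlinedVector, and the DeviceMemoryBase struct and TrackedBuffer class are hypothetical simplifications, not the real PJRT types.

// Standalone sketch of a buffer wrapper exposing its device buffers.
#include <cstdint>
#include <iostream>
#include <vector>

struct DeviceMemoryBase {
  void* opaque = nullptr;
  uint64_t size = 0;
};

class TrackedBuffer {
 public:
  // Copies the range of device buffers it is handed, mirroring
  // device_memory_(device_memory.begin(), device_memory.end()) in the hits.
  TrackedBuffer(const DeviceMemoryBase* begin, const DeviceMemoryBase* end)
      : device_memory_(begin, end) {}

  // Mutable and const accessors, as declared in tracked_device_buffer.h.
  std::vector<DeviceMemoryBase>& device_memory() { return device_memory_; }
  const std::vector<DeviceMemoryBase>& device_memory() const {
    return device_memory_;
  }

 private:
  std::vector<DeviceMemoryBase> device_memory_;
};

int main() {
  int backing = 0;
  DeviceMemoryBase buffers[] = {{&backing, 64}};
  TrackedBuffer tracked(buffers, buffers + 1);
  std::cout << tracked.device_memory().size() << "\n";  // prints 1
}
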
/external/tensorflow/tensorflow/compiler/xla/service/
allocation_tracker.cc
207 se::DeviceMemoryBase device_memory, int device_ordinal) { in AddAllocationOrIncrementRefCount() argument
209 auto it = allocation_map.find(device_memory.opaque()); in AddAllocationOrIncrementRefCount()
211 allocation_map[device_memory.opaque()] = { in AddAllocationOrIncrementRefCount()
212 se::OwningDeviceMemory(device_memory, device_ordinal, in AddAllocationOrIncrementRefCount()
220 Status AllocationTracker::DecrementRefCount(se::DeviceMemoryBase device_memory, in DecrementRefCount() argument
223 auto it = allocation_map.find(device_memory.opaque()); in DecrementRefCount()
228 TF_RETURN_IF_ERROR(allocation.device_memory.Free()); in DecrementRefCount()
allocation_tracker.h
80 se::OwningDeviceMemory device_memory; member
103 void AddAllocationOrIncrementRefCount(se::DeviceMemoryBase device_memory,
109 Status DecrementRefCount(se::DeviceMemoryBase device_memory,
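
The allocation_tracker hits above suggest a ref-count map keyed by the raw device pointer (device_memory.opaque()). The sketch below reproduces that pattern with simplified stand-in types; it is not the real XLA AllocationTracker, and the real code frees an OwningDeviceMemory rather than printing.

// Standalone sketch of ref-counting device allocations by their raw pointer.
#include <cstdint>
#include <iostream>
#include <map>

struct DeviceMemoryBase {
  void* ptr = nullptr;
  void* opaque() const { return ptr; }
};

class AllocationTracker {
 public:
  void AddOrIncrement(DeviceMemoryBase device_memory) {
    auto it = allocation_map_.find(device_memory.opaque());
    if (it == allocation_map_.end()) {
      allocation_map_[device_memory.opaque()] = 1;  // first reference
    } else {
      ++it->second;
    }
  }

  bool Decrement(DeviceMemoryBase device_memory) {
    auto it = allocation_map_.find(device_memory.opaque());
    if (it == allocation_map_.end()) return false;  // unknown buffer
    if (--it->second == 0) {
      // The real code frees the owning buffer here (allocation.device_memory.Free()).
      std::cout << "freeing " << device_memory.opaque() << "\n";
      allocation_map_.erase(it);
    }
    return true;
  }

 private:
  std::map<void*, int> allocation_map_;  // refcount per device pointer
};

int main() {
  int backing = 0;
  DeviceMemoryBase mem{&backing};
  AllocationTracker tracker;
  tracker.AddOrIncrement(mem);
  tracker.AddOrIncrement(mem);
  tracker.Decrement(mem);  // count drops to 1, nothing freed
  tracker.Decrement(mem);  // count hits 0, buffer "freed"
}
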
transfer_manager.cc
302 se::DeviceMemoryBase device_memory = device_buffer.buffer(index); in WriteTupleIndexTablesAsync() local
304 device_memory.size()); in WriteTupleIndexTablesAsync()
315 &device_memory); in WriteTupleIndexTablesAsync()
328 se::DeviceMemoryBase device_memory = device_buffer.buffer({}); in WriteRootTupleIndexTable() local
330 device_memory.size()); in WriteRootTupleIndexTable()
338 stream, elements, device_buffer.on_device_shape(), &device_memory); in WriteRootTupleIndexTable()
347 se::DeviceMemoryBase device_memory = in WriteRootTupleIndexTable() local
350 device_memory.size()); in WriteRootTupleIndexTable()
358 &device_memory); in WriteRootTupleIndexTable()
generic_transfer_manager.cc
111 se::DeviceMemoryBase device_memory = device_buffer.buffer(index); in TransferLiteralToDeviceAsync() local
114 device_memory.size()); in TransferLiteralToDeviceAsync()
125 &device_memory); in TransferLiteralToDeviceAsync()
134 &device_memory)); in TransferLiteralToDeviceAsync()
/external/tensorflow/tensorflow/compiler/tf2tensorrt/utils/
trt_engine_utils.cc
51 void* device_memory = nullptr; in Create() local
62 device_memory = allocator->allocate(device_memory_size, in Create()
64 if (device_memory == nullptr) { in Create()
69 execution_context->setDeviceMemory(device_memory); in Create()
71 return ExecutionContext(allocator, device_memory, execution_context); in Create()
trt_engine_utils.h
71 ExecutionContext(TRTBaseAllocator* allocator, void* device_memory, in ExecutionContext() argument
74 device_memory_(device_memory), in ExecutionContext()
/external/tensorflow/tensorflow/stream_executor/tpu/
tpu_executable_interface.cc
117 MaybeOwningDeviceMemory* device_memory = in AllocateOutputMemoryWithInputReuse() local
119 if (auto owning = device_memory->Release()) { in AllocateOutputMemoryWithInputReuse()
125 *device_memory = device_memory_base; in AllocateOutputMemoryWithInputReuse()
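
The tpu_executable_interface.cc hit above shows a release-or-alias idiom: if the output slot owns a donated buffer, Release() hands it over for reuse, and the slot is then re-pointed at an unowned DeviceMemoryBase. A hedged sketch of that idiom follows; MaybeOwningDeviceMemory here only models ownership and is not the real XLA class.

// Standalone sketch (C++17) of a slot that either owns or aliases device memory.
#include <cstdint>
#include <iostream>
#include <optional>

struct DeviceMemoryBase {
  void* opaque = nullptr;
  uint64_t size = 0;
};

class MaybeOwningDeviceMemory {
 public:
  MaybeOwningDeviceMemory() = default;
  explicit MaybeOwningDeviceMemory(DeviceMemoryBase owned)
      : memory_(owned), owns_(true) {}

  // Gives up ownership if we have it; afterwards the slot only aliases.
  std::optional<DeviceMemoryBase> Release() {
    if (!owns_) return std::nullopt;
    owns_ = false;
    return memory_;
  }

  // Re-point the slot at an unowned buffer, mirroring
  // `*device_memory = device_memory_base;` in the hit above.
  MaybeOwningDeviceMemory& operator=(DeviceMemoryBase unowned) {
    memory_ = unowned;
    owns_ = false;
    return *this;
  }

 private:
  DeviceMemoryBase memory_;
  bool owns_ = false;
};

int main() {
  int backing = 0;
  MaybeOwningDeviceMemory slot(DeviceMemoryBase{&backing, 256});
  if (auto owning = slot.Release()) {
    std::cout << "reusing " << owning->size << " bytes for the output\n";
    slot = *owning;  // output now aliases the donated input buffer
  }
}
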
/external/crosvm/rutabaga_gfx/src/rutabaga_gralloc/
vulkano_gralloc.rs
350 let device_memory = in allocate_memory() localVariable
356 let descriptor = device_memory.export_fd(handle_type)?.into(); in allocate_memory()
392 let device_memory = in import_and_map() localVariable
396 let mapping = DeviceMemoryMapping::new(device.clone(), device_memory.clone(), 0, size, 0)?; in import_and_map()
/external/tensorflow/tensorflow/compiler/xla/tests/
buffer_donation_test.cc
112 [&](const ShapeIndex& index, MaybeOwningDeviceMemory* device_memory) { in RunAndCheck() argument
114 *device_memory = se::OwningDeviceMemory( in RunAndCheck()
118 *device_memory = input_buffers.element(index); in RunAndCheck()
/external/tensorflow/tensorflow/compiler/xla/service/gpu/
infeed_manager.h
57 se::DeviceMemoryBase* device_memory() { return device_memory_.ptr(); } in device_memory() function
infeed_thunk.cc
58 stream.ThenMemcpy(&dest_address, *buffer.device_memory(), buffer.length()); in ExecuteOnStream()
gpu_transfer_manager.cc
109 stream->ThenMemcpy(buffer.device_memory(), source, size); in TransferBufferToInfeedInternal()
/external/tensorflow/tensorflow/compiler/xla/service/cpu/
cpu_transfer_manager.cc
55 se::DeviceMemoryBase* device_memory() { return &device_memory_; } in device_memory() function in xla::__anon31ee37eb0111::CpuInfeedBuffer
169 /*host_src=*/source, /*size=*/size, queued_buffer->device_memory()); in TransferBufferToInfeedInternal()
/external/tensorflow/tensorflow/stream_executor/rocm/
rocm_blas.h
134 *device_memory,
/external/perfetto/protos/perfetto/trace/gpu/
vulkan_memory_event.proto
99 optional fixed64 device_memory = 17; field
