Searched refs:DEVICE_MEMORY (Results 1–18 of 18), sorted by relevance

/external/tensorflow/tensorflow/core/framework/
memory_types_test.cc:71 MemoryTypeVector({DEVICE_MEMORY, DEVICE_MEMORY, HOST_MEMORY, HOST_MEMORY, in TEST()
72 HOST_MEMORY, DEVICE_MEMORY, DEVICE_MEMORY, in TEST()
73 DEVICE_MEMORY, HOST_MEMORY, HOST_MEMORY, HOST_MEMORY}), in TEST()
76 EXPECT_EQ(MemoryTypeVector({DEVICE_MEMORY, DEVICE_MEMORY, DEVICE_MEMORY, in TEST()
77 DEVICE_MEMORY, DEVICE_MEMORY, DEVICE_MEMORY}), in TEST()
83 MemoryTypeVector({HOST_MEMORY, DEVICE_MEMORY, HOST_MEMORY, HOST_MEMORY, in TEST()
88 DEVICE_MEMORY, DEVICE_MEMORY, DEVICE_MEMORY}), in TEST()
memory_types.cc:72 : DEVICE_MEMORY; in MTypeFromDType()
114 inp_mtypes->resize(GetTotal(inp_names), DEVICE_MEMORY); in MemoryTypesForNode()
115 out_mtypes->resize(GetTotal(out_names), DEVICE_MEMORY); in MemoryTypesForNode()
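memory_types.cc:72 is the fallback branch of the dtype-to-memory-space mapping, and lines 114–115 show MemoryTypesForNode defaulting every input and output to DEVICE_MEMORY before per-kernel overrides apply. A minimal sketch of the mapping, assuming int32 is the host-resident special case (an inference from the int32 checks in constant_folding.cc further down, not the exact TensorFlow predicate):

    // Sketch only: int32 (and, in the real code, a few other always-on-host
    // dtypes) maps to HOST_MEMORY; everything else defaults to DEVICE_MEMORY.
    MemoryType MTypeFromDType(const DataType dtype) {
      return (dtype == DT_INT32) ? HOST_MEMORY : DEVICE_MEMORY;
    }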
types.h:48 DEVICE_MEMORY = 0, enumerator
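The hit at types.h:48 is the first enumerator of TensorFlow's two-value MemoryType enum; reconstructed here with its HOST_MEMORY counterpart, which appears throughout the other results:

    // As declared in tensorflow/core/framework/types.h.
    enum MemoryType {
      DEVICE_MEMORY = 0,  // tensor lives in the device's (e.g. GPU) memory
      HOST_MEMORY = 1,    // tensor lives in CPU-addressable host memory
    };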
op_kernel.cc:571 type, shape, DEVICE_MEMORY, allocator_attr); in forward_input_or_allocate_temp()
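op_kernel.cc:571 passes DEVICE_MEMORY as the memory type expected of a forwardable input. From the kernel author's side the call looks roughly like this (a usage sketch; forward_input_or_allocate_temp is a real OpKernelContext method, but the indices, dtype, and shape are illustrative):

    // Reuse input 0's buffer for a scratch tensor when it is exclusively
    // owned, otherwise allocate a fresh device-memory temp.
    Tensor tmp;
    OP_REQUIRES_OK(ctx, ctx->forward_input_or_allocate_temp(
                            {0}, input.dtype(), input.shape(),
                            AllocatorAttributes(), &tmp));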
/external/swiftshader/src/Vulkan/
VkCommandPool.cpp:34 vk::destroy(commandBuffer, DEVICE_MEMORY); in destroy()
50 DispatchableCommandBuffer* commandBuffer = new (DEVICE_MEMORY) DispatchableCommandBuffer(level); in allocateCommandBuffers()
59 vk::destroy(pCommandBuffers[j], DEVICE_MEMORY); in allocateCommandBuffers()
79 vk::destroy(pCommandBuffers[i], DEVICE_MEMORY); in freeCommandBuffers()
VkDeviceMemory.cpp:30 vk::deallocate(buffer, DEVICE_MEMORY); in destroy()
43 buffer = vk::allocate(size, REQUIRED_MEMORY_ALIGNMENT, DEVICE_MEMORY); in allocate()
VkObject.hpp:27 static constexpr VkAllocationCallbacks* DEVICE_MEMORY = nullptr; variable
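Unlike the TensorFlow hits, SwiftShader's DEVICE_MEMORY is not a memory-space enum: VkObject.hpp:27 defines it as a null VkAllocationCallbacks*, a tag meaning "driver-internal allocation, bypass application callbacks". A sketch of the idiom under that assumption (the real vk::allocate and operator-new signatures may differ):

    #include <cstddef>
    #include <cstdlib>
    #include <vulkan/vulkan_core.h>

    // A null pAllocator (DEVICE_MEMORY) selects the driver's own allocator;
    // a non-null one routes through the application's Vulkan callbacks.
    void *allocate(size_t bytes, size_t alignment,
                   const VkAllocationCallbacks *pAllocator) {
      if (pAllocator) {
        return pAllocator->pfnAllocation(pAllocator->pUserData, bytes, alignment,
                                         VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
      }
      void *ptr = nullptr;
      (void)posix_memalign(&ptr, alignment, bytes);  // driver-internal path
      return ptr;
    }

    // Enables the placement-new form seen at VkCommandPool.cpp:50:
    //   new (DEVICE_MEMORY) DispatchableCommandBuffer(level);
    void *operator new(size_t bytes, const VkAllocationCallbacks *pAllocator) {
      return allocate(bytes, alignof(std::max_align_t), pAllocator);
    }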
/external/tensorflow/tensorflow/core/common_runtime/
memory_types.cc:82 DEVICE_MEMORY); in ProcessMemoryTypes()
84 DEVICE_MEMORY); in ProcessMemoryTypes()
163 if (((sm == HOST_MEMORY) && (dm == DEVICE_MEMORY)) || in EnsureMemoryTypes()
164 ((sm == DEVICE_MEMORY) && (dm == HOST_MEMORY))) { in EnsureMemoryTypes()
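Lines 163–164 catch the interesting case: a graph edge whose two endpoints disagree about memory space. Restated as a predicate (how EnsureMemoryTypes then repairs the edge, e.g. by inserting a copy between host and device, is an assumption about the surrounding code):

    // sm is the source output's memory type, dm the destination input's;
    // any host/device disagreement across the edge needs fixing up.
    bool MemoryTypesMismatch(MemoryType sm, MemoryType dm) {
      return (sm == HOST_MEMORY && dm == DEVICE_MEMORY) ||
             (sm == DEVICE_MEMORY && dm == HOST_MEMORY);
    }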
memory_types_test.cc:82 EXPECT_EQ(memory_type, DEVICE_MEMORY); in TEST()
constant_folding.cc:505 (memory_type == DEVICE_MEMORY && is_int32)) { in ReplaceTensorWithConstant()
/external/tensorflow/tensorflow/compiler/jit/
create_xla_launch_op_test.cc:111 EXPECT_EQ(DEVICE_MEMORY, kernel_->input_memory_types()[0]); in TEST_F()
116 EXPECT_EQ(DEVICE_MEMORY, kernel_->output_memory_types()[0]); in TEST_F()
create_xla_launch_op.cc:178 MemoryTypeVector input_memory_types(fbody->arg_types.size(), DEVICE_MEMORY); in CreateXlaLaunchOp()
217 MemoryTypeVector output_memory_types(fbody->ret_types.size(), DEVICE_MEMORY); in CreateXlaLaunchOp()
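Lines 178 and 217 start every argument and result of the compiled function in device memory, which is exactly what the tests at create_xla_launch_op_test.cc:111/116 above assert. A hedged sketch of the pattern (the constant-argument override and its index list are assumptions, not lifted from this file):

    // Default everything to DEVICE_MEMORY, then pin exceptions to host.
    MemoryTypeVector input_memory_types(num_args, DEVICE_MEMORY);
    for (int i : constant_arg_indices) {    // hypothetical index list
      input_memory_types[i] = HOST_MEMORY;  // compile-time constants read on host
    }
    MemoryTypeVector output_memory_types(num_rets, DEVICE_MEMORY);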
partially_decluster_pass.cc:72 if (output_mtypes[e->src_output()] == DEVICE_MEMORY) { in FindNodesToDecluster()
/external/tensorflow/tensorflow/core/kernels/
assign_op.h:104 rhs.dtype(), rhs.shape(), DEVICE_MEMORY, attr); in Compute()
list_kernels.cc:544 DEVICE_MEMORY /* input is always on DEVICE_MEMORY */, attr); in Compute()
scatter_nd_op.cc:200 0, 0, input.dtype(), shape, DEVICE_MEMORY, AllocatorAttributes()); in Compute()
resource_variable_ops.cc:423 DEVICE_MEMORY /* HOST_MEMORY is only reserved for special cases */, in Compute()
list_kernels.h:937 DEVICE_MEMORY /* input is always on DEVICE_MEMORY */, attr); in Compute()
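All of the kernel hits above allocate or forward scratch tensors with DEVICE_MEMORY as the expected placement, and the inline comments at list_kernels.cc:544 and resource_variable_ops.cc:423 note that HOST_MEMORY is the reserved exception. The standard way a kernel opts a tensor out of the device-memory default is a HostMemory constraint at registration; REGISTER_KERNEL_BUILDER and .HostMemory are real TensorFlow APIs, while the op and kernel names below are made up:

    // "shape" is pinned to HOST_MEMORY; every other input and output of this
    // GPU kernel keeps the DEVICE_MEMORY default.
    REGISTER_KERNEL_BUILDER(Name("MyReshapeLikeOp")
                                .Device(DEVICE_GPU)
                                .HostMemory("shape"),
                            MyReshapeLikeOpKernel);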