Searched refs:xla_tensor (Results 1 – 8 of 8) sorted by relevance

/external/tensorflow/tensorflow/compiler/jit/
xla_device_context.cc
133 XlaTensor* xla_tensor = XlaTensor::FromTensor(device_tensor); in CopyCPUTensorToDevice() local
134 CHECK(xla_tensor); in CopyCPUTensorToDevice()
143 TF_RET_CHECK(!xla_tensor->has_shaped_buffer()); in CopyCPUTensorToDevice()
146 xla_tensor->AllocateShapedBuffer(device_tensor->dtype(), shape, client_, in CopyCPUTensorToDevice()
160 << xla_tensor->shaped_buffer().ToString(); in CopyCPUTensorToDevice()
163 stream_->parent(), xla_tensor->shaped_buffer())) { in CopyCPUTensorToDevice()
170 host_to_device_stream_.get(), literal, xla_tensor->shaped_buffer())); in CopyCPUTensorToDevice()
176 xla_tensor->ResetDefinitionEvent(std::move(event), in CopyCPUTensorToDevice()
239 XlaTensor* xla_tensor = XlaTensor::FromTensor(device_tensor); in CopyDeviceTensorToCPU() local
240 xla_tensor->WaitForDefinitionEventOnStream(device_to_host_stream.get()); in CopyDeviceTensorToCPU()
[all …]
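
Read in order, these xla_device_context.cc matches trace the host-to-device copy path: look up the XlaTensor backing the device tensor, allocate its ShapedBuffer on first write, transfer the host data, and record a definition event for later consumers. The sketch below reconstructs that flow; the function signature, the literal setup, and the transfer-manager call are assumptions filled in around the excerpted lines, and client_, stream_, transfer_manager_, and host_to_device_stream_ stand for members of the surrounding device context.

    // Sketch only: CopyCPUTensorToDevice() as suggested by the matches above;
    // not the verbatim TensorFlow implementation.
    Status CopyCPUTensorToDeviceSketch(const Tensor* cpu_tensor, Tensor* device_tensor,
                                       const xla::Shape& shape) {
      XlaTensor* xla_tensor = XlaTensor::FromTensor(device_tensor);
      CHECK(xla_tensor);  // Tensors on an XLA device are always XlaTensor-backed.

      // First write: allocate the device-side ShapedBuffer for this tensor.
      TF_RET_CHECK(!xla_tensor->has_shaped_buffer());
      TF_RETURN_IF_ERROR(xla_tensor->AllocateShapedBuffer(
          device_tensor->dtype(), shape, client_, stream_->parent()->device_ordinal()));

      // Wrap the host buffer as a literal and copy it onto the device buffers
      // (assumed transfer-manager call; the excerpt shows only the arguments).
      xla::BorrowingLiteral literal(
          static_cast<const char*>(DMAHelper::base(cpu_tensor)), shape);
      TF_RETURN_IF_ERROR(transfer_manager_->TransferLiteralToDeviceAsync(
          host_to_device_stream_.get(), literal, xla_tensor->shaped_buffer()));

      // Record an event so consumers on other streams can wait for this copy.
      auto event = std::make_shared<se::Event>(stream_->parent());
      xla_tensor->ResetDefinitionEvent(std::move(event), host_to_device_stream_.get());
      return Status::OK();
    }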

xla_tensor.cc
25 XlaTensor* xla_tensor = in FromTensor() local
27 return xla_tensor; in FromTensor()
32 const XlaTensor* xla_tensor = FromTensor(&tensor); in DeviceMemoryFromTensor() local
33 if (xla_tensor) { in DeviceMemoryFromTensor()
34 CHECK(xla_tensor->has_shaped_buffer()); in DeviceMemoryFromTensor()
35 return xla_tensor->shaped_buffer().root_buffer(); in DeviceMemoryFromTensor()
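
The two helpers in xla_tensor.cc are the basic lookup path used everywhere else in these results: FromTensor() maps a Tensor to its backing XlaTensor (or nullptr), and DeviceMemoryFromTensor() pulls the device memory out of the shaped buffer. A small sketch of the second helper follows; the non-XLA fallback branch is an assumption, since the excerpt only shows the XLA-backed case.

    // Sketch of DeviceMemoryFromTensor() based on lines 32-35 above.
    se::DeviceMemoryBase DeviceMemoryFromTensorSketch(const Tensor& tensor) {
      const XlaTensor* xla_tensor = XlaTensor::FromTensor(&tensor);
      if (xla_tensor) {
        // An XLA-backed tensor must already own a device ShapedBuffer; its
        // root buffer is the tensor's device memory.
        CHECK(xla_tensor->has_shaped_buffer());
        return xla_tensor->shaped_buffer().root_buffer();
      }
      // Fallback (assumption): treat the tensor's flat buffer as raw device memory.
      return se::DeviceMemoryBase(const_cast<char*>(tensor.tensor_data().data()),
                                  tensor.tensor_data().size());
    }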

xla_launch_util.cc
262 XlaTensor* xla_tensor = XlaTensor::FromTensor(t); in PopulateInputs() local
263 CHECK(xla_tensor); in PopulateInputs()
264 xla_tensor->WaitForDefinitionEventOnStream( in PopulateInputs()
276 XlaTensor* xla_tensor = XlaTensor::FromTensor(t); in PopulateInputs() local
277 CHECK(xla_tensor && xla_tensor->has_shaped_buffer()); in PopulateInputs()
278 xla_tensor->shaped_buffer().buffers().ForEachMutableElement( in PopulateInputs()
334 XlaTensor* xla_tensor = XlaTensor::FromTensor(output_tensor); in PopulateXlaTensor() local
335 CHECK(xla_tensor); in PopulateXlaTensor()
336 xla_tensor->set_shaped_buffer(output->TakeSubTree({output_num})); in PopulateXlaTensor()
338 xla_tensor->ResetDefinitionEvent(definition_event, stream); in PopulateXlaTensor()
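
The xla_launch_util.cc matches show the stream-synchronization contract around an XLA launch: before the computation runs, every input's XlaTensor must have its definition event honored on the compute stream; afterwards, every output XlaTensor takes ownership of its subtree of the execution result and records a fresh definition event. A minimal sketch of those two steps, with the parameter lists assumed for illustration:

    // Pre-launch: block the compute stream until this input has been produced.
    void WaitForInputSketch(const Tensor* t, se::Stream* stream) {
      XlaTensor* xla_tensor = XlaTensor::FromTensor(t);
      CHECK(xla_tensor && xla_tensor->has_shaped_buffer());
      xla_tensor->WaitForDefinitionEventOnStream(stream);
    }

    // Post-launch: hand the output buffers to the tensor and mark when they
    // become valid, so later consumers can wait on the same event.
    void PopulateOutputSketch(Tensor* output_tensor, xla::ScopedShapedBuffer* output,
                              int output_num,
                              std::shared_ptr<se::Event> definition_event,
                              se::Stream* stream) {
      XlaTensor* xla_tensor = XlaTensor::FromTensor(output_tensor);
      CHECK(xla_tensor);
      xla_tensor->set_shaped_buffer(output->TakeSubTree({output_num}));
      xla_tensor->ResetDefinitionEvent(definition_event, stream);
    }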

xla_tpu_device.cc
74 const tensorflow::XlaTensor* xla_tensor = in TpuPaddedShapeFn() local
76 if (xla_tensor == nullptr) { in TpuPaddedShapeFn()
81 if (!xla_tensor->has_shaped_buffer()) { in TpuPaddedShapeFn()
88 xla_tensor->shaped_buffer().on_device_shape(); in TpuPaddedShapeFn()

xla_device.cc
68 const tensorflow::XlaTensor* xla_tensor = in DefaultPaddedShapeFn() local
70 if (xla_tensor == nullptr) { in DefaultPaddedShapeFn()
74 const xla::ShapedBuffer& shaped_buffer = xla_tensor->shaped_buffer(); in DefaultPaddedShapeFn()
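
xla_tpu_device.cc and xla_device.cc use the same pattern to compute a padded shape for a device tensor: if there is no backing XlaTensor, or it has no shaped buffer yet, fall back (to an error or to the unpadded shape, depending on the file); otherwise report the on-device shape of its ShapedBuffer. A sketch of the shared branch structure, not the exact code from either file:

    // Sketch of the padded-shape lookup shared by TpuPaddedShapeFn() and
    // DefaultPaddedShapeFn() above; the error message is illustrative.
    Status PaddedShapeFnSketch(const Tensor& tensor, xla::Shape* shape) {
      const tensorflow::XlaTensor* xla_tensor =
          tensorflow::XlaTensor::FromTensor(&tensor);
      if (xla_tensor == nullptr || !xla_tensor->has_shaped_buffer()) {
        // Not (yet) an XLA-backed tensor: the real callers either fail here or
        // fall back to the unpadded TensorShape.
        return errors::Internal("Expected an XlaTensor with a device buffer");
      }
      // The padded layout is simply the on-device shape of the buffer.
      *shape = xla_tensor->shaped_buffer().on_device_shape();
      return Status::OK();
    }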

BUILD
206 name = "xla_tensor",
207 srcs = ["xla_tensor.cc"],
208 hdrs = ["xla_tensor.h"],
225 ":xla_tensor",
387 ":xla_tensor",

/external/tensorflow/tensorflow/core/tpu/kernels/
tpu_execute_op.cc
215 XlaTensor* xla_tensor = XlaTensor::FromTensor(&tensor); in BuildComputationInputs() local
217 if (xla_tensor == nullptr) { in BuildComputationInputs()
231 const xla::Shape& xla_shape = xla_tensor->shaped_buffer().on_host_shape(); in BuildComputationInputs()
315 XlaTensor* xla_tensor = XlaTensor::FromTensor(&tensor); in BuildComputationInputs() local
319 if (xla_tensor == nullptr) { in BuildComputationInputs()
332 &xla_tensor->shaped_buffer()); in BuildComputationInputs()
333 xla_tensor->WaitForDefinitionEventOnStream(stream); in BuildComputationInputs()
479 XlaTensor* xla_tensor = XlaTensor::FromTensor(output_tensor); in AllocateOutputTensors() local
480 xla_tensor->set_shaped_buffer(std::move(shaped_buffer)); in AllocateOutputTensors()
481 xla_tensor->ResetDefinitionEvent(definition_event, stream); in AllocateOutputTensors()

BUILD
691 "//tensorflow/compiler/jit:xla_tensor",