/external/tensorflow/tensorflow/compiler/xla/service/ |
D | transfer_manager.cc |
      158  const Shape on_device_shape = HostShapeToDeviceShape(literal.shape());  in TransferArrayToDeviceAsync() local
      159  TF_RET_CHECK(on_device_shape.IsArray())  in TransferArrayToDeviceAsync()
      162  << " is not an array: " << ShapeUtil::HumanString(on_device_shape);  in TransferArrayToDeviceAsync()
      163  if (dest.size() < GetByteSizeRequirement(on_device_shape)) {  in TransferArrayToDeviceAsync()
      167  dest.size(), GetByteSizeRequirement(on_device_shape));  in TransferArrayToDeviceAsync()
      169  ShapedBuffer shaped_buffer(/*on_host_shape=*/literal.shape(), on_device_shape,  in TransferArrayToDeviceAsync()
      244  device_buffer.on_device_shape(),  in WriteTupleIndexTablesAsync()
      269  TF_RET_CHECK(device_buffer.on_device_shape().IsTuple());  in WriteRootTupleIndexTable()
      271  TF_RET_CHECK(GetByteSizeRequirement(device_buffer.on_device_shape()) ==  in WriteRootTupleIndexTable()
      276  i < ShapeUtil::TupleElementCount(device_buffer.on_device_shape()); ++i) {  in WriteRootTupleIndexTable()
      [all …]
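Note: a rough sketch of what the TransferArrayToDeviceAsync() fragments above add up to — derive the on-device shape from the host literal, check that it is an array, and verify the destination is large enough for the device layout. This is reconstructed from the matches, not the actual function body; the helper name and the plain size parameter (a se::DeviceMemoryBase in the real code) are illustrative.

    #include "tensorflow/compiler/xla/literal.h"
    #include "tensorflow/compiler/xla/service/transfer_manager.h"
    #include "tensorflow/compiler/xla/shape_util.h"
    #include "tensorflow/compiler/xla/status_macros.h"
    #include "tensorflow/compiler/xla/util.h"

    xla::Status CheckArrayTransfer(xla::TransferManager* transfer_manager,
                                   const xla::Literal& literal,
                                   int64_t dest_size_bytes) {
      // The device may use a different layout than the host-side literal.
      const xla::Shape on_device_shape =
          transfer_manager->HostShapeToDeviceShape(literal.shape());
      TF_RET_CHECK(on_device_shape.IsArray())
          << xla::ShapeUtil::HumanString(literal.shape())
          << " is not an array: " << xla::ShapeUtil::HumanString(on_device_shape);
      // Size requirements are computed against the *device* shape, not the host one.
      if (dest_size_bytes <
          transfer_manager->GetByteSizeRequirement(on_device_shape)) {
        return xla::InvalidArgument(
            "destination buffer is too small for the on-device shape");
      }
      return xla::Status::OK();
    }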
|
D | shaped_buffer.cc |
      35   const Shape& on_device_shape,  in ShapedBuffer() argument
      38   on_device_shape_(on_device_shape),  in ShapedBuffer()
      82   ShapeUtil::HumanStringWithLayout(on_device_shape()),  in ToString()
      85   on_device_shape(),  in ToString()
      107  const Shape& on_device_shape,  in ScopedShapedBuffer() argument
      110  : ShapedBuffer(on_host_shape, on_device_shape, allocator->platform(),  in ScopedShapedBuffer()
      164  xla::ShapeUtil::GetSubshape(on_device_shape(), {index});  in TakeSubTree()
|
D | allocation_tracker.cc |
      80   shaped_buffer.on_device_shape(),  in RegisterInternal()
      113  shaped_buffer->on_device_shape(),  in Unregister()
      151  TF_RET_CHECK(shaped_buffer->on_device_shape().IsTuple());  in DeconstructTuple()
      153  if (ShapeUtil::IsNestedTuple(shaped_buffer->on_device_shape())) {  in DeconstructTuple()
      159  i < ShapeUtil::TupleElementCount(shaped_buffer->on_device_shape());  in DeconstructTuple()
      163  ShapeUtil::GetTupleElementShape(shaped_buffer->on_device_shape(), i),  in DeconstructTuple()
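Note: the DeconstructTuple() matches above follow a common pattern — inspect the on-device shape, reject nested tuples, then walk the top-level elements. A minimal sketch of that walk; the helper name and the logging are illustrative only.

    #include "tensorflow/compiler/xla/service/shaped_buffer.h"
    #include "tensorflow/compiler/xla/shape_util.h"
    #include "tensorflow/core/platform/logging.h"

    void LogTupleElements(const xla::ShapedBuffer& shaped_buffer) {
      const xla::Shape& shape = shaped_buffer.on_device_shape();
      if (!shape.IsTuple() || xla::ShapeUtil::IsNestedTuple(shape)) {
        return;  // Only flat, top-level tuples are walked here.
      }
      for (int64_t i = 0; i < xla::ShapeUtil::TupleElementCount(shape); ++i) {
        const xla::Shape& element = xla::ShapeUtil::GetTupleElementShape(shape, i);
        LOG(INFO) << "tuple element " << i << ": "
                  << xla::ShapeUtil::HumanStringWithLayout(element);
      }
    }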
|
D | shaped_buffer.h |
      45   ShapedBuffer(const Shape& on_host_shape, const Shape& on_device_shape,
      66   const Shape& on_device_shape() const { return on_device_shape_; }  in on_device_shape() function
      137  const Shape& on_device_shape,
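Note: the declarations matched here are the ShapedBuffer constructor and the on_device_shape() accessor. A minimal usage sketch, assuming the platform pointer is supplied by the caller; the shapes and the helper name are illustrative.

    #include "tensorflow/compiler/xla/service/shaped_buffer.h"
    #include "tensorflow/compiler/xla/shape_util.h"

    xla::ShapedBuffer MakeExampleBuffer(const se::Platform* platform) {
      // Host view: a 2x3 f32 array with the default (row-major) layout.
      xla::Shape on_host_shape = xla::ShapeUtil::MakeShape(xla::F32, {2, 3});
      // Device view: same dimensions, but laid out column-major.
      xla::Shape on_device_shape =
          xla::ShapeUtil::MakeShapeWithLayout(xla::F32, {2, 3}, {0, 1});
      return xla::ShapedBuffer(on_host_shape, on_device_shape, platform,
                               /*device_ordinal=*/0);
    }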
|
D | generic_transfer_manager.cc |
      80   TF_RET_CHECK(ShapeUtil::Equal(device_buffer.on_device_shape(),  in TransferLiteralFromDeviceInternal()
      110  TF_RET_CHECK(ShapeUtil::Equal(device_buffer.on_device_shape(),  in TransferLiteralToDeviceAsync()
|
/external/tensorflow/tensorflow/compiler/xrt/ |
D | xrt_state.cc |
      101  xla::Shape on_device_shape = transfer_manager->HostShapeToDeviceShape(shape);  in AllocateScopedShapedBuffer() local
      104  << xla::ShapeUtil::HumanStringWithLayout(on_device_shape);  in AllocateScopedShapedBuffer()
      110  shape, on_device_shape, allocator, device_ordinal);  in AllocateScopedShapedBuffer()
      113  xla::ShapeUtil::GetSubshape(on_device_shape, index_to_buffer.first);  in AllocateScopedShapedBuffer()
      170  const xla::Shape& on_device_shape)  in XRTTupleAllocation() argument
      174  on_device_shape_(on_device_shape),  in XRTTupleAllocation()
      203  shaped_buffer.on_device_shape());  in CreateAndTransfer()
      216  shaped_buffer.on_device_shape());  in CreateFromBuffer()
      262  const xla::Shape& XRTTupleAllocation::on_device_shape() {  in on_device_shape() function in tensorflow::XRTTupleAllocation
      303  xla::ShapeUtil::TryGetSubshape(parent->on_device_shape(), subshape));  in MakeSubBuffer()
      [all …]
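Note: in AllocateScopedShapedBuffer() the on-device shape comes from the TransferManager, and each constituent buffer is sized from the corresponding device-side subshape. A sketch of just that bookkeeping; the allocation and the ScopedShapedBuffer wiring are omitted, and the helper name is made up.

    #include "tensorflow/compiler/xla/service/transfer_manager.h"
    #include "tensorflow/compiler/xla/shape_util.h"
    #include "tensorflow/core/platform/logging.h"

    void LogDeviceBufferSizes(xla::TransferManager* transfer_manager,
                              const xla::Shape& host_shape) {
      const xla::Shape on_device_shape =
          transfer_manager->HostShapeToDeviceShape(host_shape);
      VLOG(2) << "Allocating device shape "
              << xla::ShapeUtil::HumanStringWithLayout(on_device_shape);
      xla::ShapeUtil::ForEachSubshape(
          on_device_shape,
          [&](const xla::Shape& subshape, const xla::ShapeIndex& index) {
            // Tuple subshapes correspond to index tables, leaves to real data.
            VLOG(2) << "  buffer at " << index.ToString() << ": "
                    << transfer_manager->GetByteSizeRequirement(subshape)
                    << " bytes";
          });
    }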
|
D | xrt_state.h |
      166  const xla::Shape& on_device_shape();
      198  const xla::Shape& on_device_shape);
|
/external/tensorflow/tensorflow/compiler/jit/ |
D | xla_tensor.cc |
      50   xla::Shape on_device_shape =  in AllocateShapedBuffer() local
      54   xla::ScopedShapedBuffer shaped_buffer(on_host_shape, on_device_shape,  in AllocateShapedBuffer()
      59   xla::ShapeUtil::GetSubshape(on_device_shape, index_to_buffer.first);  in AllocateShapedBuffer()
|
D | xla_launch_util.cc |
      238  const xla::Shape on_device_shape =  in PopulateInputs() local
      240  if (on_device_shape.IsTuple()) {  in PopulateInputs()
      245  CHECK(xla::ShapeUtil::Equal(shape, on_device_shape))  in PopulateInputs()
      247  << xla::ShapeUtil::HumanStringWithLayout(on_device_shape)  in PopulateInputs()
      270  << output.on_device_shape().DebugString();  in PopulateOutputs()
      281  xla::ShapeUtil::MakeTupleShape({nontuple_buffer.on_device_shape()}),  in PopulateOutputs()
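Note: PopulateInputs() verifies that each argument buffer's on-device shape matches what the executable was compiled for. A minimal sketch of that check; the function and parameter names are assumptions, not the real signatures.

    #include "tensorflow/compiler/xla/service/shaped_buffer.h"
    #include "tensorflow/compiler/xla/shape_util.h"
    #include "tensorflow/core/platform/logging.h"

    void CheckInputShape(const xla::ShapedBuffer& arg, const xla::Shape& expected) {
      const xla::Shape& on_device_shape = arg.on_device_shape();
      // Compare against the device-side shape, including its layout.
      CHECK(xla::ShapeUtil::Equal(expected, on_device_shape))
          << "Expected " << xla::ShapeUtil::HumanStringWithLayout(expected)
          << ", got " << xla::ShapeUtil::HumanStringWithLayout(on_device_shape);
    }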
|
D | xla_device.cc |
      125  *shape = shaped_buffer.on_device_shape();  in DefaultPaddedShapeFn()
|
/external/tensorflow/tensorflow/compiler/xrt/kernels/ |
D | xrt_execute_op.cc |
      251  output_tuple->on_device_shape().IsTuple()) {  in DoWork()
      253  xla::ShapeUtil::TupleElementCount(output_tuple->on_device_shape());  in DoWork()
|
/external/tensorflow/tensorflow/compiler/xla/service/interpreter/ |
D | executable.cc |
      74   const auto& actual_shape = arguments[i]->on_device_shape();  in ExecuteOnStream()
|
/external/tensorflow/tensorflow/compiler/xla/tests/ |
D | local_client_execute_test.cc |
      133  EXPECT_TRUE(LayoutUtil::Equal(x_array.on_device_shape().layout(),  in XLA_TEST_F()
      139  EXPECT_TRUE(LayoutUtil::Equal(y_array.on_device_shape().layout(),  in XLA_TEST_F()
      174  EXPECT_TRUE(LayoutUtil::Equal(result_colmaj.on_device_shape().layout(),  in XLA_TEST_F()
      186  EXPECT_TRUE(LayoutUtil::Equal(result_rowmaj.on_device_shape().layout(),  in XLA_TEST_F()
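Note: these tests assert that the result buffer's on-device layout matches an explicit minor-to-major ordering. A small sketch of the same check, assuming `result` is a ScopedShapedBuffer returned by LocalClient; the helper name is made up.

    #include "tensorflow/compiler/xla/layout_util.h"
    #include "tensorflow/compiler/xla/service/shaped_buffer.h"

    bool ResultIsColumnMajor(const xla::ScopedShapedBuffer& result) {
      // {0, 1} puts dimension 0 minor-most, i.e. column-major for a rank-2 array.
      return xla::LayoutUtil::Equal(result.on_device_shape().layout(),
                                    xla::LayoutUtil::MakeLayout({0, 1}));
    }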
|
/external/tensorflow/tensorflow/compiler/xla/python/ |
D | local_computation_builder.cc |
      130  return shaped_buffer()->on_device_shape();  in shape()
|