Searched refs:TupleElementCount (Results 1 – 25 of 42) sorted by relevance

/external/tensorflow/tensorflow/compiler/tf2xla/kernels/
tensor_list_utils.cc
127 *is_nested_list = (xla::ShapeUtil::TupleElementCount(list_shape) > 2); in IsNestedTensorList()
166 int tuple_size = xla::ShapeUtil::TupleElementCount(list_shape); in GetTensorListPushIndex()
179 int tuple_size = xla::ShapeUtil::TupleElementCount(list_shape); in SetTensorListPushIndex()
214 int tuple_size = xla::ShapeUtil::TupleElementCount(element_tensor_list_shape); in GetTensorListShapeFromElementTensorListShape()
253 int tuple_size = xla::ShapeUtil::TupleElementCount(list_shape); in CreateZerosTensorListWithShape()
313 int list_tuple_size = xla::ShapeUtil::TupleElementCount(list_shape); in ExecuteTensorListPushBack()
320 int element_tuple_size = xla::ShapeUtil::TupleElementCount(element_shape); in ExecuteTensorListPushBack()
378 int list_tuple_size = xla::ShapeUtil::TupleElementCount(list_shape); in ExecuteTensorListPopBack()
/external/tensorflow/tensorflow/compiler/xla/
layout_util.cc
365 if (ShapeUtil::TupleElementCount(src) != in CopyLayoutInternal()
366 ShapeUtil::TupleElementCount(*dst)) { in CopyLayoutInternal()
370 for (int64 i = 0; i < ShapeUtil::TupleElementCount(src); ++i) { in CopyLayoutInternal()
399 if (!rhs.IsTuple() || ShapeUtil::TupleElementCount(lhs) != in LayoutsInShapesEqual()
400 ShapeUtil::TupleElementCount(rhs)) { in LayoutsInShapesEqual()
403 for (int i = 0; i < ShapeUtil::TupleElementCount(lhs); ++i) { in LayoutsInShapesEqual()
literal_comparison.cc
205 const int64 tuple_elements = ShapeUtil::TupleElementCount(shape); in RecursiveElementCount()
696 for (int i = 0; i < ShapeUtil::TupleElementCount(expected.shape()); ++i) { in EqualHelper()
792 for (int64 i = 0; i < ShapeUtil::TupleElementCount(expected.shape()); ++i) { in NearHelper()
879 if (ShapeUtil::TupleElementCount(expected) != in EqualShapes()
880 ShapeUtil::TupleElementCount(actual)) { in EqualShapes()
883 ShapeUtil::TupleElementCount(expected), in EqualShapes()
884 ShapeUtil::TupleElementCount(actual)); in EqualShapes()
shape_util.cc
382 return shape.IsTuple() && TupleElementCount(shape) == 0; in IsEmptyTuple()
385 /* static */ int64 ShapeUtil::TupleElementCount(const Shape& shape) { in TupleElementCount() function in xla::ShapeUtil
393 CHECK_GT(TupleElementCount(shape), index); in GetTupleElementShape()
409 CHECK_LE(start, TupleElementCount(tuple)); in SliceTuple()
410 CHECK_LE(limit, TupleElementCount(tuple)); in SliceTuple()
863 for (int64 i = 0; i < ShapeUtil::TupleElementCount(shape); ++i) { in ForEachSubshapeHelper()
880 for (int64 i = 0; i < ShapeUtil::TupleElementCount(*shape); ++i) { in ForEachMutableSubshapeHelper()
literal.cc
123 for (int i = 0; i < ShapeUtil::TupleElementCount(shape); ++i) { in SetPiece()
310 ShapeUtil::TupleElementCount(piece->subshape())) { in CreateFromProto()
313 ShapeUtil::TupleElementCount(piece->subshape()), in CreateFromProto()
340 for (int i = 0; i < ShapeUtil::TupleElementCount(shape()); ++i) { in DecomposeTuple()
978 for (int i = 0; i < ShapeUtil::TupleElementCount(subshape); ++i) { in TupleToStringHelper()
1322 for (int i = 0; i < ShapeUtil::TupleElementCount(shape()); ++i) { in ConvertToShape()
1996 for (int i = 0; i < ShapeUtil::TupleElementCount(shape); ++i) { in CopyPieceSubtree()
2092 for (int i = 0; i < ShapeUtil::TupleElementCount(shape); ++i) { in BuildPieceSubtree()
2121 CHECK_EQ(src_buf_ptrs.size(), ShapeUtil::TupleElementCount(*shape_)); in BorrowingLiteral()
shape_tree.h
402 int64 count = ShapeUtil::TupleElementCount(shape); in CountSubshapes()
414 const int64 size = ShapeUtil::TupleElementCount(shape); in InitChildren()
450 const int64 size = ShapeUtil::TupleElementCount(shape); in InitChildren()
/external/tensorflow/tensorflow/compiler/xla/service/
conditional_simplifier_test.cc
327 EXPECT_EQ(ShapeUtil::TupleElementCount(conditional->shape()), 0); in TEST_F()
373 EXPECT_EQ(ShapeUtil::TupleElementCount(conditional->shape()), 1); in TEST_F()
419 EXPECT_EQ(ShapeUtil::TupleElementCount(conditional->shape()), 1); in TEST_F()
482 EXPECT_EQ(ShapeUtil::TupleElementCount(conditional->shape()), 1); in TEST_F()
transfer_manager.cc
248 ShapeUtil::TupleElementCount(device_subshape) > 0) { in WriteTupleIndexTablesAsync()
255 for (int64 i = 0; i < ShapeUtil::TupleElementCount(device_subshape); in WriteTupleIndexTablesAsync()
272 if (ShapeUtil::TupleElementCount(device_buffer.on_device_shape()) == 0) { in WriteRootTupleIndexTable()
281 i < ShapeUtil::TupleElementCount(device_buffer.on_device_shape()); ++i) { in WriteRootTupleIndexTable()
hlo_element_type_converter.cc
69 for (int64 i = 0; i < ShapeUtil::TupleElementCount(shape); ++i) { in GetConvertedTupleShape()
90 for (int64 i = 0; i < ShapeUtil::TupleElementCount(shape); ++i) { in ConvertTupleElements()
hlo_module_dce.cc
62 ShapeUtil::TupleElementCount(xla_while->shape()); in RunWhileDCE()
conditional_simplifier.cc
149 ShapeUtil::TupleElementCount(f->shape()); in TryRemoveConditional()
201 ShapeUtil::TupleElementCount(param->shape())) { in TryRemoveUnusedConditionalOperands()
212 int64 old_tuple_element_count = ShapeUtil::TupleElementCount(old_shape); in TryRemoveUnusedConditionalOperands()
gather_expander_test.cc
97 ASSERT_EQ(ShapeUtil::TupleElementCount(while_shape), 4); in TEST_F()
generic_transfer_manager.cc
45 TF_RET_CHECK(elements.size() == ShapeUtil::TupleElementCount(shape)); in WriteSingleTupleIndexTable()
allocation_tracker.cc
160 i < ShapeUtil::TupleElementCount(shaped_buffer->on_device_shape()); in DeconstructTuple()
/external/tensorflow/tensorflow/compiler/tf2xla/
shape_util.cc
31 int64 tuple_elements = xla::ShapeUtil::TupleElementCount(shape); in PopulateInfeedLayoutVector()
135 int64 tuple_elements = xla::ShapeUtil::TupleElementCount(input_shape); in GetShapeWithLayout()
xla_jit_compiled_cpu_function_test.cc
224 ASSERT_EQ(ShapeUtil::TupleElementCount(result), 1); in TEST()
267 ASSERT_EQ(ShapeUtil::TupleElementCount(result), 2); in TEST()
/external/tensorflow/tensorflow/compiler/xla/service/cpu/
cpu_transfer_manager.cc
114 buffers.reserve(ShapeUtil::TupleElementCount(shape)); in TransferLiteralToInfeed()
121 for (int64 i = 0; i < ShapeUtil::TupleElementCount(shape); ++i) { in TransferLiteralToInfeed()
/external/tensorflow/tensorflow/compiler/xla/service/gpu/
infeed_thunk.cc
79 const int64 tuple_element_count = ShapeUtil::TupleElementCount(shape); in ExecuteOnStream()
gpu_transfer_manager.cc
134 const int64 tuple_element_count = ShapeUtil::TupleElementCount(shape); in ShapeTreeToLiteral()
/external/tensorflow/tensorflow/compiler/xla/tests/
client_test.cc
100 EXPECT_EQ(2, ShapeUtil::TupleElementCount(result.shape())); in XLA_TEST_F()
buffer_donation_test.cc
115 for (int i = 0; i < ShapeUtil::TupleElementCount(argument_literal.shape()); in RunAndCheck()
local_client_execute_test.cc
212 EXPECT_EQ(3, ShapeUtil::TupleElementCount(result.on_host_shape())); in XLA_TEST_F()
240 EXPECT_EQ(2, ShapeUtil::TupleElementCount(result.on_host_shape())); in XLA_TEST_F()
318 EXPECT_EQ(2, ShapeUtil::TupleElementCount(result.on_host_shape())); in XLA_TEST_F()
/external/tensorflow/tensorflow/c/eager/
c_api_debug.cc
95 if (xla::ShapeUtil::TupleElementCount(padded_shape) != 2) { in TensorDebugInfo()
/external/tensorflow/tensorflow/compiler/xla/service/llvm_ir/
fused_ir_emitter.cc
185 for (size_t i = 0; i < ShapeUtil::TupleElementCount(tuple->shape()); ++i) { in HandleTuple()
/external/tensorflow/tensorflow/compiler/xla/python/
shared_device_buffer.cc
76 int num_children = ShapeUtil::TupleElementCount(on_device_shape); in BufferFromScopedShapedBufferIterator()
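
Taken together, the 25 results on this page show one recurring usage pattern: guard on IsTuple(), take ShapeUtil::TupleElementCount() once, then index the elements in a loop (often via ShapeUtil::GetTupleElementShape()). The sketch below restates that pattern; it is a minimal illustration assuming the xla::ShapeUtil API named in the hits above, and the helper SameTupleArity is hypothetical, not part of XLA.

#include "tensorflow/compiler/xla/shape.h"
#include "tensorflow/compiler/xla/shape_util.h"

namespace xla {

// Hypothetical helper: true iff lhs and rhs are tuple shapes with the same
// element count whose corresponding elements agree on being tuples themselves.
bool SameTupleArity(const Shape& lhs, const Shape& rhs) {
  // TupleElementCount() is only meaningful for tuple shapes, so guard first,
  // as IsEmptyTuple() in shape_util.cc does.
  if (!lhs.IsTuple() || !rhs.IsTuple()) {
    return false;
  }
  // Compare element counts before touching any element, as in layout_util.cc
  // (CopyLayoutInternal) and literal_comparison.cc (EqualShapes) above.
  const int64 count = ShapeUtil::TupleElementCount(lhs);
  if (count != ShapeUtil::TupleElementCount(rhs)) {
    return false;
  }
  // Index the elements in a loop, the pattern used by shape_util.cc
  // (ForEachSubshapeHelper) and literal.cc (SetPiece) above.
  for (int64 i = 0; i < count; ++i) {
    if (ShapeUtil::GetTupleElementShape(lhs, i).IsTuple() !=
        ShapeUtil::GetTupleElementShape(rhs, i).IsTuple()) {
      return false;
    }
  }
  return true;
}

}  // namespace xla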
