
Searched full:shape (Results 1 – 25 of 4483) sorted by relevance


/external/tensorflow/tensorflow/compiler/xla/service/
shape_inference.h
16 // Shape inference is used by the XLA service as the user builds up
35 // For a given operation and input shapes, infers what the resulting shape is
38 // the shape that results from an operation is inferred. Some methods have
39 // overloads for inferring shape at the HLO level.
41 // TODO(b/73352135): Shape inference does not issue very good error messages, in
42 // part because HloInstruction::ToString() is not available since shape
47 // Infers the shape produced by applying the given unary operation to the
48 // given input shape.
49 static StatusOr<Shape> InferUnaryOpShape(HloOpcode opcode,
50 const Shape& shape);
[all …]
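
The shape_inference.h comments above describe how XLA derives the result shape of an operation from its opcode and operand shapes. As a rough sketch of the idea only (plain Python, not the XLA C++ API), an element-wise inference rule looks like this:

# Toy analogue of InferUnaryOpShape / InferBinaryOpShape: given an opcode
# and operand shapes, derive the result shape. Not the XLA API.

def infer_unary_op_shape(opcode, shape):
    # Element-wise unary ops (negate, exp, ...) produce the operand's shape.
    return tuple(shape)

def infer_binary_op_shape(opcode, lhs_shape, rhs_shape):
    # Element-wise binary ops require matching shapes in this sketch; real
    # XLA shape inference also handles broadcast dimensions, tuples, and
    # richer error reporting.
    if tuple(lhs_shape) != tuple(rhs_shape):
        raise ValueError(f"{opcode}: mismatched shapes {lhs_shape} vs {rhs_shape}")
    return tuple(lhs_shape)

print(infer_binary_op_shape("add", (2, 4), (2, 4)))   # (2, 4)
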
bfloat16_propagation_test.cc
77 if (inst->shape().element_type() == BF16) { in OutputsBF16()
82 inst->users()[0]->shape().element_type() == BF16; in OutputsBF16()
85 std::unique_ptr<HloInstruction> CreateDot(const Shape& shape, in CreateDot() argument
91 return HloInstruction::CreateDot(shape, lhs, rhs, dot_dnums, in CreateDot()
100 Shape shape = ShapeUtil::MakeShape(F32, {2, 4}); in TEST_F() local
103 builder.AddInstruction(HloInstruction::CreateParameter(0, shape, "a")); in TEST_F()
105 builder.AddInstruction(HloInstruction::CreateParameter(1, shape, "b")); in TEST_F()
107 builder.AddInstruction(HloInstruction::CreateParameter(2, shape, "c")); in TEST_F()
109 HloInstruction::CreateBinary(shape, HloOpcode::kAdd, a, b)); in TEST_F()
111 HloInstruction::CreateBinary(shape, HloOpcode::kAdd, add0, b)); in TEST_F()
[all …]
hlo_instructions.h
40 explicit HloBatchNormInstruction(HloOpcode opcode, const Shape& shape,
61 explicit HloBatchNormTrainingInstruction(const Shape& shape,
70 const Shape& shape, absl::Span<HloInstruction* const> new_operands,
77 const Shape& shape, HloInstruction* operand, HloInstruction* scale,
84 const Shape& shape, absl::Span<HloInstruction* const> new_operands,
91 const Shape& shape, HloInstruction* operand, HloInstruction* scale,
98 const Shape& shape, absl::Span<HloInstruction* const> new_operands,
104 explicit HloFftInstruction(const Shape& shape, HloInstruction* operand,
124 const Shape& shape, absl::Span<HloInstruction* const> new_operands,
136 explicit HloCompareInstruction(const Shape& shape, HloInstruction* lhs,
[all …]
hlo_verifier.cc
31 Status VerifyNotSparse(const Shape& shape) { in VerifyNotSparse() argument
33 shape, [](const Shape& subshape, const ShapeIndex&) -> Status { in VerifyNotSparse()
91 TF_RETURN_IF_ERROR(VerifyNotSparse(hlo->shape())); in Preprocess()
121 std::vector<const Shape*> operand_shapes; in HandleConcatenate()
123 operand_shapes.push_back(&operand->shape()); in HandleConcatenate()
132 convert->operand(0)->shape(), in HandleConvert()
133 convert->shape().element_type())); in HandleConvert()
138 convert->operand(0)->shape(), in HandleBitcastConvert()
139 convert->shape().element_type())); in HandleBitcastConvert()
147 TF_ASSIGN_OR_RETURN(const Shape expected, in HandleDot()
[all …]
/external/tensorflow/tensorflow/compiler/xla/
shape_util.h
31 #include "tensorflow/compiler/xla/shape.h"
46 // An index for specifying a particular nested subshape within a shape. Used in
50 // shape. For a non-nested tuple, an index has a single element. For example,
157 // Returns true if this shape index starts with 'prefix'.
167 // Namespaced collection of (static) shape utilities.
173 // Data structure which describes the coordinates and the shape, of a tuple
174 // shaped sub-shape.
177 IndexedShape(ShapeIndex index, Shape shape) in IndexedShape()
178 : index(std::move(index)), shape(std::move(shape)) {} in IndexedShape()
180 Shape shape; member
[all …]
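
shape_util.h introduces ShapeIndex, a sequence of element positions that selects a nested sub-shape inside a tuple shape. A toy Python sketch of that lookup, with shapes modeled as plain tuples instead of the XLA Shape class:

def get_subshape(shape, index):
    # shape is either a leaf like ("f32", (2, 4)) or a ("tuple", [sub, ...]);
    # index is a list of element positions, one per level of nesting.
    for i in index:
        kind, elements = shape
        assert kind == "tuple", "index steps into a non-tuple shape"
        shape = elements[i]
    return shape

nested = ("tuple", [("f32", (2, 4)),
                    ("tuple", [("s32", ()), ("f32", (3,))])])
print(get_subshape(nested, [1, 1]))   # ('f32', (3,))
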
shape_util.cc
89 // Constructs and returns the new shape with the given minor_to_major order in
91 StatusOr<Shape> MakeShapeWithLayoutInternal( in MakeShapeWithLayoutInternal()
103 TF_ASSIGN_OR_RETURN(Shape shape, in MakeShapeWithLayoutInternal()
105 *shape.mutable_layout() = in MakeShapeWithLayoutInternal()
107 if (!shape.has_layout()) { in MakeShapeWithLayoutInternal()
108 return InvalidArgument("Shape has no layout."); in MakeShapeWithLayoutInternal()
110 TF_RETURN_IF_ERROR(ShapeUtil::ValidateShape(shape)); in MakeShapeWithLayoutInternal()
111 return shape; in MakeShapeWithLayoutInternal()
115 /* static */ bool ShapeUtil::Equal(const Shape& lhs, const Shape& rhs) { in Equal()
116 bool equal = Shape::Equal()(lhs, rhs); in Equal()
[all …]
layout_util_test.cc
29 Shape MakeShapeWithLayout(PrimitiveType element_type, in MakeShapeWithLayout()
32 Shape shape = ShapeUtil::MakeShape(element_type, dimensions); in MakeShapeWithLayout() local
33 *shape.mutable_layout() = LayoutUtil::MakeLayout(minor_to_major); in MakeShapeWithLayout()
34 return shape; in MakeShapeWithLayout()
37 Shape MakeShapeWithSparseLayout(PrimitiveType element_type, in MakeShapeWithSparseLayout()
40 Shape shape = ShapeUtil::MakeShape(element_type, dimensions); in MakeShapeWithSparseLayout() local
41 *shape.mutable_layout() = LayoutUtil::MakeSparseLayout(max_sparse_elements); in MakeShapeWithSparseLayout()
42 return shape; in MakeShapeWithSparseLayout()
47 Shape shape = in TEST_F() local
49 Shape other_shape = in TEST_F()
[all …]
layout_util.cc
115 /* static */ Layout LayoutUtil::GetDefaultLayoutForShape(const Shape& shape) { in GetDefaultLayoutForShape() argument
116 if (shape.IsOpaque() || shape.IsToken()) { in GetDefaultLayoutForShape()
122 CHECK(shape.IsArray()); in GetDefaultLayoutForShape()
123 return CreateDefaultLayoutForRank(shape.dimensions_size()); in GetDefaultLayoutForShape()
142 /* static */ void LayoutUtil::SetToDefaultLayout(Shape* shape) { in SetToDefaultLayout() argument
143 if (shape->IsTuple()) { in SetToDefaultLayout()
144 // Tuple shape. in SetToDefaultLayout()
145 for (auto& element_shape : *shape->mutable_tuple_shapes()) { in SetToDefaultLayout()
148 shape->clear_layout(); in SetToDefaultLayout()
149 } else if (shape->IsArray()) { in SetToDefaultLayout()
[all …]
shape_layout.h
28 // A ShapeLayout object encapsulates the layout of a particular shape (including
30 // single array. ShapeLayout contains a Layout proto for each array in the shape
33 // shape with mutable layouts.
36 // Constructs a ShapeLayout of the given shape. Layouts are copied from the
37 // shape parameter.
38 explicit ShapeLayout(const Shape& shape) : shape_(shape) {} in ShapeLayout() argument
41 // shape. 'to_shape' and the shape of the ShapeLayout object must be
43 Status AssignLayoutToShape(Shape* to_shape) const;
46 // given shape. Returns false otherwise. If the given shape is not compatible
47 // with the ShapeLayout's shape, then false is returned.
[all …]
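
The layout code above attaches a minor_to_major dimension ordering to each array shape. As an illustration only (plain Python, not the XLA implementation), the sketch below builds a row-major minor_to_major ordering, which I believe matches the default the code refers to, and converts a layout into per-dimension element strides:

def default_minor_to_major(rank):
    # Row-major convention: the last dimension is the most minor one.
    return list(range(rank - 1, -1, -1))

def strides_from_layout(dimensions, minor_to_major):
    # Walk the dimensions from most minor to most major, accumulating the
    # stride (in elements) contributed by each dimension's extent.
    strides = [0] * len(dimensions)
    stride = 1
    for dim in minor_to_major:
        strides[dim] = stride
        stride *= dimensions[dim]
    return strides

print(default_minor_to_major(3))                   # [2, 1, 0]
print(strides_from_layout([2, 4, 3], [2, 1, 0]))   # [12, 3, 1]
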
/external/gemmlowp/test/
benchmark_meta_gemm.cc
64 struct Shape { struct
73 Shape(std::int32_t n, std::int32_t m, std::int32_t k) in Shape() argument
104 double run_gemms(std::vector<Shape>* shapes) { in run_gemms() argument
106 for (auto& shape : *shapes) { in run_gemms()
107 ops += run_gemm(shape.n, shape.m, shape.k, shape.working_set().lhs, in run_gemms()
108 shape.working_set().rhs, shape.working_set().result); in run_gemms()
159 void time_all(std::vector<Shape>* shapes, std::int32_t repetitions, in time_all()
179 void time_one(Shape* shape, double max_time) { in time_one() argument
184 std::cout << std::setprecision(6) << std::fixed << shape->n << ", " in time_one()
185 << shape->m << ", " << shape->k << ", " << std::flush; in time_one()
[all …]
benchmark_all_sizes.cc
202 struct Shape { struct
208 bool operator==(const Shape& s1, const Shape& s2) { in operator ==() argument
212 bool operator<(const Shape& shape1, const Shape& shape2) { in operator <()
223 float benchmark(const Shape& shape) { in benchmark() argument
234 shape.rows, shape.depth, shape.cols); in benchmark()
240 shape.rows, shape.depth, shape.cols); in benchmark()
245 shape.rows, shape.depth, shape.cols); in benchmark()
250 shape.rows, shape.depth, shape.cols); in benchmark()
252 return benchmark_float(shape.rows, shape.depth, shape.cols); in benchmark()
277 std::vector<Shape> all_shapes_in_random_order() { in all_shapes_in_random_order()
[all …]
/external/tensorflow/tensorflow/python/ops/
init_ops_v2_test.py
40 shape=None, argument
42 if shape is None:
43 shape = [100]
44 t1 = self.evaluate(init1(shape, dtype))
45 t2 = self.evaluate(init2(shape, dtype))
46 self.assertEqual(tensor_shape.as_shape(shape), t1.shape)
47 self.assertEqual(tensor_shape.as_shape(shape), t2.shape)
52 shape=None, argument
54 if shape is None:
55 shape = [100]
[all …]
random_ops.py
38 def _ShapeTensor(shape): argument
40 if isinstance(shape, (tuple, list)) and not shape:
44 return ops.convert_to_tensor(shape, dtype=dtype, name="shape")
49 def random_normal(shape, argument
58 shape: A 1-D integer Tensor or Python array. The shape of the output tensor.
71 A tensor of the specified shape filled with random normal values.
73 with ops.name_scope(name, "random_normal", [shape, mean, stddev]) as name:
74 shape_tensor = _ShapeTensor(shape)
88 def parameterized_truncated_normal(shape, argument
103 shape: A 1-D integer Tensor or Python array. The shape of the output tensor.
[all …]
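
The random_normal docstring above takes a 1-D shape and returns a tensor of that shape filled with normal samples. A minimal usage sketch, assuming TensorFlow 2 is installed (the op is exposed there as tf.random.normal; the excerpt shows the older random_ops / tf.random_normal wrapper):

import tensorflow as tf

# Draw a 2x3 tensor of samples from N(mean=0, stddev=1).
samples = tf.random.normal(shape=[2, 3], mean=0.0, stddev=1.0, dtype=tf.float32)
print(samples.shape)   # (2, 3)
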
init_ops_test.py
40 shape, argument
45 output = self.evaluate(init(shape))
46 self.assertEqual(output.shape, shape)
58 shape = (9, 6, 99)
60 for tensor_shape in [shape, tensor_shape_lib.TensorShape(shape)]:
69 shape = (8, 12, 99)
71 for tensor_shape in [shape, tensor_shape_lib.TensorShape(shape)]:
79 shape = (12, 99, 7)
81 for tensor_shape in [shape, tensor_shape_lib.TensorShape(shape)]:
90 shape = (5, 6, 4)
[all …]
/external/tensorflow/tensorflow/java/src/main/java/org/tensorflow/
Shape.java
20 /** The possibly partially known shape of a tensor produced by an operation. */
21 public final class Shape { class
23 /** Create a Shape representing an unknown number of dimensions. */
24 public static Shape unknown() { in unknown()
25 return new Shape(null); in unknown()
28 /** Create a Shape representing a scalar value. */
29 public static Shape scalar() { in scalar()
30 return new Shape(new long[0]); in scalar()
34 * Create a Shape representing an N-dimensional value.
36 * <p>Creates a Shape representing an N-dimensional value (N being at least 1), with the provided
[all …]
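
Shape.java models fully known, partially known, and unknown tensor shapes on the Java side. The roughly analogous Python-side class is tf.TensorShape; a quick sketch, assuming TensorFlow is installed:

import tensorflow as tf

print(tf.TensorShape(None).rank)      # None -> unknown number of dimensions
print(tf.TensorShape([]).rank)        # 0    -> scalar
print(tf.TensorShape([None, 2, 3]))   # (None, 2, 3) -> first dimension unknown
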
/external/tensorflow/tensorflow/python/grappler/
datasets_test.py
15 """Tests for the datasets shape inference."""
39 'shape': tensor_shape.TensorShape([])
42 'shape': tensor_shape.TensorShape([3])
45 'shape': tensor_shape.TensorShape([1, 3])
58 self.assertEqual(test_case['shape'],
59 op_properties['IteratorGetNext'][0].shape)
64 'shape': tensor_shape.TensorShape([])
67 'shape': tensor_shape.TensorShape([3])
70 'shape': tensor_shape.TensorShape([1, 3])
83 self.assertEqual(test_case['shape'],
[all …]
/external/tensorflow/tensorflow/java/src/test/java/org/tensorflow/
ShapeTest.java
25 /** Unit tests for {@link Shape}. */
31 assertEquals(-1, Shape.unknown().numDimensions()); in unknown()
32 assertEquals("<unknown>", Shape.unknown().toString()); in unknown()
37 assertEquals(0, Shape.scalar().numDimensions()); in scalar()
38 assertEquals("[]", Shape.scalar().toString()); in scalar()
43 Shape s = Shape.make(2); in make()
48 s = Shape.make(2, 3); in make()
54 s = Shape.make(-1, 2, 3); in make()
66 assertEquals(-1, n.shape().numDimensions()); in nodesInAGraph()
69 assertEquals(0, n.shape().numDimensions()); in nodesInAGraph()
[all …]
/external/tensorflow/tensorflow/compiler/tests/
fft_test.py
66 shape = BATCH_DIMS + indims
67 data = np.arange(np.prod(shape) * 2) / np.prod(indims)
70 data = np.reshape(data.astype(np.float32).view(np.complex64), shape)
76 dtypes.as_dtype(data.dtype), shape=data.shape)
85 shape = BATCH_DIMS + dims
86 data = np.arange(np.prod(shape)) / np.prod(dims)
89 data = np.reshape(data.astype(np.float32), shape)
98 dtypes.as_dtype(data.dtype), shape=data.shape)
135 INNER_DIMS_1D, np.real, lambda x: np.fft.rfft(x, n=x.shape[-1]),
136 lambda x: signal.rfft(x, fft_length=[x.shape[-1].value]))
[all …]
/external/tensorflow/tensorflow/contrib/distributions/python/ops/bijectors/
reshape.py
49 def _static_ndims_from_shape(shape): argument
50 return tensor_shape.dimension_value(shape.shape.with_rank_at_least(1)[0])
61 def _ndims_from_shape(shape): argument
62 return array_ops.shape(shape)[0]
71 * The user must provide both the input and output shape, so that
72 the transformation can be inverted. If an input shape is not
89 r.forward([3., 4.]) # shape [2]
90 # ==> [[3., 4.]] # shape [1, 2]
92 r.forward([[1., 2.], [3., 4.]]) # shape [2, 2]
94 # [[3., 4.]]] # shape [2, 1, 2]
[all …]
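
The Reshape bijector docstring above reshapes only the trailing event dimensions and leaves any leading batch dimensions alone. A NumPy sketch of that behavior (an illustration of the semantics, not the bijector implementation):

import numpy as np

def reshape_event(x, event_shape_in, event_shape_out):
    # Keep whatever leading batch dimensions are present; reshape only the
    # trailing event dimensions from event_shape_in to event_shape_out.
    x = np.asarray(x)
    batch_shape = x.shape[:x.ndim - len(event_shape_in)]
    return x.reshape(batch_shape + tuple(event_shape_out))

print(reshape_event([3., 4.], [2], [1, 2]).shape)              # (1, 2)
print(reshape_event([[1., 2.], [3., 4.]], [2], [1, 2]).shape)  # (2, 1, 2)
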
/external/tensorflow/tensorflow/python/tpu/
tpu_sharding.py
137 def get_sharded_shape(self, shape, shard_index=None): argument
138 """Returns the shape of a shard of a full Tensor.
140 When given the shape of a 'full-size' Tensor, returns the shape of
145 shape: The shape of the full-size Tensor to be sharded.
146 shard_index: The index of the shard whose shape should be returned.
148 shape for every shard.
152 The shape of the sharded version of the Tensor.
157 !(0<=shard_index<number_of_shards); or shape does not have at
159 shape's shard dimension is not a multiple of
169 shape = tensor_shape.as_shape(shape)
[all …]
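
get_sharded_shape's docstring says the shard shape is the full shape with the shard dimension divided by the number of shards, and that a non-multiple is an error. A toy sketch of that rule in plain Python (not the ShardingPolicy API; shard_dimension is passed explicitly here):

def sharded_shape(full_shape, number_of_shards, shard_dimension):
    # Divide the shard dimension evenly across shards; reject shapes whose
    # shard dimension is not a multiple of the shard count.
    dims = list(full_shape)
    if dims[shard_dimension] % number_of_shards != 0:
        raise ValueError(
            f"dimension {shard_dimension} ({dims[shard_dimension]}) is not "
            f"a multiple of {number_of_shards} shards")
    dims[shard_dimension] //= number_of_shards
    return dims

print(sharded_shape([8, 128], number_of_shards=4, shard_dimension=0))  # [2, 128]
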
/external/tensorflow/tensorflow/python/keras/engine/
input_spec.py
31 """Specifies the ndim, dtype and shape of every input to a layer.
36 A None entry in a shape is compatible with any dimension,
37 a None shape is compatible with any shape.
41 shape: Shape tuple, expected shape of the input
52 shape=None, argument
58 self.shape = shape
59 if shape is not None:
60 self.ndim = len(shape)
69 ('shape=' + str(self.shape)) if self.shape else '',
110 if x.shape.ndims is None:
[all …]
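
InputSpec records the ndim, dtype, and shape a layer expects, with None meaning any size is accepted. A small usage sketch with tf.keras.layers.InputSpec inside a custom layer, assuming TensorFlow 2 is installed (the layer here is made up for the example):

import tensorflow as tf

class TwoFeatureLayer(tf.keras.layers.Layer):
    """Hypothetical layer that only accepts rank-2 inputs with last dim 2."""

    def __init__(self):
        super().__init__()
        # shape=(None, 2): any batch size, exactly two features; ndim is
        # inferred from the shape, as in the constructor excerpted above.
        self.input_spec = tf.keras.layers.InputSpec(shape=(None, 2))

    def call(self, inputs):
        return inputs * 2.0

layer = TwoFeatureLayer()
print(layer(tf.ones([3, 2])).shape)   # (3, 2)
# layer(tf.ones([3, 5])) would raise an input-incompatibility error.
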
/external/tensorflow/tensorflow/cc/framework/
gradient_checker_test.cc
43 TensorShape shape({2, 4, 3}); in TEST() local
44 auto x = Placeholder(scope, DT_FLOAT, Placeholder::Shape(shape)); in TEST()
48 scope, {x}, {shape}, {y}, {shape}, &max_error))); in TEST()
54 TensorShape shape({2, 4, 3}); in TEST() local
55 auto x = Placeholder(scope, DT_DOUBLE, Placeholder::Shape(shape)); in TEST()
59 scope, {x}, {shape}, {y}, {shape}, &max_error))); in TEST()
65 TensorShape shape({2, 4, 3}); in TEST() local
66 auto x = Placeholder(scope, DT_COMPLEX64, Placeholder::Shape(shape)); in TEST()
70 scope, {x}, {shape}, {y}, {shape}, &max_error))); in TEST()
76 TensorShape shape({2, 4, 3}); in TEST() local
[all …]
/external/tensorflow/tensorflow/contrib/seq2seq/python/kernel_tests/
attention_wrapper_test.py
58 collections.namedtuple('ResultSummary', ('shape', 'dtype', 'mean'))):
64 return ResultSummary(x.shape, x.dtype, x.mean())
98 state = cell.zero_state(array_ops.shape(memory)[0], dtypes.float32)
103 self.assertEqual(state.cell_state.c.shape, static_state.cell_state.c.shape)
104 self.assertEqual(state.cell_state.h.shape, static_state.cell_state.h.shape)
105 self.assertEqual(state.attention.shape, static_state.attention.shape)
172 shape=(None, None, input_depth))
176 shape=(None, None, encoder_output_depth))
288 encoder_outputs = array_ops.placeholder(dtype, shape=[64, None, 256])
289 encoder_sequence_length = array_ops.placeholder(dtypes.int32, shape=[64])
[all …]
/external/tensorflow/tensorflow/compiler/xla/client/
xla_builder.cc
90 TF_ASSIGN_OR_RETURN(xla::Shape shape, builder->GetShape(x)); in operator >>()
91 if (!ShapeUtil::ElementIsIntegral(shape)) { in operator >>()
94 ShapeUtil::HumanString(shape)); in operator >>()
96 if (ShapeUtil::ElementIsSigned(shape)) { in operator >>()
104 StatusOr<Shape> XlaBuilder::GetShape(const XlaOp& op) const { in GetShape()
108 return Shape(instr->shape()); in GetShape()
111 StatusOr<std::vector<Shape>> XlaBuilder::GetOperandShapes( in GetOperandShapes()
113 std::vector<Shape> operand_shapes; in GetOperandShapes()
115 TF_ASSIGN_OR_RETURN(const Shape& shape, GetShape(operand)); in GetOperandShapes()
116 operand_shapes.push_back(shape); in GetOperandShapes()
[all …]
/external/python/cpython3/Lib/test/
test_buffer.py
256 def strides_from_shape(ndim, shape, itemsize, layout): argument
262 strides = list(shape[1:]) + [itemsize]
266 strides = [itemsize] + list(shape[:-1])
273 multidimensional C array with shape 's'."""
287 multidimensional Fortran array with shape 's'."""
298 def carray(items, shape): argument
299 if listp(items) and not 0 in shape and prod(shape) != len(items):
300 raise ValueError("prod(shape) != len(items)")
301 return _ca(items, shape)
303 def farray(items, shape): argument
[all …]
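
strides_from_shape computes the byte strides of a contiguous array in C or Fortran order from its shape and item size. A self-contained version of the same computation, cross-checked against NumPy (the signature drops the redundant ndim argument):

import numpy as np

def strides_from_shape(shape, itemsize, layout):
    # Byte strides of a contiguous array: accumulate extents from the right
    # for C (row-major) order, from the left for Fortran (column-major) order.
    ndim = len(shape)
    if ndim == 0:
        return ()
    if layout == 'C':
        strides = list(shape[1:]) + [itemsize]
        for i in range(ndim - 2, -1, -1):
            strides[i] *= strides[i + 1]
    else:  # Fortran order
        strides = [itemsize] + list(shape[:-1])
        for i in range(1, ndim):
            strides[i] *= strides[i - 1]
    return tuple(strides)

shape, itemsize = (2, 4, 3), 8
print(strides_from_shape(shape, itemsize, 'C'))               # (96, 24, 8)
print(np.zeros(shape, dtype=np.float64).strides)              # (96, 24, 8)
print(strides_from_shape(shape, itemsize, 'F'))               # (8, 16, 64)
print(np.zeros(shape, dtype=np.float64, order='F').strides)   # (8, 16, 64)
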
