/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

// Utility functions related to layouts of Shapes.

#ifndef TENSORFLOW_COMPILER_XLA_LAYOUT_UTIL_H_
#define TENSORFLOW_COMPILER_XLA_LAYOUT_UTIL_H_

#include <string>

#include "absl/types/span.h"
#include "tensorflow/compiler/xla/layout.h"
#include "tensorflow/compiler/xla/shape.h"
#include "tensorflow/compiler/xla/status.h"
#include "tensorflow/compiler/xla/types.h"
#include "tensorflow/compiler/xla/xla_data.pb.h"
#include "tensorflow/core/platform/macros.h"
#include "tensorflow/core/platform/types.h"

namespace xla {

// Namespaced collection of (static) Layout utilities.
class LayoutUtil {
 public:
  // Creates a layout with the given minor-to-major dimension order. (This is a
  // convenience function for protobuf construction.)
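  //
  // For example (illustrative values, not a normative part of this API):
  //   // Row-major order for a rank-2 array: dimension 1 is the most minor.
  //   Layout row_major = LayoutUtil::MakeLayout({1, 0});
  //   // Column-major order: dimension 0 is the most minor.
  //   Layout col_major = LayoutUtil::MakeLayout({0, 1});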
  static Layout MakeLayout(absl::Span<const int64> minor_to_major,
                           absl::Span<const Tile> tiles = {},
                           int64 element_size_in_bits = 0,
                           int64 memory_space = 0);

  // Similar to MakeLayout, but takes indices in major-to-minor order.
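  //
  // For example, MakeLayoutFromMajorToMinor({0, 1}) produces the same layout
  // as MakeLayout({1, 0}).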
  static Layout MakeLayoutFromMajorToMinor(
      absl::Span<const int64> major_to_minor);

  // Returns a layout with descending (i.e. {n-1, n-2, ..., 0}) minor-to-major
  // dimensions.
  static Layout MakeDescendingLayout(int64 rank);

  // Returns a layout with ascending (i.e. {0, 1, ..., n-1}) minor-to-major
  // dimensions.
  static Layout MakeAscendingLayout(int64 rank);

  // Returns the default layout for the given shape.
  static Layout GetDefaultLayoutForShape(const Shape& shape);

  // Helper functions that create default layouts for various ranks.
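  //
  // For example, GetDefaultLayoutForRank(2) returns the default (descending,
  // i.e. {1, 0}) minor-to-major order.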
  static Layout GetDefaultLayoutForRank(int64 rank);
  static Layout GetDefaultLayoutForR2();
  static Layout GetDefaultLayoutForR3();
  static Layout GetDefaultLayoutForR4();

  // Sets the default layout on the Shape.
  static void SetToDefaultLayout(Shape* shape);

  // Returns a shape with the same dimensions as `shape` but with the default
  // layout.
  static Shape GetWithDefaultLayout(const Shape& shape);

  // Sets the layouts of all Shapes within the given ProgramShape to the
  // default.
  static void SetToDefaultLayout(ProgramShape* program_shape);

  // Validates that the layout within the given shape is correct. The check
  // is performed for all subshapes as well. If missing layouts are allowed,
  // the check does not fail on array shapes without layouts.
  static Status ValidateLayoutInShape(const Shape& shape,
                                      bool allow_missing_layouts = false);

  // Validates that the provided layout satisfies invariants for the given
  // shape.
  static Status ValidateLayoutForShape(const Layout& layout,
                                       const Shape& shape);

  // Clears the layout in the given Shape. After this function is called,
  // HasLayout will return false for the shape.
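  //
  // For example (a sketch; ShapeUtil::MakeShape is declared in shape_util.h,
  // not in this header):
  //   Shape shape = ShapeUtil::MakeShape(F32, {2, 3});
  //   LayoutUtil::ClearLayout(&shape);
  //   // LayoutUtil::HasLayout(shape) now returns false.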
  static void ClearLayout(Shape* shape);

  // Clears the layout on all Shapes within the given ProgramShape.
  static void ClearLayout(ProgramShape* program_shape);

  // Returns whether the given Shape is an array and has a dense format layout.
  static bool IsDenseArray(const Shape& shape);

  // Returns whether the given Layout has a dense format.
  static bool IsDense(const Layout& layout);

  // Returns whether the layout is monotonic and dim 0 is minor in the layout.
  // * R0 and R1: this is always trivially true.
  // * R2+: equivalent to column-major. Dimension 0 is the most minor,
  //        dimension 1 is more major, and so on until dimension N-1, which is
  //        the most major.
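  //
  // For example, this returns true for a rank-3 layout whose minor_to_major
  // is {0, 1, 2}.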
  static bool IsMonotonicWithDim0Minor(const Layout& layout);

  // Returns whether the layout is monotonic and dim 0 is major in the layout.
  // * R0 and R1: this is always trivially true.
  // * R2+: equivalent to row-major. Dimension 0 is the most major, dimension 1
  //        is more minor, and so on until dimension N-1, which is the most
  //        minor.
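  //
  // For example, this returns true for a rank-3 layout whose minor_to_major
  // is {2, 1, 0} (XLA's default layout).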
  static bool IsMonotonicWithDim0Major(const Layout& layout);

  // Returns whether the given shape has a layout. For tuple shapes, true is
  // returned only if all elements have layouts.
  static bool HasLayout(const Shape& shape);

  // Returns whether all Shapes within the given ProgramShape have layouts.
  static bool HasLayout(const ProgramShape& program_shape);

  // Returns whether lhs and rhs are identical.
  static bool Equal(const Layout& lhs, const Layout& rhs);

  // Returns the minor_to_major array for the given Shape.  Requires that the
  // shape is an array and has a dense layout.
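  //
  // For example, for a rank-2 array shape with a row-major layout, this
  // returns {1, 0}.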
  static absl::Span<const int64> MinorToMajor(const Shape& shape);
  static absl::Span<const int64> MinorToMajor(const Layout& layout);

  // Major(0) is the most major logical dimension number, Major(1) is the
  // second-most-major logical dimension number and so on.
  //
  // This can be used to translate physical dimension numbers to logical
  // dimension numbers. Assume that we are numbering the physical dimensions so
  // that the most major physical dimension has physical dimension number 0 and
  // so on. Then a physical dimension number p corresponds to the logical
  // dimension number Major(p). So this function could also be called
  // PhysicalToLogical().
  //
  // As an example, consider physical dimension number 0, which by definition is
  // the most major. Then Major(0) is the most major logical dimension, so Major
  // maps the physical dimension number 0 to the most major logical dimension
  // number Major(0).
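  //
  // Concretely, for a rank-3 layout with minor_to_major {0, 1, 2}, the most
  // major physical dimension is logical dimension 2: Major(layout, 0) == 2,
  // Major(layout, 1) == 1, and Major(layout, 2) == 0.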
  static int64 Major(const Layout& layout, int64 physical_dimension_number);

  // Minor(0) is the most minor logical dimension number, Minor(1) is the
  // second-most-minor logical dimension number and so on.
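  //
  // For example, for a rank-3 layout with minor_to_major {0, 1, 2},
  // Minor(layout, 0) == 0 and Minor(layout, 2) == 2.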
  static int64 Minor(const Layout& layout, int64 physical_dimension_number);

  // Returns the inverse mapping of the Major() function. More precisely,
  // returns a vector v such that if l == Major(p), then v[l] == p.
  //
  // This can be used to translate logical dimension numbers into physical
  // dimension numbers. Assume that we are numbering the physical dimensions so
  // that the most major physical dimension has physical dimension number 0 and
  // so on. Then a logical dimension number l corresponds to the physical
  // dimension number MakeLogicalToPhysical(layout)[l].
  //
  // The returned vector is indexed by logical dimension number: element l
  // holds the physical dimension number of logical dimension l. The element
  // whose contents are 0 corresponds to the most major physical dimension, and
  // the element whose contents are (rank - 1) corresponds to the most minor
  // physical dimension.
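  //
  // For example, for a rank-3 layout with minor_to_major {0, 1, 2}, this
  // returns {2, 1, 0}: logical dimension 0 is the most minor and therefore
  // maps to physical dimension 2, while logical dimension 2 maps to physical
  // dimension 0.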
  static std::vector<int64> MakeLogicalToPhysical(const Layout& layout);

  // Returns a human-readable string that represents the given layout.
  static string HumanString(const Layout& layout);

  // Copies the layout from 'src' to 'dst'. Recursively copies layouts of
  // tuples.  'src' and 'dst' need not be compatible, but the two shapes must
  // have the same tuple structure (if any), and corresponding arrays within
  // the shapes must have the same rank.
  static Status CopyLayoutBetweenShapes(const Shape& src, Shape* dst);

  // Returns true if the layouts of lhs and rhs are equal, false
  // otherwise. Recursively compares layouts of tuples.
  //
  // lhs and rhs need not be compatible to have the same layout but the two
  // shapes must have the same tuple structure (if any) and arrays must have the
  // same rank. Element type is ignored.
  static bool LayoutsInShapesEqual(const Shape& lhs, const Shape& rhs);

  // Returns whether the given dimensions are consecutive in the given layout,
  // not necessarily in the order given.
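  //
  // For example, with minor_to_major {2, 1, 0}, dims {0, 1} are consecutive
  // (they occupy adjacent positions in the layout), while dims {0, 2} are not.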
  static bool AreDimensionsConsecutive(const Layout& layout,
                                       absl::Span<const int64> dims);

  // Computes a hash for `layout`.
  static size_t Hash(const Layout& layout);

 private:
  TF_DISALLOW_COPY_AND_ASSIGN(LayoutUtil);
};

}  // namespace xla

#endif  // TENSORFLOW_COMPILER_XLA_LAYOUT_UTIL_H_