1# Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2# 3# Licensed under the Apache License, Version 2.0 (the "License"); 4# you may not use this file except in compliance with the License. 5# You may obtain a copy of the License at 6# 7# http://www.apache.org/licenses/LICENSE-2.0 8# 9# Unless required by applicable law or agreed to in writing, software 10# distributed under the License is distributed on an "AS IS" BASIS, 11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12# See the License for the specific language governing permissions and 13# limitations under the License. 14# ============================================================================== 15"""A TensorSpec class.""" 16 17from __future__ import absolute_import 18from __future__ import division 19from __future__ import print_function 20 21import numpy as np 22 23from tensorflow.python import _pywrap_utils 24from tensorflow.python.framework import common_shapes 25from tensorflow.python.framework import dtypes 26from tensorflow.python.framework import ops 27from tensorflow.python.framework import tensor_shape 28from tensorflow.python.framework import type_spec 29from tensorflow.python.util.tf_export import tf_export 30 31 32class DenseSpec(type_spec.TypeSpec): 33 """Describes a dense object with shape, dtype, and name.""" 34 35 __slots__ = ["_shape", "_shape_tuple", "_dtype", "_name"] 36 37 _component_specs = property(lambda self: self) 38 39 def __init__(self, shape, dtype=dtypes.float32, name=None): 40 """Creates a TensorSpec. 41 42 Args: 43 shape: Value convertible to `tf.TensorShape`. The shape of the tensor. 44 dtype: Value convertible to `tf.DType`. The type of the tensor values. 45 name: Optional name for the Tensor. 46 47 Raises: 48 TypeError: If shape is not convertible to a `tf.TensorShape`, or dtype is 49 not convertible to a `tf.DType`. 50 """ 51 self._shape = tensor_shape.TensorShape(shape) 52 try: 53 self._shape_tuple = tuple(self.shape.as_list()) 54 except ValueError: 55 self._shape_tuple = None 56 self._dtype = dtypes.as_dtype(dtype) 57 self._name = name 58 59 @classmethod 60 def from_spec(cls, spec, name=None): 61 return cls(spec.shape, spec.dtype, name or spec.name) 62 63 @property 64 def shape(self): 65 """Returns the `TensorShape` that represents the shape of the tensor.""" 66 return self._shape 67 68 @property 69 def dtype(self): 70 """Returns the `dtype` of elements in the tensor.""" 71 return self._dtype 72 73 @property 74 def name(self): 75 """Returns the (optionally provided) name of the described tensor.""" 76 return self._name 77 78 def is_compatible_with(self, spec_or_value): 79 return (isinstance(spec_or_value, (DenseSpec, self.value_type)) and 80 self._dtype.is_compatible_with(spec_or_value.dtype) and 81 self._shape.is_compatible_with(spec_or_value.shape)) 82 83 def __repr__(self): 84 return "{}(shape={}, dtype={}, name={})".format( 85 type(self).__name__, self.shape, repr(self.dtype), repr(self.name)) 86 87 def __hash__(self): 88 return hash((self._shape_tuple, self.dtype)) 89 90 def __eq__(self, other): 91 # pylint: disable=protected-access 92 return (type(self) is type(other) and 93 self._shape_tuple == other._shape_tuple 94 and self._dtype == other._dtype 95 and self._name == other._name) 96 97 def __ne__(self, other): 98 return not self == other 99 100 def most_specific_compatible_type(self, other): 101 if (type(self) is not type(other)) or (self._dtype != other.dtype): 102 raise ValueError("Types are not compatible: %r vs %r" % (self, other)) 103 shape = self._shape.most_specific_compatible_shape(other.shape) 104 name = self._name if self._name == other.name else None 105 return type(self)(shape, self._dtype, name) 106 107 def _serialize(self): 108 return (self._shape, self._dtype, self._name) 109 110 def _to_legacy_output_types(self): 111 return self._dtype 112 113 def _to_legacy_output_shapes(self): 114 return self._shape 115 116 def _to_legacy_output_classes(self): 117 return self.value_type 118 119 120@tf_export("TensorSpec") 121class TensorSpec(DenseSpec, type_spec.BatchableTypeSpec): 122 """Describes a tf.Tensor. 123 124 Metadata for describing the `tf.Tensor` objects accepted or returned 125 by some TensorFlow APIs. 126 """ 127 128 __slots__ = [] 129 130 def is_compatible_with(self, spec_or_tensor): # pylint:disable=useless-super-delegation 131 """Returns True if spec_or_tensor is compatible with this TensorSpec. 132 133 Two tensors are considered compatible if they have the same dtype 134 and their shapes are compatible (see `tf.TensorShape.is_compatible_with`). 135 136 Args: 137 spec_or_tensor: A tf.TensorSpec or a tf.Tensor 138 139 Returns: 140 True if spec_or_tensor is compatible with self. 141 """ 142 return super(TensorSpec, self).is_compatible_with(spec_or_tensor) 143 144 @classmethod 145 def from_tensor(cls, tensor, name=None): 146 if isinstance(tensor, ops.EagerTensor): 147 return TensorSpec(tensor.shape, tensor.dtype, name) 148 elif isinstance(tensor, ops.Tensor): 149 return TensorSpec(tensor.shape, tensor.dtype, name or tensor.op.name) 150 else: 151 raise ValueError("`tensor` should be a tf.Tensor") 152 153 value_type = property(lambda self: ops.Tensor) 154 155 def _to_components(self, value): 156 try: 157 value = ops.convert_to_tensor(value, self._dtype) 158 except (TypeError, ValueError): 159 raise ValueError("Value %r is not convertible to a tensor with dtype %s " 160 "and shape %s." % (value, self._dtype, self._shape)) 161 if not value.shape.is_compatible_with(self._shape): 162 raise ValueError("Value %r is not convertible to a tensor with dtype %s " 163 "and shape %s." % (value, self._dtype, self._shape)) 164 return value 165 166 def _from_components(self, components): 167 return components 168 169 def _from_compatible_tensor_list(self, tensor_list): 170 # TODO(b/112266545): It would be cleaner to create a new `ensure_shape()` 171 # op here and return that, instead of mutating the input's shape using 172 # `Tensor.set_shape()`. However, that would add extra ops, which could 173 # impact performance. When this bug is resolved, we should be able to add 174 # the `ensure_shape()` ops and optimize them away using contextual shape 175 # information. 176 assert len(tensor_list) == 1 177 tensor_list[0].set_shape(self._shape) 178 return tensor_list[0] 179 180 def _to_batchable_tensor_list(self, value, batched=False): 181 if batched and self._shape.merge_with(value.shape).ndims == 0: 182 raise ValueError("Unbatching a tensor is only supported for rank >= 1") 183 return self._to_components(value) 184 185 def _batch(self, batch_size): 186 return TensorSpec( 187 tensor_shape.TensorShape([batch_size]).concatenate(self._shape), 188 self._dtype) 189 190 def _unbatch(self): 191 if self._shape.ndims == 0: 192 raise ValueError("Unbatching a tensor is only supported for rank >= 1") 193 return TensorSpec(self._shape[1:], self._dtype) 194 195 196# TODO(b/133606651): Should is_compatible_with should check min/max bounds? 197class BoundedTensorSpec(TensorSpec): 198 """A `TensorSpec` that specifies minimum and maximum values. 199 200 Example usage: 201 ```python 202 spec = tensor_spec.BoundedTensorSpec((1, 2, 3), tf.float32, 0, (5, 5, 5)) 203 tf_minimum = tf.convert_to_tensor(spec.minimum, dtype=spec.dtype) 204 tf_maximum = tf.convert_to_tensor(spec.maximum, dtype=spec.dtype) 205 ``` 206 207 Bounds are meant to be inclusive. This is especially important for 208 integer types. The following spec will be satisfied by tensors 209 with values in the set {0, 1, 2}: 210 ```python 211 spec = tensor_spec.BoundedTensorSpec((3, 5), tf.int32, 0, 2) 212 ``` 213 """ 214 215 __slots__ = ("_minimum", "_maximum") 216 217 def __init__(self, shape, dtype, minimum, maximum, name=None): 218 """Initializes a new `BoundedTensorSpec`. 219 220 Args: 221 shape: Value convertible to `tf.TensorShape`. The shape of the tensor. 222 dtype: Value convertible to `tf.DType`. The type of the tensor values. 223 minimum: Number or sequence specifying the minimum element bounds 224 (inclusive). Must be broadcastable to `shape`. 225 maximum: Number or sequence specifying the maximum element bounds 226 (inclusive). Must be broadcastable to `shape`. 227 name: Optional string containing a semantic name for the corresponding 228 array. Defaults to `None`. 229 230 Raises: 231 ValueError: If `minimum` or `maximum` are not provided or not 232 broadcastable to `shape`. 233 TypeError: If the shape is not an iterable or if the `dtype` is an invalid 234 numpy dtype. 235 """ 236 super(BoundedTensorSpec, self).__init__(shape, dtype, name) 237 238 if minimum is None or maximum is None: 239 raise ValueError("minimum and maximum must be provided; but saw " 240 "'%s' and '%s'" % (minimum, maximum)) 241 242 try: 243 minimum_shape = np.shape(minimum) 244 common_shapes.broadcast_shape( 245 tensor_shape.TensorShape(minimum_shape), self.shape) 246 except ValueError as exception: 247 raise ValueError("minimum is not compatible with shape. " 248 "Message: {!r}.".format(exception)) 249 250 try: 251 maximum_shape = np.shape(maximum) 252 common_shapes.broadcast_shape( 253 tensor_shape.TensorShape(maximum_shape), self.shape) 254 except ValueError as exception: 255 raise ValueError("maximum is not compatible with shape. " 256 "Message: {!r}.".format(exception)) 257 258 self._minimum = np.array(minimum, dtype=self.dtype.as_numpy_dtype) 259 self._minimum.setflags(write=False) 260 261 self._maximum = np.array(maximum, dtype=self.dtype.as_numpy_dtype) 262 self._maximum.setflags(write=False) 263 264 @classmethod 265 def from_spec(cls, spec): 266 dtype = dtypes.as_dtype(spec.dtype) 267 minimum = getattr(spec, "minimum", dtype.min) 268 maximum = getattr(spec, "maximum", dtype.max) 269 return BoundedTensorSpec(spec.shape, dtype, minimum, maximum, spec.name) 270 271 @property 272 def minimum(self): 273 """Returns a NumPy array specifying the minimum bounds (inclusive).""" 274 return self._minimum 275 276 @property 277 def maximum(self): 278 """Returns a NumPy array specifying the maximum bounds (inclusive).""" 279 return self._maximum 280 281 def __repr__(self): 282 s = "BoundedTensorSpec(shape={}, dtype={}, name={}, minimum={}, maximum={})" 283 return s.format(self.shape, repr(self.dtype), repr(self.name), 284 repr(self.minimum), repr(self.maximum)) 285 286 def __eq__(self, other): 287 tensor_spec_eq = super(BoundedTensorSpec, self).__eq__(other) 288 return (tensor_spec_eq and np.allclose(self.minimum, other.minimum) and 289 np.allclose(self.maximum, other.maximum)) 290 291 def __hash__(self): 292 return hash((self._shape_tuple, self.dtype)) 293 294 def __reduce__(self): 295 return BoundedTensorSpec, (self._shape, self._dtype, self._minimum, 296 self._maximum, self._name) 297 298 def _serialize(self): 299 return (self._shape, self._dtype, self._minimum, self._maximum, self._name) 300 301 302_pywrap_utils.RegisterType("TensorSpec", TensorSpec) 303 304 305# Note: we do not include Tensor names when constructing TypeSpecs. 306type_spec.register_type_spec_from_value_converter( 307 ops.Tensor, 308 lambda tensor: TensorSpec(tensor.shape, tensor.dtype)) 309 310type_spec.register_type_spec_from_value_converter( 311 np.ndarray, 312 lambda array: TensorSpec(array.shape, array.dtype)) 313