# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
15"""Tests for ragged.to_tensor."""
16
17from __future__ import absolute_import
18from __future__ import division
19from __future__ import print_function
20
21import random
22
23from absl.testing import parameterized
24import numpy as np
25
26from tensorflow.python.client import session
27from tensorflow.python.eager import context
28from tensorflow.python.framework import constant_op
29from tensorflow.python.framework import dtypes
30
31from tensorflow.python.framework import errors
32from tensorflow.python.framework import indexed_slices
33from tensorflow.python.framework import ops
34from tensorflow.python.framework import tensor_shape
35from tensorflow.python.framework import test_util
36from tensorflow.python.ops import array_ops
37from tensorflow.python.ops import gradients_impl
38from tensorflow.python.ops.ragged import ragged_factory_ops
39from tensorflow.python.ops.ragged import ragged_tensor
40from tensorflow.python.ops.ragged.ragged_tensor import RaggedTensor
41from tensorflow.python.platform import benchmark
42from tensorflow.python.platform import googletest
43from tensorflow.python.util import nest
44
45
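# NOTE: Wrapping a tensor in `placeholder_with_default(t, shape=None)` erases
# its static shape information in graph mode, so tests that route inputs
# through this helper also exercise the dynamic-shape code paths of
# `to_tensor`.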
def make_placeholder(t):
  return array_ops.placeholder_with_default(t, None)


def rebuild_ragged_tensor_with_value_rowids(rt, feed_dict=None, sess=None):
  """Returns a copy of `rt`, built using `from_value_rowids`.

  This ensures that RaggedTensor._cached_value_rowids is populated, which
  triggers a different code-path for converting ragged tensors to tensors.

  If `feed_dict` and `sess` are specified, then build the new `RaggedTensor`
  using placeholder tensors, and populate a feed dictionary that can be used
  to feed the placeholders.

  Args:
    rt: The RaggedTensor to copy.
    feed_dict: If specified, then build the new `RaggedTensor` using
      placeholders, and populate this dict with entries to feed those
      placeholders.
    sess: A session used to evaluate tensors; required if feed_dict is
      specified.

  Returns:
    A copy of `rt`, built using `from_value_rowids`.
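
  Example (illustrative only):
    rt = ragged_factory_ops.constant([[1, 2], [], [3]])
    copy = rebuild_ragged_tensor_with_value_rowids(rt)
    # `copy` holds the same values as `rt`, but its row partition is stored
    # as (value_rowids, nrows) rather than row_splits.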
70  """
71  if isinstance(rt, ragged_tensor.RaggedTensor):
72    values = rebuild_ragged_tensor_with_value_rowids(rt.values, feed_dict, sess)
73    rowids = rt.value_rowids()
74    nrows = rt.nrows()
75    if feed_dict is not None:
76      rowids_ph = make_placeholder(rowids)
77      nrows_ph = make_placeholder(nrows)
78      feed_dict[rowids_ph] = sess.run(rowids)
79      feed_dict[nrows_ph] = sess.run(nrows)
80      rowids, nrows = rowids_ph, nrows_ph
81    return ragged_tensor.RaggedTensor.from_value_rowids(values, rowids, nrows)
82  else:
83    if feed_dict is not None:
84      rt_ph = make_placeholder(rt)
85      feed_dict[rt_ph] = sess.run(rt)
86      rt = rt_ph
87    return rt
88
89
@test_util.run_all_in_graph_and_eager_modes
class RaggedTensorToTensorOpTest(test_util.TensorFlowTestCase,
                                 parameterized.TestCase):

  def testDocStringExamples(self):
    """Example from ragged_to_tensor.__doc__."""
    rt = ragged_factory_ops.constant([[9, 8, 7], [], [6, 5], [4]])
    dt = rt.to_tensor()
    self.assertAllEqual(dt, [[9, 8, 7], [0, 0, 0], [6, 5, 0], [4, 0, 0]])

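  # Each dict below is a single case for `testRaggedTensorToTensor`.  Only
  # 'rt_input' and 'expected' are required; 'ragged_rank', 'inner_shape',
  # 'default', 'shape', and 'expected_shape' are optional and default to None.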
  @parameterized.named_parameters(
      # Simple 2D ragged tensors (with one ragged dimension)
      {
          'testcase_name': 'shape_2xN',
          'rt_input': [[0, 1, 2], [], [3]],
          'expected': [[0, 1, 2], [0, 0, 0], [3, 0, 0]]
      },
      {
          'testcase_name': 'shape_2xN_default_0D',
          'rt_input': [[0, 1, 2], [], [3]],
          'default': 5,
          'expected': [[0, 1, 2], [5, 5, 5], [3, 5, 5]]
      },
      {
          'testcase_name': 'empty_first_row',
          'rt_input': [[], [], [3, 4], []],
          'expected': [[0, 0], [0, 0], [3, 4], [0, 0]]
      },
      {
          'testcase_name': 'empty_last_row',
          'rt_input': [[0, 1, 2], [], [3], []],
          'expected': [[0, 1, 2], [0, 0, 0], [3, 0, 0], [0, 0, 0]]
      },
      {
          'testcase_name': 'shape_4xN',
          'rt_input': [[1, 2, 3], [], [4], [5, 6]],
          'expected': [[1, 2, 3], [0, 0, 0], [4, 0, 0], [5, 6, 0]]
      },
      {
          'testcase_name': 'shape_4xN_default_0D',
          'rt_input': [[1, 2, 3], [], [4], [5, 6]],
          'default': 9,
          'expected': [[1, 2, 3], [9, 9, 9], [4, 9, 9], [5, 6, 9]]
      },
      {
          'testcase_name': 'shape_2xN_already_dense',
          'rt_input': [[6, 7, 8], [9, 10, 11]],
          'expected': [[6, 7, 8], [9, 10, 11]],
      },
      {
          'testcase_name': 'shape_2xN_string_already_dense',
          'rt_input': [[b'a', b'b', b'c'],
                       [b'd', b'e', b'antidisestablishmentarianism']],
          'ragged_rank': 1,
          'expected': [[b'a', b'b', b'c'],
                       [b'd', b'e', b'antidisestablishmentarianism']],
      },
      # 3D ragged tensors with two ragged dimensions
      {
          'testcase_name': 'shape_4xNxM',
          'rt_input': [[[1, 2], [], [3, 4]], [], [[5]], [[6, 7], [8]]],
          'expected': [
              [[1, 2], [0, 0], [3, 4]],  #
              [[0, 0], [0, 0], [0, 0]],  #
              [[5, 0], [0, 0], [0, 0]],  #
              [[6, 7], [8, 0], [0, 0]],  #
          ]
      },
      {
          'testcase_name': 'shape_4xNxM_default_0D',
          'rt_input': [[[1, 2], [], [3, 4]], [], [[5]], [[6, 7], [8]]],
          'default': 9,
          'expected': [
              [[1, 2], [9, 9], [3, 4]],  #
              [[9, 9], [9, 9], [9, 9]],  #
              [[5, 9], [9, 9], [9, 9]],  #
              [[6, 7], [8, 9], [9, 9]],  #
          ]
      },
      {
          'testcase_name': 'shape_1xNx1_default_0D',
          'rt_input': [[[1], [2], [3]]],
          'ragged_rank': 1,
          'default': 0,
          'expected': [[[1], [2], [3]]],
      },
      {
          'testcase_name': 'shape_2xNx2_already_dense',
          'rt_input': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [16, 17]]],
          'ragged_rank': 1,
          'expected': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [16, 17]]],
      },
      {
          'testcase_name': 'shape_2xNx2_already_dense_default_1D',
          'rt_input': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [16, 17]]],
          'ragged_rank': 1,
          'default': [31, 32],
          'expected': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [16, 17]]],
      },
      {
          'testcase_name': 'shape_2xNx2_string_already_dense',
          'rt_input': [[[b'a', b'b'], [b'c', b'd'], [b'e', b'f']],
                       [[b'g', b'jalapeno'], [b'kangaroo', b'llama'],
                        [b'manzana', b'nectar']]],
          'ragged_rank': 1,
          'expected': [[[b'a', b'b'], [b'c', b'd'], [b'e', b'f']],
                       [[b'g', b'jalapeno'], [b'kangaroo', b'llama'],
                        [b'manzana', b'nectar']]],
      },
      # 3D ragged tensors with one ragged dimension
      {
          'testcase_name': 'shape_4xNx1_default_1D',
          'rt_input': [[[1], [2], [3]], [], [[4]], [[5], [6]]],
          'ragged_rank': 1,
          'default': [9],
          'expected': [[[1], [2], [3]],
                       [[9], [9], [9]],
                       [[4], [9], [9]],
                       [[5], [6], [9]]]
      },
      {
          'testcase_name': 'shape_2xNx2_default_0D',
          'rt_input': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15]]],
          'ragged_rank': 1,
          'default': 2,
          'expected': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [2, 2]]],
      },
      {
          'testcase_name': 'shape_2xNx2_default_1D',
          'rt_input': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15]]],
          'ragged_rank': 1,
          'default': [2, 3],
          'expected': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [2, 3]]],
      },
      # 4D ragged tensors with 3 ragged dimensions
      {
          'testcase_name': 'shape_1xNxMxK_default_0D',
          'rt_input': [[[[1], [2]], [], [[3]]]],
          'default': 9,
          'expected': [[[[1], [2]], [[9], [9]], [[3], [9]]]],
      },
      # Broadcast default
      {
          'testcase_name': 'shape_2xNx2x2_default_2x1',
          'rt_input': [[[[1, 2], [3, 4]]], []],
          'ragged_rank': 1,
          'default': [[5], [6]],
          'expected': [[[[1, 2], [3, 4]]],
                       [[[5, 5], [6, 6]]]],
      },
      {
          'testcase_name': 'shape_2xNx2x2_default_1x2',
          'rt_input': [[[[1, 2], [3, 4]]], []],
          'ragged_rank': 1,
          'default': [[5, 6]],
          'expected': [[[[1, 2], [3, 4]]],
                       [[[5, 6], [5, 6]]]],
      },
      # Explicit shape
      {
          'testcase_name': 'shape_4xN_with_crop',
          'rt_input': [[0, 1, 2, 3], [], [4], []],
          'shape': [2, 3],
          'expected': [[0, 1, 2], [0, 0, 0]],
      },
      {
          'testcase_name': 'shape_2xN_with_pad',
          'rt_input': [[1, 2], [3]],
          'shape': [3, 3],
          'expected': [[1, 2, 0], [3, 0, 0], [0, 0, 0]],
      },
      {
          'testcase_name': 'shape_4xN_with_crop_and_pad',
          'rt_input': [[0, 1, 2, 3], [], [4], []],
          'shape': [2, 8],
          'expected': [[0, 1, 2, 3, 0, 0, 0, 0],
                       [0, 0, 0, 0, 0, 0, 0, 0]],
      },
      {
          'testcase_name': 'shape_4xN_with_tuple_shape',
          'rt_input': [[0, 1, 2, 3], [], [4], []],
          'shape': (2, 3),
          'expected': [[0, 1, 2], [0, 0, 0]],
      },
      {
          'testcase_name': 'shape_4xN_with_tensorshape_shape',
          'rt_input': [[0, 1, 2, 3], [], [4], []],
          'shape': tensor_shape.TensorShape([2, 3]),
          'expected': [[0, 1, 2], [0, 0, 0]],
      },
      {
          'testcase_name': 'shape_4xN_with_partial_shape',
          'rt_input': [[0, 1, 2, 3], [], [4], []],
          'shape': tensor_shape.TensorShape([2, None]),
          'expected': [[0, 1, 2, 3], [0, 0, 0, 0]],
      },
      # Empty tensors
      {
          'testcase_name': 'shape_0xN',
          'rt_input': [],
          'ragged_rank': 1,
          'expected': [],
          'expected_shape': [0, 0],
      },
      {
          'testcase_name': 'shape_0xNxM',
          'rt_input': [],
          'ragged_rank': 2,
          'expected': [],
          'expected_shape': [0, 0, 0],
      },
      # {
      #     'testcase_name': 'shape_0xNx2',
      #     'rt_input': [],
      #     'ragged_rank': 1,
      #     'inner_shape': [2],
      #     'expected': [],
      #     'expected_shape': [0, 0, 2],
      # },
      {
          'testcase_name': 'shape_2xN_empty',
          'rt_input': [[], []],
          'expected': [[], []],
          'expected_shape': [2, 0],
      },
  )  # pyformat: disable
  def testRaggedTensorToTensor(self,
                               rt_input,
                               expected,
                               ragged_rank=None,
                               inner_shape=None,
                               default=None,
                               shape=None,
                               expected_shape=None):
    rt1 = ragged_factory_ops.constant(
        rt_input, ragged_rank=ragged_rank, inner_shape=inner_shape)
    rt2 = rebuild_ragged_tensor_with_value_rowids(rt1)
    for rt in [rt1, rt2]:
      for use_placeholder in [False, True]:
        if use_placeholder:
          if default is not None:
            default = make_placeholder(default)
          rt = nest.map_structure(make_placeholder, rt, expand_composites=True)
        dt = rt.to_tensor(default_value=default, shape=shape)
        self.assertIsInstance(dt, ops.Tensor)
        self.assertEqual(rt.dtype, dt.dtype)
        if shape is not None:
          self.assertTrue(dt.shape.is_compatible_with(shape))
        else:
          self.assertTrue(dt.shape.is_compatible_with(rt.shape))
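        # For the empty-tensor cases, `expected` is a flat (empty) list, so
        # reshape it to `expected_shape` before comparing.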
        if expected_shape is not None:
          expected = np.ndarray(expected_shape, buffer=np.array(expected))
        self.assertAllEqual(dt, expected)

  @parameterized.parameters([
      {
          'rt_input': [[1, 2, 3]],
          'default': 'a',
          'error_type': TypeError,
          'error': r'Expected int32|Cannot convert',
      },
      {
          'rt_input': [[1, 2, 3]],
          'default': [0],
          'error': r'default_value\.shape=\[1\] and '
                   r'rt_input\.flat_values\.shape=\[3\] are incompatible: '
                   r'default_value\.rank = 1  must be less than '
                   r'rt_input\.flat_values\.rank = 1'
      },
      {
          'rt_input': [[[1, 2], [3, 4]], [[5, 6]]],
          'ragged_rank': 1,
          'default': [7, 8, 9],
          'error': r'default_value\.shape=\[3\] and '
                   r'rt_input\.flat_values\.shape=\[3,2\] are incompatible: '
                   r'default_value\.shape\[-1\] = 3 but '
                   r'rt_input\.flat_values\.shape\[-1\] = 2'
      },
      {
          'rt_input': [[1, 2, 3]],
          'shape': [3, 3, 3],
          'error': r'rt_input\.shape and shape=\[.,.,.\] are incompatible: '
                   r'rt_input\.rank = 2 but shape\.rank = 3'
      },
      {
          'rt_input': [[[1, 2, 3]]],
          'ragged_rank': 1,
          'shape': [1, 1, 4],
          'error': r'rt_input\.shape and shape=\[1,1,4\] are incompatible: '
                   r'rt_input\.shape\[2\] = 3 but shape\[2\] = 4'
      },
  ])
  def testError(self,
                rt_input,
                error,
                error_type=(ValueError, errors.InvalidArgumentError),
                default=None,
                ragged_rank=None,
                shape=None):

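    # Depending on execution mode and on when the failure is detected, the
    # error may surface as a ValueError (static checks) or as an
    # InvalidArgumentError (runtime checks), hence the tuple default above.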
    rt = ragged_factory_ops.constant(rt_input, ragged_rank=ragged_rank)
    with self.assertRaisesRegex(error_type, error):
      self.evaluate(rt.to_tensor(default_value=default, shape=shape))
    rt_placeholder = nest.map_structure(
        make_placeholder, rt, expand_composites=True)
    with self.assertRaisesRegex(error_type, error):
      self.evaluate(
          rt_placeholder.to_tensor(default_value=default, shape=shape))

  def test_shape_limit_shape_is_tensor(self):
    input_data = ragged_factory_ops.constant([[0, 1, 2, 3], [], [4], []])
    actual = input_data.to_tensor(
        shape=constant_op.constant([2, 3], dtype=dtypes.int64))
    self.assertAllEqual(actual, [[0, 1, 2], [0, 0, 0]])
    self.assertEqual(actual.shape.as_list(), [2, 3])

  def test_shape_limit_shape_is_tensor_unknown_rank(self):
    input_data = ragged_factory_ops.constant([[0, 1, 2, 3], [], [4], []])
    actual = input_data.to_tensor(
        shape=constant_op.constant(-1, dtype=dtypes.int64))
    self.assertAllEqual(
        actual, [[0, 1, 2, 3], [0, 0, 0, 0], [4, 0, 0, 0], [0, 0, 0, 0]])
    self.assertTrue(actual.shape.is_compatible_with([4, 4]))

  def test_shape_limit_shape_is_tensor_unknown_dim(self):
    input_data = ragged_factory_ops.constant([[0, 1, 2, 3], [], [4], []])
    actual = input_data.to_tensor(
        shape=constant_op.constant([2, -1], dtype=dtypes.int64))
    self.assertAllEqual(actual, [[0, 1, 2, 3], [0, 0, 0, 0]])
    self.assertTrue(actual.shape.is_compatible_with([2, None]))

  def test_shape_limit_shape_is_tensor_int32(self):
    input_data = ragged_factory_ops.constant([[0, 1, 2, 3], [], [4], []])
    actual = input_data.to_tensor(
        shape=constant_op.constant([2, 3], dtype=dtypes.int32))
    self.assertAllEqual(actual, [[0, 1, 2], [0, 0, 0]])
    self.assertEqual(actual.shape.as_list(), [2, 3])

  def test_shape_expand_first_dim(self):
    input_data = ragged_factory_ops.constant([[0, 1, 2], [], [3]])
    actual = input_data.to_tensor(shape=[4, 4])
    self.assertAllEqual(
        actual, [[0, 1, 2, 0], [0, 0, 0, 0], [3, 0, 0, 0], [0, 0, 0, 0]])
    self.assertEqual(actual.shape.as_list(), [4, 4])

  def test_value_transposed(self):
    # Check that transposed data is not an issue.
    my_value = array_ops.transpose(
        constant_op.constant([[0, 1, 2, 3], [4, 5, 6, 7]]))
    input_data = RaggedTensor.from_value_rowids(
        values=my_value,
        value_rowids=constant_op.constant([0, 1, 2, 3], dtype=dtypes.int64),
        nrows=constant_op.constant(4, dtype=dtypes.int64),
        validate=True)
    self.assertAllEqual(input_data, [[[0, 4]], [[1, 5]], [[2, 6]], [[3, 7]]])

  def test_broadcast_default(self):
    # The dense dimension here is 2 x 2
    input_data = ragged_factory_ops.constant([[[[1, 2], [3, 4]]], []],
                                             ragged_rank=1)
    # This placeholder has a 2 x 1 dimension.
    default_value = make_placeholder([[5], [6]])
    actual = input_data.to_tensor(default_value=default_value)
    expected = [[[[1, 2], [3, 4]]], [[[5, 5], [6, 6]]]]
    self.assertAllEqual(actual, expected)

  def test_broadcast_default_no_placeholder(self):
    input_data = ragged_factory_ops.constant([[[[1, 2], [3, 4]]], []],
                                             ragged_rank=1)
    # default_value has a 2 x 1 dimension.
    default_value = constant_op.constant([[5], [6]], shape=None)
    actual = input_data.to_tensor(default_value=default_value)
    expected = [[[[1, 2], [3, 4]]], [[[5, 5], [6, 6]]]]
    self.assertAllEqual(actual, expected)

  def test_shape_expand_second_dim(self):
    input_data = ragged_factory_ops.constant([[0, 1, 2], [], [3], []])
    actual = input_data.to_tensor(shape=[3, 4])
    self.assertAllEqual(actual, [[0, 1, 2, 0], [0, 0, 0, 0], [3, 0, 0, 0]])

  @parameterized.parameters(
      ([2, 3, 4], None, [2, 3, 4]),
      ([2, 3, 4], [None, None, None], [2, 3, 4]),
      ([2, 3, 4], [None, 3, None], [2, 3, 4]),
      ([2, 3, 4], [None, 3, 4], [2, 3, 4]),
      ([2, 3, 4], [2, 3, 4], [2, 3, 4]),
      )
  def test_preserve_shape_roundtrip(
      self, input_shape, to_tensor_shape, expected_shape):
    tensor = array_ops.zeros(input_shape)
    ragged_from_tensor = RaggedTensor.from_tensor(tensor, ragged_rank=2)
    recovered_tensor = ragged_from_tensor.to_tensor(shape=to_tensor_shape)
    self.assertAllEqual(tensor.shape.as_list(), expected_shape)
    self.assertAllEqual(ragged_from_tensor.shape.as_list(), expected_shape)
    self.assertAllEqual(recovered_tensor.shape.as_list(), expected_shape)

  def test_empty_tensor_with_shape(self):
    input_data = RaggedTensor.from_value_rowids(
        values=constant_op.constant([], dtype=dtypes.int64),
        value_rowids=constant_op.constant([], dtype=dtypes.int64),
        nrows=constant_op.constant(2, dtype=dtypes.int64),
        validate=True)
    actual = input_data.to_tensor(default_value=3, shape=[2, 3])
    self.assertAllEqual(actual, [[3, 3, 3], [3, 3, 3]])

  # pylint: disable=bad-whitespace
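  # In each case below, `output_grad` is fed in as the incoming gradient for
  # the dense output.  `rt_grad` collects the entries of `output_grad` that
  # map back to real ragged values (entries cropped away by `shape` get a zero
  # gradient), and `default_grad` accumulates the entries that land on
  # default-filled (padded) positions.  For example, in '2d_default_shape' the
  # padded cells receive gradients 5, 4, and 1, so default_grad = sum([5, 4, 1]).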
  @parameterized.named_parameters([
      dict(
          testcase_name = '2d_default_shape',
          shape         = None,
          rt_value      = [[1, 2, 3], [4], [5, 6]],
          rt_grad       = [[9, 8, 7], [6], [3, 2]],
          default_value = 0,
          default_grad  = sum([5, 4, 1]),
          output_value  = [[1, 2, 3], [4, 0, 0], [5, 6, 0]],
          output_grad   = [[9, 8, 7], [6, 5, 4], [3, 2, 1]]),
      dict(
          testcase_name = '2d_pad',
          shape         = [4, 4],
          rt_value      = [[1, 2, 3], [4], [5, 6]],
          rt_grad       = [[9, 8, 7], [5], [1, 0]],
          default_value = 0,
          default_grad  = sum([6, 4, 3, 2, 1, 2, 3, 4, 5, 6]),
          output_value  = [
              [1, 2, 3, 0], [4, 0, 0, 0], [5, 6, 0, 0], [0, 0, 0, 0]],
          output_grad   = [
              [9, 8, 7, 6], [5, 4, 3, 2], [1, 0, 1, 2], [3, 4, 5, 6]]),
      dict(
          testcase_name = '2d_pad_and_crop',
          shape         = [5, 3],
          rt_value      = [[1, 2, 3], [4], [5, 6, 7, 8, 9], [8]],
          rt_grad       = [[9, 8, 7], [6], [3, 2, 1, 0, 0], [2]],
          default_value = 0,
          default_grad  = sum([5, 4, 3, 4, 5, 6, 7]),
          output_value  = [
              [1, 2, 3], [4, 0, 0], [5, 6, 7], [8, 0, 0], [0, 0, 0]],
          output_grad   = [
              [9, 8, 7], [6, 5, 4], [3, 2, 1], [2, 3, 4], [5, 6, 7]]),
      dict(
          testcase_name = '3d_rrank_2',
          shape         = [2, 2, 2],
          rt_value      = [[[9, 8, 7], [6]], [[5, 4]]],
          rt_grad       = [[[1, 2, 0], [3]], [[5, 6]]],
          default_value = 3,
          default_grad  = sum([4, 7, 8]),
          output_value  = [[[9, 8], [6, 3]], [[5, 4], [3, 3]]],
          output_grad   = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]),
      dict(
          testcase_name = '3d_rrank_1_with_0d_default',
          ragged_rank   = 1,
          shape         = [2, 2, 2],
          rt_value      = [[[9, 8], [7, 6]], [[5, 4]]],
          rt_grad       = [[[1, 2], [3, 4]], [[5, 6]]],
          default_value = 3,
          default_grad  = sum([7, 8]),
          output_value  = [[[9, 8], [7, 6]], [[5, 4], [3, 3]]],
          output_grad   = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]),
      dict(
          testcase_name = '3d_rrank_1_with_1d_default',
          ragged_rank   = 1,
          shape         = [2, 2, 2],
          rt_value      = [[[9, 8], [7, 6]], [[5, 4]]],
          rt_grad       = [[[1, 2], [3, 4]], [[5, 6]]],
          default_value = [3, 2],
          default_grad  = [7, 8],
          output_value  = [[[9, 8], [7, 6]], [[5, 4], [3, 2]]],
          output_grad   = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]),
      dict(
          testcase_name = '3d_rrank_1_with_1d_broadcast_default',
          ragged_rank   = 1,
          shape         = [2, 2, 2],
          rt_value      = [[[9, 8], [7, 6]], [[5, 4]]],
          rt_grad       = [[[1, 2], [3, 4]], [[5, 6]]],
          default_value = [3],
          default_grad  = [7 + 8],
          output_value  = [[[9, 8], [7, 6]], [[5, 4], [3, 3]]],
          output_grad   = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]),
      dict(
          testcase_name = '4d_rrank_1_with_2d_default',
          ragged_rank   = 1,
          shape         = [3, 3, 2, 1],
          rt_value      = [[[[9], [8]], [[7], [6]]], [[[5], [4]]]],
          rt_grad       = [[[[1], [2]], [[3], [4]]], [[[7], [8]]]],
          default_value = [[3], [2]],
          default_grad  = [[5 + 9 + 2 + 4 + 6 + 8], [6 + 1 + 3 + 5 + 7 + 9]],
          output_value  = [[[[9], [8]], [[7], [6]], [[3], [2]]],
                           [[[5], [4]], [[3], [2]], [[3], [2]]],
                           [[[3], [2]], [[3], [2]], [[3], [2]]]],
          output_grad   = [[[[1], [2]], [[3], [4]], [[5], [6]]],
                           [[[7], [8]], [[9], [1]], [[2], [3]]],
                           [[[4], [5]], [[6], [7]], [[8], [9]]]]),
      dict(
          testcase_name = '4d_rrank_1_with_0d_default',
          ragged_rank   = 1,
          shape         = [3, 3, 2, 1],
          rt_value      = [[[[9], [8]], [[7], [6]]], [[[5], [4]]]],
          rt_grad       = [[[[1], [2]], [[3], [4]]], [[[7], [8]]]],
          default_value = 3,
          default_grad  = 5 + 9 + 2 + 4 + 6 + 8 + 6 + 1 + 3 + 5 + 7 + 9,
          output_value  = [[[[9], [8]], [[7], [6]], [[3], [3]]],
                           [[[5], [4]], [[3], [3]], [[3], [3]]],
                           [[[3], [3]], [[3], [3]], [[3], [3]]]],
          output_grad   = [[[[1], [2]], [[3], [4]], [[5], [6]]],
                           [[[7], [8]], [[9], [1]], [[2], [3]]],
                           [[[4], [5]], [[6], [7]], [[8], [9]]]]),
      dict(
          testcase_name = 'zero_size',
          shape         = [0, 0],
          rt_value      = [[9, 8], [7, 6, 5], [4]],
          rt_grad       = [[0, 0], [0, 0, 0], [0]],
          default_value = 3,
          default_grad  = 0,
          output_value  = [],
          output_grad   = [])
  ])  # pyformat: disable
  def test_gradient(self,
                    shape,
                    rt_value,
                    rt_grad,
                    default_value,
                    default_grad,
                    output_value,
                    output_grad,
                    ragged_rank=None):
    """Tests that ragged_to_dense generates the right gradient.

    Args:
      shape: The `shape` arg for `ragged_to_dense`.
      rt_value: The `rt_input` arg for `ragged_to_dense`.
      rt_grad: The expected gradient for `rt_value`.  Corresponds 1:1 with
        `rt_value`.
      default_value: The `default_value` arg for `ragged_to_dense`.
      default_grad: The expected gradient for `default_value`.  Corresponds 1:1
        with `default_value`.
      output_value: The expected output of `ragged_to_dense`.
      output_grad: The gradient for the output (used to generate the gradients
        `rt_grad` and `default_grad`).  Corresponds 1:1 with `output_value`.
      ragged_rank: Ragged rank for `rt_value`.
    """
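    # `gradients_impl.gradients` only works when building a graph, so this
    # test is skipped under eager execution.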
    if context.executing_eagerly():
      return

    rt_value = ragged_factory_ops.constant(
        rt_value, dtype=dtypes.float32, ragged_rank=ragged_rank)
    rt_grad = ragged_factory_ops.constant(
        rt_grad, dtype=dtypes.float32, ragged_rank=ragged_rank)
    default_value = constant_op.constant(default_value, dtype=dtypes.float32)
    default_grad = constant_op.constant(default_grad, dtype=dtypes.float32)
    output_value = constant_op.constant(
        output_value, dtype=dtypes.float32, shape=shape)
    output_grad = constant_op.constant(
        output_grad, dtype=dtypes.float32, shape=shape)
    shape = tensor_shape.as_shape(shape)

    # There are different code paths for ragged_to_dense, depending on whether
    # the RaggedTensor was created from row_splits or value_rowids.  Make sure
    # that we test both.
    for partition_type in ['row_splits', 'value_rowids']:

      # There are different code paths when computing the gradient for
      # default_value, depending on whether shape info is statically available;
      # make sure that we test all code paths.
      for shape_info in ['known', 'unknown_dims', 'unknown_rank']:
        rt_val = self.rt_with_partition_type(rt_value, partition_type)
        rt_val = self.wrap_in_placeholder(rt_val, shape_info)
        default_val = self.wrap_in_placeholder(default_value, shape_info)
        shape_val = self.wrap_in_placeholder(shape, shape_info)
        out = rt_val.to_tensor(default_val, shape=shape_val)
        self.assertAllClose(out, output_value)

        actual_flat_values_grad, actual_default_grad = gradients_impl.gradients(
            ys=out,
            xs=(rt_value.flat_values, default_value),
            grad_ys=output_grad)
        self.assertIsInstance(actual_flat_values_grad,
                              indexed_slices.IndexedSlices)
        actual_flat_values_grad = ops.convert_to_tensor(actual_flat_values_grad)
        actual_values_grad = rt_value.with_flat_values(actual_flat_values_grad)
        self.assertAllClose(actual_values_grad, rt_grad)
        self.assertAllClose(actual_default_grad, default_grad)

  def rt_with_partition_type(self, rt, partition_type):
    if isinstance(rt, ops.Tensor):
      return rt
    if partition_type == 'row_splits':
      return rt
    if partition_type == 'value_rowids':
      return ragged_tensor.RaggedTensor.from_value_rowids(
          self.rt_with_partition_type(rt.values, partition_type),
          rt.value_rowids(), rt.nrows())
    raise AssertionError('Unexpected partition_type %r' % partition_type)

  def wrap_in_placeholder(self, arg, shape_info):
    """Wraps `arg` in a placeholder to limit static shape info.

    Args:
      arg: The value to wrap.  A Tensor, RaggedTensor, or TensorShape.
      shape_info: One of ['known', 'unknown_dims', 'unknown_rank'].

    Returns:
      * If shape_info is 'known': returns `arg`.
      * If shape_info is 'unknown_dims': returns a placeholder wrapping `arg`
        where the dimension sizes are unknown.  If `arg` is a TensorShape,
        then convert it to a vector first.  If `arg` is a RaggedTensor, then
        wrap the flat_values.
      * If shape_info is 'unknown_rank': returns a placeholder wrapping `arg`
        where the rank is unknown.  If `arg` is a TensorShape, then convert it
        to a vector first.  If `arg` is a RaggedTensor, then wrap the
        flat_values.
    """
    if shape_info == 'known':
      return arg
    if isinstance(arg, ragged_tensor.RaggedTensor):
      return arg.with_flat_values(
          self.wrap_in_placeholder(arg.flat_values, shape_info))
    if isinstance(arg, tensor_shape.TensorShape):
      if arg.ndims is None:
        return arg
      arg = constant_op.constant(arg.as_list())
    if shape_info == 'unknown_rank':
      return array_ops.placeholder_with_default(arg, None)
    if shape_info == 'unknown_dims':
      return array_ops.placeholder_with_default(arg, [None] * arg.shape.rank)
    raise AssertionError('Unexpected shape_info %r' % shape_info)

  def test_shape_is_list_including_tensor_element(self):
    rt = ragged_factory_ops.constant([[1, 2, 3], [4], [5, 6]])
    result = rt.to_tensor(shape=[2, constant_op.constant(2)])
    self.assertAllEqual(result, [[1, 2], [4, 0]])


class RaggedToDenseBenchmark(googletest.Benchmark):

  # Configurations to test.  See `run_benchmark` for config param docs.
  CONFIGS = [
      {'shape': [10, 10]},
      {'shape': [10, 1000]},
      {'shape': [1000, 10]},
      {'shape': [1000, 10], 'fill': [1, 0.95]},  # Mostly full.
      {'shape': [1000, 10], 'fill': [1, 0.05]},  # Mostly empty.
      {'shape': [1000, 10], 'dtype': dtypes.string},
      {'shape': [1000, 10], 'dtype': dtypes.int64},
      {'shape': [100, 100]},
      {'shape': [50, 50, 32]},
      {'shape': [100, 100, 100], 'min_iters': 100},
      {'shape': [1000, 1000], 'min_iters': 100},
      {'shape': [10, 10, 10, 10, 10]},
      {'shape': [10, 10, 10, 10, 10], 'ragged_rank': 1},
      {'shape': [10, 10, 10, 10, 10], 'ragged_rank': 2},
      {'shape': [50, 50, 32], 'ragged_rank': 1, 'default_shape': [32]},
      {'shape': [200, 50, 32], 'ragged_rank': 1, 'default_shape': [32]}
  ]  # pyformat: disable

  def run_benchmark(self,
                    shape=(100, 100),
                    ragged_rank=None,
                    dtype=dtypes.float32,
                    fill=None,
                    default_shape=(),
                    output_shape=None,
                    min_iters=1000):
    """Run a benchmark with the specified configuration parameters.

    Args:
      shape: Bounding box for the input ragged tensor.
      ragged_rank: Ragged rank for the input ragged tensor.  Defaults to
        `len(shape)-1`.
      dtype: Data type for the input ragged tensor.
      fill: How full each dimension should be (0-1).  Corresponds 1:1 with
        `shape`.  Defaults to 0.8 for each dimension.
      default_shape: Shape for the default (padding) value.
      output_shape: Output shape -- ragged tensor will be padded or cropped to
        this shape.
      min_iters: Minimum iterations for benchmark.
    """
    if ragged_rank is None:
      ragged_rank = len(shape) - 1
    if fill is None:
      fill = [0.8 for _ in shape]

    # Build the inputs for the op.
    rt_input = self._generateRaggedTensor(shape, ragged_rank, dtype, fill)
    default_value = constant_op.constant(
        self._generateRaggedTensor(default_shape, 0, dtype), dtype=dtype)

    mbs = np.prod(shape) / (2**20)
    with session.Session(config=benchmark.benchmark_config()) as sess:
      extras = {
          'shape': shape,
          'ragged_rank': ragged_rank,
          'dtype': dtype,
          'fill': fill,
          'default_shape': default_shape
      }
      rt = ragged_factory_ops.constant(rt_input, dtype, ragged_rank=ragged_rank)

      # Inputs for with_splits:
      splits_rt_placeholder = ragged_factory_ops.placeholder(
          dtype, ragged_rank, shape[ragged_rank + 1:])
      splits_feed_dict = {splits_rt_placeholder: sess.run(rt)}

      # Inputs for with_rowids:
      rowids_feed_dict = {}
      rowids_rt_placeholder = rebuild_ragged_tensor_with_value_rowids(
          rt, rowids_feed_dict, sess)

      # Common arguments for benchmarks:
      run_op_benchmark_kwargs = dict(
          sess=sess,
          store_memory_usage=True,
          min_iters=min_iters,
          burn_iters=max(5, min_iters // 10),
          mbs=mbs,
          extras=extras)

      ragged_to_tensor_with_splits = splits_rt_placeholder.to_tensor(
          default_value=default_value)
      self.run_op_benchmark(
          op_or_tensor=ragged_to_tensor_with_splits.op,
          name='ragged_to_tensor_with_splits',
          feed_dict=splits_feed_dict,
          **run_op_benchmark_kwargs)

      ragged_to_tensor_with_rowids = rowids_rt_placeholder.to_tensor(
          default_value=default_value)
      self.run_op_benchmark(
          op_or_tensor=ragged_to_tensor_with_rowids.op,
          name='ragged_to_tensor_with_rowids',
          feed_dict=rowids_feed_dict,
          **run_op_benchmark_kwargs)

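  # Builds a nested Python list bounded by `shape`.  Dimension 0 and the dense
  # dimensions (axis > ragged_rank) always use their full size; along each
  # ragged dimension, every candidate slot is kept with probability
  # `fill[axis]`, so e.g. fill=0.8 yields rows that are ~80% full on average.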
  def _generateRaggedTensor(self, shape, ragged_rank, dtype, fill=None, axis=0):
    if axis == len(shape):
      value = random.random()
      if dtype == dtypes.string:
        value = str(value)
      if dtype.is_integer:
        value = int(value * 1000)
      return value
    if axis == 0 or axis > ragged_rank:
      slice_size = shape[axis]
    else:
      slice_size = (np.random.geometric(fill[axis], shape[axis]) == 1).sum()
    return [
        self._generateRaggedTensor(shape, ragged_rank, dtype, fill, axis + 1)
        for _ in range(slice_size)
    ]

  def benchmark_ragged_to_dense(self):
    random.seed(5)
    for config in self.CONFIGS:
      self.run_benchmark(**config)


if __name__ == '__main__':
  googletest.main()