# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for layer graphs construction & handling."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.engine import input_layer as input_layer_lib
from tensorflow.python.keras.engine import network as network_lib
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.platform import test
38try:
39  import yaml  # pylint:disable=g-import-not-at-top
40except ImportError:
41  yaml = None
42


class TopologyConstructionTest(keras_parameterized.TestCase):

46  @test_util.run_deprecated_v1
47  def test_get_updates(self):
48
49    class MyLayer(keras.layers.Layer):
50
51      def build(self, input_shape):
52        self.a = self.add_variable('a',
53                                   (1, 1),
54                                   'float32',
55                                   trainable=False)
56        self.b = self.add_variable('b',
57                                   (1, 1),
58                                   'float32',
59                                   trainable=False)
60        self.add_update(state_ops.assign_add(self.a, [[1.]],
61                                             name='unconditional_update'))
62        self.built = True
63
64      def call(self, inputs):
65        self.add_update(state_ops.assign_add(self.b, inputs,
66                                             name='conditional_update'),
67                        inputs=True)
68        return inputs + 1
69
70    x1 = input_layer_lib.Input(shape=(1,))
71    layer = MyLayer()
72    _ = layer.apply(x1)
73
74    self.assertEqual(len(layer.updates), 2)
75    self.assertEqual(len(layer.get_updates_for(x1)), 1)
76    self.assertEqual(len(layer.get_updates_for(None)), 1)
77
78    x2 = input_layer_lib.Input(shape=(1,))
79    y2 = layer.apply(x2)
80
81    self.assertEqual(len(layer.updates), 3)
82    self.assertEqual(len(layer.get_updates_for(x1)), 1)
83    self.assertEqual(len(layer.get_updates_for(x2)), 1)
84    self.assertEqual(len(layer.get_updates_for(None)), 1)
85
86    network = network_lib.Network(x2, y2)
87    self.assertEqual(len(network.updates), 2)
88    self.assertEqual(len(network.get_updates_for(x2)), 1)
89    self.assertEqual(len(network.get_updates_for(None)), 1)
90
91    x3 = input_layer_lib.Input(shape=(1,))
92    _ = layer.apply(x3)
93    self.assertEqual(len(network.updates), 2)
94
95    x4 = input_layer_lib.Input(shape=(1,))
96    _ = network(x4)
97    self.assertEqual(len(network.updates), 3)
98    self.assertEqual(len(network.get_updates_for(x2)), 1)
99    self.assertEqual(len(network.get_updates_for(x4)), 1)
100    self.assertEqual(len(network.get_updates_for(None)), 1)
101
102    network.add_update(state_ops.assign_add(layer.a, [[1]]))
103    self.assertEqual(len(network.updates), 4)
104    self.assertEqual(len(network.get_updates_for(None)), 2)
105
106    network.add_update(state_ops.assign_add(layer.b, x4), inputs=True)
107    self.assertEqual(len(network.updates), 5)
108    self.assertEqual(len(network.get_updates_for(x4)), 2)
109
110  @test_util.run_in_graph_and_eager_modes()
111  def test_get_updates_bn(self):
112    x1 = input_layer_lib.Input(shape=(1,))
113    layer = keras.layers.BatchNormalization()
114    _ = layer.apply(x1)
115
116    self.assertEqual(len(layer.updates), 2)
117    self.assertEqual(len(layer.get_updates_for(x1)), 2)
118    self.assertEqual(len(layer.get_updates_for(None)), 0)
119
120  @test_util.run_deprecated_v1
121  def test_get_losses(self):
122
123    class MyLayer(keras.layers.Layer):
124
125      def build(self, input_shape):
126        self.a = self.add_variable('a',
127                                   (1, 1),
128                                   'float32',
129                                   trainable=False)
130        self.b = self.add_variable('b',
131                                   (1, 1),
132                                   'float32',
133                                   trainable=False)
134        self.add_loss(math_ops.reduce_sum(self.a))
135        self.built = True
136
137      def call(self, inputs):
138        self.add_loss(math_ops.reduce_sum(inputs),
139                      inputs=True)
140        return inputs + 1
141
142    x1 = input_layer_lib.Input(shape=(1,))
143    layer = MyLayer()
144    _ = layer.apply(x1)
145
146    self.assertEqual(len(layer.losses), 2)
147    self.assertEqual(len(layer.get_losses_for(x1)), 1)
148    self.assertEqual(len(layer.get_losses_for(None)), 1)
149
150    x2 = input_layer_lib.Input(shape=(1,))
151    y2 = layer.apply(x2)
152
153    self.assertEqual(len(layer.losses), 3)
154    self.assertEqual(len(layer.get_losses_for(x1)), 1)
155    self.assertEqual(len(layer.get_losses_for(x2)), 1)
156    self.assertEqual(len(layer.get_losses_for(None)), 1)
157
158    network = network_lib.Network(x2, y2)
159    self.assertEqual(len(network.losses), 2)
160    self.assertEqual(len(network.get_losses_for(x1)), 0)
161    self.assertEqual(len(network.get_losses_for(x2)), 1)
162    self.assertEqual(len(network.get_losses_for(None)), 1)
163
164    x3 = input_layer_lib.Input(shape=(1,))
165    _ = layer.apply(x3)
166    self.assertEqual(len(network.losses), 2)
167
168    x4 = input_layer_lib.Input(shape=(1,))
169    _ = network(x4)
170    self.assertEqual(len(network.losses), 3)
171    self.assertEqual(len(network.get_losses_for(x2)), 1)
172    self.assertEqual(len(network.get_losses_for(x4)), 1)
173    self.assertEqual(len(network.get_losses_for(None)), 1)
174
175    network.add_loss(math_ops.reduce_sum(layer.a))
176    self.assertEqual(len(network.losses), 4)
177    self.assertEqual(len(network.get_losses_for(None)), 2)
178
179    network.add_loss(math_ops.reduce_sum(x4), inputs=True)
180    self.assertEqual(len(network.losses), 5)
181    self.assertEqual(len(network.get_losses_for(x4)), 2)
182
183  @test_util.run_in_graph_and_eager_modes()
184  def testTopologicalAttributes(self):
185    # test layer attributes / methods related to cross-layer connectivity.
186    a = input_layer_lib.Input(shape=(32,), name='input_a')
187    b = input_layer_lib.Input(shape=(32,), name='input_b')
188
189    # test input, output, input_shape, output_shape
190    test_layer = keras.layers.Dense(16, name='test_layer')
191    a_test = test_layer(a)
192    self.assertEqual(test_layer.input, a)
193    self.assertEqual(test_layer.output, a_test)
194    self.assertEqual(test_layer.input_shape, (None, 32))
195    self.assertEqual(test_layer.output_shape, (None, 16))
196
197    # test `get_*_at` methods
198    dense = keras.layers.Dense(16, name='dense_1')
199    a_2 = dense(a)
200    b_2 = dense(b)
201
202    self.assertEqual(dense.get_input_at(0), a)
203    self.assertEqual(dense.get_input_at(1), b)
204    self.assertEqual(dense.get_output_at(0), a_2)
205    self.assertEqual(dense.get_output_at(1), b_2)
206    self.assertEqual(dense.get_input_shape_at(0), (None, 32))
207    self.assertEqual(dense.get_input_shape_at(1), (None, 32))
208    self.assertEqual(dense.get_output_shape_at(0), (None, 16))
209    self.assertEqual(dense.get_output_shape_at(1), (None, 16))
210
211    # Test invalid value for attribute retrieval.
212    with self.assertRaises(ValueError):
213      dense.get_input_at(2)
214    with self.assertRaises(AttributeError):
215      new_dense = keras.layers.Dense(16)
216      _ = new_dense.input
217    with self.assertRaises(AttributeError):
218      new_dense = keras.layers.Dense(16)
219      _ = new_dense.output
220    with self.assertRaises(AttributeError):
221      new_dense = keras.layers.Dense(16)
222      _ = new_dense.output_shape
223    with self.assertRaises(AttributeError):
224      new_dense = keras.layers.Dense(16)
225      _ = new_dense.input_shape
226    with self.assertRaises(AttributeError):
227      new_dense = keras.layers.Dense(16)
228      a = input_layer_lib.Input(shape=(3, 32))
229      a = input_layer_lib.Input(shape=(5, 32))
230      a_2 = dense(a)
231      b_2 = dense(b)
232      _ = new_dense.input_shape
233    with self.assertRaises(AttributeError):
234      new_dense = keras.layers.Dense(16)
235      a = input_layer_lib.Input(shape=(3, 32))
236      a = input_layer_lib.Input(shape=(5, 32))
237      a_2 = dense(a)
238      b_2 = dense(b)
239      _ = new_dense.output_shape
240
241  @test_util.run_in_graph_and_eager_modes()
242  def testTopologicalAttributesMultiOutputLayer(self):
243
244    class PowersLayer(keras.layers.Layer):
245
246      def call(self, inputs):
247        return [inputs**2, inputs**3]
248
249    x = input_layer_lib.Input(shape=(32,))
250    test_layer = PowersLayer()
251    p1, p2 = test_layer(x)  # pylint: disable=not-callable
252
253    self.assertEqual(test_layer.input, x)
254    self.assertEqual(test_layer.output, [p1, p2])
255    self.assertEqual(test_layer.input_shape, (None, 32))
256    self.assertEqual(test_layer.output_shape, [(None, 32), (None, 32)])
257
258  @test_util.run_in_graph_and_eager_modes()
259  def testTopologicalAttributesMultiInputLayer(self):
260
261    class AddLayer(keras.layers.Layer):
262
263      def call(self, inputs):
264        assert len(inputs) == 2
265        return inputs[0] + inputs[1]
266
267    a = input_layer_lib.Input(shape=(32,))
268    b = input_layer_lib.Input(shape=(32,))
269    test_layer = AddLayer()
270    y = test_layer([a, b])  # pylint: disable=not-callable
271
272    self.assertEqual(test_layer.input, [a, b])
273    self.assertEqual(test_layer.output, y)
274    self.assertEqual(test_layer.input_shape, [(None, 32), (None, 32)])
275    self.assertEqual(test_layer.output_shape, (None, 32))
276
277  @test_util.run_deprecated_v1
278  def testBasicNetwork(self):
279    # minimum viable network
280    x = input_layer_lib.Input(shape=(32,))
281    dense = keras.layers.Dense(2)
282    y = dense(x)
283    network = network_lib.Network(x, y, name='dense_network')
284
285    # test basic attributes
286    self.assertEqual(network.name, 'dense_network')
287    self.assertEqual(len(network.layers), 2)  # InputLayer + Dense
288    self.assertEqual(network.layers[1], dense)
289    self.assertEqual(network.weights, dense.weights)
290    self.assertEqual(network.trainable_weights, dense.trainable_weights)
291    self.assertEqual(network.non_trainable_weights, dense.non_trainable_weights)
292
293    # test callability on Input
294    x_2 = input_layer_lib.Input(shape=(32,))
295    y_2 = network(x_2)
296    self.assertEqual(y_2.shape.as_list(), [None, 2])
297
298    # test callability on regular tensor
299    x_2 = array_ops.placeholder(dtype='float32', shape=(None, 32))
300    y_2 = network(x_2)
301    self.assertEqual(y_2.shape.as_list(), [None, 2])
302
303    # test network `trainable` attribute
304    network.trainable = False
305    self.assertEqual(network.weights, dense.weights)
306    self.assertEqual(network.trainable_weights, [])
307    self.assertEqual(network.non_trainable_weights,
308                     dense.trainable_weights + dense.non_trainable_weights)
309
310  @test_util.run_in_graph_and_eager_modes
311  def test_trainable_weights(self):
312    a = keras.layers.Input(shape=(2,))
313    b = keras.layers.Dense(1)(a)
314    model = keras.models.Model(a, b)
315
316    weights = model.weights
317    self.assertListEqual(model.trainable_weights, weights)
318    self.assertListEqual(model.non_trainable_weights, [])
319
320    model.trainable = False
321    self.assertListEqual(model.trainable_weights, [])
322    self.assertListEqual(model.non_trainable_weights, weights)
323
324    model.trainable = True
325    self.assertListEqual(model.trainable_weights, weights)
326    self.assertListEqual(model.non_trainable_weights, [])
327
328    model.layers[1].trainable = False
329    self.assertListEqual(model.trainable_weights, [])
330    self.assertListEqual(model.non_trainable_weights, weights)
331
332    # sequential model
333    model = keras.models.Sequential()
334    model.add(keras.layers.Dense(1, input_dim=2))
335    weights = model.weights
336
337    self.assertListEqual(model.trainable_weights, weights)
338    self.assertListEqual(model.non_trainable_weights, [])
339
340    model.trainable = False
341    self.assertListEqual(model.trainable_weights, [])
342    self.assertListEqual(model.non_trainable_weights, weights)
343
344    model.trainable = True
345    self.assertListEqual(model.trainable_weights, weights)
346    self.assertListEqual(model.non_trainable_weights, [])
347
348    model.layers[0].trainable = False
349    self.assertListEqual(model.trainable_weights, [])
350    self.assertListEqual(model.non_trainable_weights, weights)
351
352  @test_util.run_deprecated_v1
353  def test_layer_call_arguments(self):
354    # Test the ability to pass and serialize arguments to `call`.
355    inp = keras.layers.Input(shape=(2,))
356    x = keras.layers.Dense(3)(inp)
357    x = keras.layers.Dropout(0.5)(x, training=True)
358    model = keras.models.Model(inp, x)
359    # Would be `dropout/cond/Merge` by default
360    self.assertTrue(model.output.op.name.endswith('dropout/mul_1'))
361
362    # Test that argument is kept when applying the model
363    inp2 = keras.layers.Input(shape=(2,))
364    out2 = model(inp2)
365    self.assertTrue(out2.op.name.endswith('dropout/mul_1'))
366
367    # Test that argument is kept after loading a model
368    config = model.get_config()
369    model = keras.models.Model.from_config(config)
370    self.assertTrue(model.output.op.name.endswith('dropout/mul_1'))
371
372  def test_node_construction(self):
373    # test basics
374    a = keras.layers.Input(shape=(32,), name='input_a')
375    b = keras.layers.Input(shape=(32,), name='input_b')
376
377    with self.assertRaises(ValueError):
378      _ = keras.layers.Input(shape=(32,), batch_shape=(10, 32))
379    with self.assertRaises(ValueError):
380      _ = keras.layers.Input(shape=(32,), unknown_kwarg=None)
381
382    self.assertListEqual(a.shape.as_list(), [None, 32])
383    a_layer, a_node_index, a_tensor_index = a._keras_history
384    b_layer, _, _ = b._keras_history
385    self.assertEqual(len(a_layer._inbound_nodes), 1)
386    self.assertEqual(a_tensor_index, 0)
387    node = a_layer._inbound_nodes[a_node_index]
388    self.assertEqual(node.outbound_layer, a_layer)
389
390    self.assertListEqual(node.inbound_layers, [])
391    self.assertListEqual(node.input_tensors, [a])
392    self.assertListEqual(node.input_shapes, [(None, 32)])
393    self.assertListEqual(node.output_tensors, [a])
394    self.assertListEqual(node.output_shapes, [(None, 32)])
395
396    dense = keras.layers.Dense(16, name='dense_1')
397    a_2 = dense(a)
398    b_2 = dense(b)
399
400    self.assertEqual(len(dense._inbound_nodes), 2)
401    self.assertEqual(len(dense._outbound_nodes), 0)
402    self.assertEqual(dense._inbound_nodes[0].inbound_layers, a_layer)
403    self.assertEqual(dense._inbound_nodes[0].outbound_layer, dense)
404    self.assertEqual(dense._inbound_nodes[1].inbound_layers, b_layer)
405    self.assertEqual(dense._inbound_nodes[1].outbound_layer, dense)
406    self.assertEqual(dense._inbound_nodes[0].input_tensors, a)
407    self.assertEqual(dense._inbound_nodes[1].input_tensors, b)
408
409    # test layer properties
410    test_layer = keras.layers.Dense(16, name='test_layer')
411    a_test = test_layer(a)
412    self.assertListEqual(test_layer.kernel.shape.as_list(), [32, 16])
413    self.assertEqual(test_layer.input, a)
414    self.assertEqual(test_layer.output, a_test)
415    self.assertEqual(test_layer.input_shape, (None, 32))
416    self.assertEqual(test_layer.output_shape, (None, 16))
417
418    self.assertEqual(dense.get_input_at(0), a)
419    self.assertEqual(dense.get_input_at(1), b)
420    self.assertEqual(dense.get_output_at(0), a_2)
421    self.assertEqual(dense.get_output_at(1), b_2)
422    self.assertEqual(dense.get_input_shape_at(0), (None, 32))
423    self.assertEqual(dense.get_input_shape_at(1), (None, 32))
424    self.assertEqual(dense.get_output_shape_at(0), (None, 16))
425    self.assertEqual(dense.get_output_shape_at(1), (None, 16))
426    self.assertEqual(dense.get_input_mask_at(0), None)
427    self.assertEqual(dense.get_input_mask_at(1), None)
428    self.assertEqual(dense.get_output_mask_at(0), None)
429    self.assertEqual(dense.get_output_mask_at(1), None)
430
431  @test_util.run_in_graph_and_eager_modes()
432  def test_multi_input_layer(self):
433    with self.cached_session():
434      # test multi-input layer
435      a = keras.layers.Input(shape=(32,), name='input_a')
436      b = keras.layers.Input(shape=(32,), name='input_b')
437
438      dense = keras.layers.Dense(16, name='dense_1')
439      a_2 = dense(a)
440      b_2 = dense(b)
441
442      merged = keras.layers.concatenate([a_2, b_2], name='merge')
443      self.assertListEqual(merged.shape.as_list(), [None, 16 * 2])
444      merge_layer, merge_node_index, merge_tensor_index = merged._keras_history
445
446      self.assertEqual(merge_node_index, 0)
447      self.assertEqual(merge_tensor_index, 0)
448
449      self.assertEqual(len(merge_layer._inbound_nodes), 1)
450      self.assertEqual(len(merge_layer._outbound_nodes), 0)
451
452      self.assertEqual(len(merge_layer._inbound_nodes[0].input_tensors), 2)
453      self.assertEqual(len(merge_layer._inbound_nodes[0].inbound_layers), 2)
454
455      c = keras.layers.Dense(64, name='dense_2')(merged)
456      d = keras.layers.Dense(5, name='dense_3')(c)
457
458      model = keras.models.Model(inputs=[a, b], outputs=[c, d], name='model')
459      self.assertEqual(len(model.layers), 6)
460      output_shapes = model.compute_output_shape([(None, 32), (None, 32)])
461      self.assertListEqual(output_shapes[0].as_list(), [None, 64])
462      self.assertListEqual(output_shapes[1].as_list(), [None, 5])
463      self.assertListEqual(
464          model.compute_mask([a, b], [None, None]), [None, None])
465
466      # we don't check names of first 2 layers (inputs) because
467      # ordering of same-level layers is not fixed
468      self.assertListEqual([l.name for l in model.layers][2:],
469                           ['dense_1', 'merge', 'dense_2', 'dense_3'])
470      self.assertListEqual([l.name for l in model._input_layers],
471                           ['input_a', 'input_b'])
472      self.assertListEqual([l.name for l in model._output_layers],
473                           ['dense_2', 'dense_3'])
474
475      # actually run model
476      fn = keras.backend.function(model.inputs, model.outputs)
477      input_a_np = np.random.random((10, 32))
478      input_b_np = np.random.random((10, 32))
479      fn_outputs = fn([input_a_np, input_b_np])
480      self.assertListEqual([x.shape for x in fn_outputs], [(10, 64), (10, 5)])
481
482      # test get_source_inputs
483      self.assertListEqual(keras.engine.get_source_inputs(c), [a, b])
484
485      # serialization / deserialization
486      json_config = model.to_json()
487      recreated_model = keras.models.model_from_json(json_config)
488      recreated_model.compile('rmsprop', 'mse')
489
490      self.assertListEqual([l.name for l in recreated_model.layers][2:],
491                           ['dense_1', 'merge', 'dense_2', 'dense_3'])
492      self.assertListEqual([l.name for l in recreated_model._input_layers],
493                           ['input_a', 'input_b'])
494      self.assertListEqual([l.name for l in recreated_model._output_layers],
495                           ['dense_2', 'dense_3'])
496
497      fn = keras.backend.function(recreated_model.inputs,
498                                  recreated_model.outputs)
499      input_a_np = np.random.random((10, 32))
500      input_b_np = np.random.random((10, 32))
501      fn_outputs = fn([input_a_np, input_b_np])
502      self.assertListEqual([x.shape for x in fn_outputs], [(10, 64), (10, 5)])
503
504  @test_util.run_deprecated_v1
505  def test_recursion(self):
506    with self.cached_session():
507      a = keras.layers.Input(shape=(32,), name='input_a')
508      b = keras.layers.Input(shape=(32,), name='input_b')
509
510      dense = keras.layers.Dense(16, name='dense_1')
511      a_2 = dense(a)
512      b_2 = dense(b)
513      merged = keras.layers.concatenate([a_2, b_2], name='merge')
514      c = keras.layers.Dense(64, name='dense_2')(merged)
515      d = keras.layers.Dense(5, name='dense_3')(c)
516
517      model = keras.models.Model(inputs=[a, b], outputs=[c, d], name='model')
518
519      e = keras.layers.Input(shape=(32,), name='input_e')
520      f = keras.layers.Input(shape=(32,), name='input_f')
521      self.assertEqual(len(model.inputs), 2)
522      g, h = model([e, f])
523      self.assertEqual(len(model.inputs), 2)
524      self.assertEqual(g.name, 'model/dense_2/BiasAdd:0')
525
526      self.assertListEqual(g.shape.as_list(), c.shape.as_list())
527      self.assertListEqual(h.shape.as_list(), d.shape.as_list())
528
529      # test separate manipulation of different layer outputs
530      i = keras.layers.Dense(7, name='dense_4')(h)
531
532      final_model = keras.models.Model(
533          inputs=[e, f], outputs=[i, g], name='final')
534      self.assertEqual(len(final_model.inputs), 2)
535      self.assertEqual(len(final_model.outputs), 2)
536      self.assertEqual(len(final_model.layers), 4)
537
538      # we don't check names of first 2 layers (inputs) because
539      # ordering of same-level layers is not fixed
540      self.assertListEqual([layer.name for layer in final_model.layers][2:],
541                           ['model', 'dense_4'])
542      self.assertListEqual(
543          model.compute_mask([e, f], [None, None]), [None, None])
544      self.assertListEqual(
545          final_model.compute_output_shape([(10, 32), (10, 32)]), [(10, 7),
546                                                                   (10, 64)])
547
548      # run recursive model
549      fn = keras.backend.function(final_model.inputs, final_model.outputs)
550      input_a_np = np.random.random((10, 32))
551      input_b_np = np.random.random((10, 32))
552      fn_outputs = fn([input_a_np, input_b_np])
553      self.assertListEqual([x.shape for x in fn_outputs], [(10, 7), (10, 64)])
554
555      # test serialization
556      model_config = final_model.get_config()
557      recreated_model = keras.models.Model.from_config(model_config)
558
559      fn = keras.backend.function(recreated_model.inputs,
560                                  recreated_model.outputs)
561      input_a_np = np.random.random((10, 32))
562      input_b_np = np.random.random((10, 32))
563      fn_outputs = fn([input_a_np, input_b_np])
564      self.assertListEqual([x.shape for x in fn_outputs], [(10, 7), (10, 64)])
565
566  @test_util.run_in_graph_and_eager_modes()
567  def test_multi_input_multi_output_recursion(self):
568    with self.cached_session():
569      # test multi-input multi-output
570      a = keras.layers.Input(shape=(32,), name='input_a')
571      b = keras.layers.Input(shape=(32,), name='input_b')
572
573      dense = keras.layers.Dense(16, name='dense_1')
574      a_2 = dense(a)
575      b_2 = dense(b)
576      merged = keras.layers.concatenate([a_2, b_2], name='merge')
577      c = keras.layers.Dense(64, name='dense_2')(merged)
578      d = keras.layers.Dense(5, name='dense_3')(c)
579
580      model = keras.models.Model(inputs=[a, b], outputs=[c, d], name='model')
581
582      j = keras.layers.Input(shape=(32,), name='input_j')
583      k = keras.layers.Input(shape=(32,), name='input_k')
584      _, n = model([j, k])
585
586      o = keras.layers.Input(shape=(32,), name='input_o')
587      p = keras.layers.Input(shape=(32,), name='input_p')
588      q, _ = model([o, p])
589
590      self.assertListEqual(n.shape.as_list(), [None, 5])
591      self.assertListEqual(q.shape.as_list(), [None, 64])
592      s = keras.layers.concatenate([n, q], name='merge_nq')
593      self.assertListEqual(s.shape.as_list(), [None, 64 + 5])
594
595      # test with single output as 1-elem list
596      multi_io_model = keras.models.Model([j, k, o, p], [s])
597
598      fn = keras.backend.function(multi_io_model.inputs, multi_io_model.outputs)
599      fn_outputs = fn([
600          np.random.random((10, 32)), np.random.random((10, 32)),
601          np.random.random((10, 32)), np.random.random((10, 32))
602      ])
603      self.assertListEqual([x.shape for x in fn_outputs], [(10, 69)])
604
605      # test with single output as tensor
606      multi_io_model = keras.models.Model([j, k, o, p], s)
607
608      fn = keras.backend.function(multi_io_model.inputs, multi_io_model.outputs)
609      fn_outputs = fn([
610          np.random.random((10, 32)), np.random.random((10, 32)),
611          np.random.random((10, 32)), np.random.random((10, 32))
612      ])
613      # note that the output of the function will still be a 1-elem list
614      self.assertListEqual([x.shape for x in fn_outputs], [(10, 69)])
615
616      # test serialization
617      model_config = multi_io_model.get_config()
618      recreated_model = keras.models.Model.from_config(model_config)
619
620      fn = keras.backend.function(recreated_model.inputs,
621                                  recreated_model.outputs)
622      fn_outputs = fn([
623          np.random.random((10, 32)), np.random.random((10, 32)),
624          np.random.random((10, 32)), np.random.random((10, 32))
625      ])
626      # note that the output of the function will still be a 1-elem list
627      self.assertListEqual([x.shape for x in fn_outputs], [(10, 69)])
628
629      config = model.get_config()
630      keras.models.Model.from_config(config)
631
632      model.summary()
633      json_str = model.to_json()
634      keras.models.model_from_json(json_str)
635
636      if yaml is not None:
637        yaml_str = model.to_yaml()
638        keras.models.model_from_yaml(yaml_str)
639
640  @test_util.run_in_graph_and_eager_modes()
641  def test_invalid_graphs(self):
642    a = keras.layers.Input(shape=(32,), name='input_a')
643    b = keras.layers.Input(shape=(32,), name='input_b')
644
645    dense = keras.layers.Dense(16, name='dense_1')
646    a_2 = dense(a)
647    b_2 = dense(b)
648    merged = keras.layers.concatenate([a_2, b_2], name='merge')
649    c = keras.layers.Dense(64, name='dense_2')(merged)
650    d = keras.layers.Dense(5, name='dense_3')(c)
651
652    model = keras.models.Model(inputs=[a, b], outputs=[c, d], name='model')
653
654    # input is not an Input tensor
655    j = keras.layers.Input(shape=(32,), name='input_j')
656    j = keras.layers.Dense(32)(j)
657    k = keras.layers.Input(shape=(32,), name='input_k')
658    m, n = model([j, k])
659
660    with self.assertRaises(Exception):
661      keras.models.Model([j, k], [m, n])
662
663    # disconnected graph
664    j = keras.layers.Input(shape=(32,), name='input_j')
665    k = keras.layers.Input(shape=(32,), name='input_k')
666    m, n = model([j, k])
667    with self.assertRaises(Exception):
668      keras.models.Model([j], [m, n])
669
670    # redundant outputs
671    j = keras.layers.Input(shape=(32,), name='input_j')
672    k = keras.layers.Input(shape=(32,), name='input_k')
673    m, n = model([j, k])
674
675    keras.models.Model([j, k], [m, n, n])
676
677    # redundant inputs
678    j = keras.layers.Input(shape=(32,), name='input_j')
679    k = keras.layers.Input(shape=(32,), name='input_k')
680    m, n = model([j, k])
681    with self.assertRaises(Exception):
682      keras.models.Model([j, k, j], [m, n])
683
684    # i have not idea what I'm doing: garbage as inputs/outputs
685    j = keras.layers.Input(shape=(32,), name='input_j')
686    k = keras.layers.Input(shape=(32,), name='input_k')
687    m, n = model([j, k])
688    with self.assertRaises(Exception):
689      keras.models.Model([j, k], [m, n, 0])
690
691  @test_util.run_deprecated_v1
692  def test_raw_tf_compatibility(self):
693    # test calling layers/models on TF tensors
694    a = keras.layers.Input(shape=(32,), name='input_a')
695    b = keras.layers.Input(shape=(32,), name='input_b')
696
697    dense = keras.layers.Dense(16, name='dense_1')
698    a_2 = dense(a)
699    b_2 = dense(b)
700    merged = keras.layers.concatenate([a_2, b_2], name='merge')
701    c = keras.layers.Dense(64, name='dense_2')(merged)
702    d = keras.layers.Dense(5, name='dense_3')(c)
703
704    model = keras.models.Model(inputs=[a, b], outputs=[c, d], name='model')
705
706    j = keras.layers.Input(shape=(32,), name='input_j')
707    k = keras.layers.Input(shape=(32,), name='input_k')
708    self.assertEqual(len(model.inputs), 2)
709    m, n = model([j, k])
710    self.assertEqual(len(model.inputs), 2)
711    tf_model = keras.models.Model([j, k], [m, n])
712
713    j_tf = array_ops.placeholder(dtype=dtypes.float32, shape=(None, 32))
714    k_tf = array_ops.placeholder(dtype=dtypes.float32, shape=(None, 32))
715    m_tf, n_tf = tf_model([j_tf, k_tf])
716    self.assertListEqual(m_tf.shape.as_list(), [None, 64])
717    self.assertListEqual(n_tf.shape.as_list(), [None, 5])
718
719    # test merge
720    keras.layers.concatenate([j_tf, k_tf], axis=1)
721    keras.layers.add([j_tf, k_tf])
722
723    # test tensor input
724    x = array_ops.placeholder(shape=(None, 2), dtype=dtypes.float32)
725    keras.layers.InputLayer(input_tensor=x)
726
727    x = keras.layers.Input(tensor=x)
728    keras.layers.Dense(2)(x)
729
730  @test_util.run_in_graph_and_eager_modes()
731  def test_basic_masking(self):
732    a = keras.layers.Input(shape=(10, 32), name='input_a')
733    b = keras.layers.Masking()(a)
734    model = keras.models.Model(a, b)
735    self.assertEqual(model.output_mask.shape.as_list(), [None, 10])
736
737  @test_util.run_deprecated_v1
738  def testMaskingSingleInput(self):
739
740    class MaskedLayer(keras.layers.Layer):
741
742      def call(self, inputs, mask=None):
743        if mask is not None:
744          return inputs * mask
745        return inputs
746
747      def compute_mask(self, inputs, mask=None):
748        return array_ops.ones_like(inputs)
749
750    if context.executing_eagerly():
751      a = constant_op.constant([2] * 32)
752      mask = constant_op.constant([0, 1] * 16)
753      a._keras_mask = mask
754      b = MaskedLayer().apply(a)
755      self.assertTrue(hasattr(b, '_keras_mask'))
756      self.assertAllEqual(
757          self.evaluate(array_ops.ones_like(mask)),
758          self.evaluate(getattr(b, '_keras_mask')))
759      self.assertAllEqual(self.evaluate(a * mask), self.evaluate(b))
760    else:
761      x = input_layer_lib.Input(shape=(32,))
762      y = MaskedLayer()(x)  # pylint: disable=not-callable
763      network = network_lib.Network(x, y)
764
765      # test callability on Input
766      x_2 = input_layer_lib.Input(shape=(32,))
767      y_2 = network(x_2)
768      self.assertEqual(y_2.shape.as_list(), [None, 32])
769
770      # test callability on regular tensor
771      x_2 = array_ops.placeholder(dtype='float32', shape=(None, 32))
772      y_2 = network(x_2)
773      self.assertEqual(y_2.shape.as_list(), [None, 32])
774
775  @test_util.run_deprecated_v1
776  def test_activity_regularization_with_model_composition(self):
777
778    def reg(x):
779      return math_ops.reduce_sum(x)
780
781    net_a_input = input_layer_lib.Input((2,))
782    net_a = net_a_input
783    net_a = keras.layers.Dense(2, kernel_initializer='ones',
784                               use_bias=False,
785                               activity_regularizer=reg)(net_a)
786    model_a = keras.Model([net_a_input], [net_a])
787
788    net_b_input = input_layer_lib.Input((2,))
789    net_b = model_a(net_b_input)
790    model_b = keras.Model([net_b_input], [net_b])
791
792    model_b.compile(optimizer='sgd', loss=None)
793    x = np.ones((1, 2))
794    loss = model_b.evaluate(x)
795    self.assertEqual(loss, 4.)
796
797  @keras_parameterized.run_all_keras_modes
798  def test_layer_sharing_at_heterogenous_depth(self):
799    x_val = np.random.random((10, 5))
800
801    x = input_layer_lib.Input(shape=(5,))
802    a = keras.layers.Dense(5, name='A')
803    b = keras.layers.Dense(5, name='B')
804    output = a(b(a(b(x))))
805    m = keras.models.Model(x, output)
806    m.run_eagerly = testing_utils.should_run_eagerly()
807
808    output_val = m.predict(x_val)
809
810    config = m.get_config()
811    weights = m.get_weights()
812
813    m2 = keras.models.Model.from_config(config)
814    m2.set_weights(weights)
815
816    output_val_2 = m2.predict(x_val)
817    self.assertAllClose(output_val, output_val_2, atol=1e-6)
818
819  @keras_parameterized.run_all_keras_modes
820  def test_layer_sharing_at_heterogenous_depth_with_concat(self):
821    input_shape = (16, 9, 3)
822    input_layer = input_layer_lib.Input(shape=input_shape)
823
824    a = keras.layers.Dense(3, name='dense_A')
825    b = keras.layers.Dense(3, name='dense_B')
826    c = keras.layers.Dense(3, name='dense_C')
827
828    x1 = b(a(input_layer))
829    x2 = a(c(input_layer))
830    output = keras.layers.concatenate([x1, x2])
831
832    m = keras.models.Model(inputs=input_layer, outputs=output)
833    m.run_eagerly = testing_utils.should_run_eagerly()
834
835    x_val = np.random.random((10, 16, 9, 3))
836    output_val = m.predict(x_val)
837
838    config = m.get_config()
839    weights = m.get_weights()
840
841    m2 = keras.models.Model.from_config(config)
842    m2.set_weights(weights)
843
844    output_val_2 = m2.predict(x_val)
845    self.assertAllClose(output_val, output_val_2, atol=1e-6)
846
847  @keras_parameterized.run_all_keras_modes
848  def test_explicit_training_argument(self):
849    a = keras.layers.Input(shape=(2,))
850    b = keras.layers.Dropout(0.5)(a)
851    base_model = keras.models.Model(a, b)
852
853    a = keras.layers.Input(shape=(2,))
854    b = base_model(a, training=False)
855    model = keras.models.Model(a, b)
856
857    x = np.ones((100, 2))
858    y = np.ones((100, 2))
859    model.compile(
860        optimizer='sgd',
861        loss='mse',
862        run_eagerly=testing_utils.should_run_eagerly())
863    loss = model.train_on_batch(x, y)
864    self.assertEqual(loss, 0)  # In inference mode, output is equal to input.
865
866    a = keras.layers.Input(shape=(2,))
867    b = base_model(a, training=True)
868    model = keras.models.Model(a, b)
869    preds = model.predict(x)
870    self.assertEqual(np.min(preds), 0.)  # At least one unit was dropped.
871
872  @keras_parameterized.run_all_keras_modes
873  def test_multi_output_model_with_none_masking(self):
874    def func(x):
875      return [x * 0.2, x * 0.3]
876
877    def output_shape(input_shape):
878      return [input_shape, input_shape]
879
880    i = keras.layers.Input(shape=(3, 2, 1))
881    o = keras.layers.Lambda(function=func, output_shape=output_shape)(i)
882
883    self.assertEqual(keras.backend.int_shape(o[0]), (None, 3, 2, 1))
884    self.assertEqual(keras.backend.int_shape(o[1]), (None, 3, 2, 1))
885
886    o = keras.layers.add(o)
887    model = keras.Model(i, o)
888    model.run_eagerly = testing_utils.should_run_eagerly()
889
890    i2 = keras.layers.Input(shape=(3, 2, 1))
891    o2 = model(i2)
892    model2 = keras.Model(i2, o2)
893    model2.run_eagerly = testing_utils.should_run_eagerly()
894
895    x = np.random.random((4, 3, 2, 1))
896    out = model2.predict(x)
897    assert out.shape == (4, 3, 2, 1)
898    self.assertAllClose(out, x * 0.2 + x * 0.3, atol=1e-4)
899
900  @keras_parameterized.run_all_keras_modes
901  def test_constant_initializer_with_numpy(self):
902    initializer = keras.initializers.Constant(np.ones((3, 2)))
903    model = keras.models.Sequential()
904    model.add(
905        keras.layers.Dense(2, input_shape=(3,), kernel_initializer=initializer))
906    model.add(keras.layers.Dense(3))
907    model.compile(
908        loss='mse',
909        optimizer='sgd',
910        metrics=['acc'],
911        run_eagerly=testing_utils.should_run_eagerly())
912
913    json_str = model.to_json()
914    keras.models.model_from_json(json_str)
915
916    if yaml is not None:
917      yaml_str = model.to_yaml()
918      keras.models.model_from_yaml(yaml_str)
919
920  def test_subclassed_error_if_init_not_called(self):
921
922    class MyNetwork(network_lib.Network):
923
924      def __init__(self):
925        self._foo = [keras.layers.Dense(10), keras.layers.Dense(10)]
926
927    with self.assertRaisesRegexp(RuntimeError, 'forgot to call'):
928      MyNetwork()
929
930
class DeferredModeTest(test.TestCase):
  """Building functional networks from deferred (symbolic) inputs."""

  @test_util.run_in_graph_and_eager_modes()
  def testSimpleNetworkBuilding(self):
    """A linear stack of Dense layers builds and runs on EagerTensors."""
    inputs = input_layer_lib.Input(shape=(32,))
    if context.executing_eagerly():
      self.assertEqual(inputs.dtype.name, 'float32')
      self.assertEqual(inputs.shape.as_list(), [None, 32])

    hidden = keras.layers.Dense(2)(inputs)
    if context.executing_eagerly():
      self.assertEqual(hidden.dtype.name, 'float32')
      self.assertEqual(hidden.shape.as_list(), [None, 2])

    outputs = keras.layers.Dense(4)(hidden)
    network = network_lib.Network(inputs, outputs)
    self.assertIsInstance(network, network_lib.Network)

    if context.executing_eagerly():
      # It should be possible to call such a network on EagerTensors.
      eager_in = constant_op.constant(
          np.random.random((10, 32)).astype('float32'))
      eager_out = network(eager_in)
      self.assertEqual(eager_out.shape.as_list(), [10, 4])

  @test_util.run_in_graph_and_eager_modes()
  def testMultiIONetworkBuilding(self):
    """A two-input, two-output network builds and runs on EagerTensors."""
    input_a = input_layer_lib.Input(shape=(32,))
    input_b = input_layer_lib.Input(shape=(16,))
    branch_a = keras.layers.Dense(16)(input_a)

    class AddLayer(keras.layers.Layer):
      """Adds its two input tensors elementwise."""

      def call(self, inputs):
        return inputs[0] + inputs[1]

    merged = AddLayer()([branch_a, input_b])  # pylint: disable=not-callable
    merged = keras.layers.Dense(2)(merged)

    network = network_lib.Network([input_a, input_b], [branch_a, merged])
    if context.executing_eagerly():
      feed_a = constant_op.constant(
          np.random.random((10, 32)).astype('float32'))
      feed_b = constant_op.constant(
          np.random.random((10, 16)).astype('float32'))
      outputs = network([feed_a, feed_b])
      self.assertEqual(len(outputs), 2)
      self.assertEqual(outputs[0].shape.as_list(), [10, 16])
      self.assertEqual(outputs[1].shape.as_list(), [10, 2])
980
981
class DefaultShapeInferenceBehaviorTest(keras_parameterized.TestCase):
  """Default shape-inference behavior for custom layers and models."""

  def _testShapeInference(self, model, input_shape, expected_output_shape):
    """Runs random data of `input_shape` through `model`; checks output shape."""
    input_value = np.random.random(input_shape)
    output_value = model.predict(input_value)
    self.assertEqual(output_value.shape, expected_output_shape)

  @test_util.run_in_graph_and_eager_modes()
  def testSingleInputCase(self):
    """compute_output_shape infers shapes for a single-input custom layer."""

    class LayerWithOneInput(keras.layers.Layer):

      def build(self, input_shape):
        # Fixed 3x4 kernel; output feature dim is therefore always 4.
        self.w = array_ops.ones(shape=(3, 4))

      def call(self, inputs):
        return keras.backend.dot(inputs, self.w)

    inputs = input_layer_lib.Input(shape=(3,))
    layer = LayerWithOneInput()

    if context.executing_eagerly():
      self.assertEqual(
          layer.compute_output_shape((None, 3)).as_list(), [None, 4])
      # As a side-effect, compute_output_shape builds the layer.
      self.assertTrue(layer.built)
      # We can still query the layer's compute_output_shape with compatible
      # input shapes.
      self.assertEqual(
          layer.compute_output_shape((6, 3)).as_list(), [6, 4])

    outputs = layer(inputs)
    model = keras.Model(inputs, outputs)
    self._testShapeInference(model, (2, 3), (2, 4))

  @test_util.run_in_graph_and_eager_modes()
  def testMultiInputOutputCase(self):
    """Shape inference through a layer with list inputs and list outputs."""

    class MultiInputOutputLayer(keras.layers.Layer):

      def build(self, input_shape):
        self.w = array_ops.ones(shape=(3, 4))

      def call(self, inputs):
        # Returns [dot(inputs[0], w), dot(inputs[0], w) + inputs[1]].
        a = keras.backend.dot(inputs[0], self.w)
        b = a + inputs[1]
        return [a, b]

    input_a = input_layer_lib.Input(shape=(3,))
    input_b = input_layer_lib.Input(shape=(4,))
    output_a, output_b = MultiInputOutputLayer()([input_a, input_b])
    model = keras.Model([input_a, input_b], [output_a, output_b])
    output_a_val, output_b_val = model.predict(
        [np.random.random((2, 3)), np.random.random((2, 4))])
    self.assertEqual(output_a_val.shape, (2, 4))
    self.assertEqual(output_b_val.shape, (2, 4))

  @test_util.run_in_graph_and_eager_modes()
  def testTrainingArgument(self):
    """Shape inference works for a layer whose call() takes `training`."""

    class LayerWithTrainingArg(keras.layers.Layer):

      def build(self, input_shape):
        self.w = array_ops.ones(shape=(3, 4))

      def call(self, inputs, training):
        return keras.backend.dot(inputs, self.w)

    inputs = input_layer_lib.Input(shape=(3,))
    outputs = LayerWithTrainingArg()(inputs, training=False)
    model = keras.Model(inputs, outputs)
    self._testShapeInference(model, (2, 3), (2, 4))

  @test_util.run_in_graph_and_eager_modes()
  def testNoneInShape(self):
    """A subclassed model builds from a shape with None spatial dims."""

    class Model(keras.Model):

      def __init__(self):
        super(Model, self).__init__()
        self.conv1 = keras.layers.Conv2D(8, 3)
        self.pool = keras.layers.GlobalAveragePooling2D()
        self.fc = keras.layers.Dense(3)

      def call(self, x):
        x = self.conv1(x)
        x = self.pool(x)
        x = self.fc(x)
        return x

    model = Model()
    # All spatial dimensions unknown; only the channel count (1) is fixed.
    model.build(tensor_shape.TensorShape((None, None, None, 1)))
    self.assertTrue(model.built, 'Model should be built')
    self.assertTrue(model.weights,
                    'Model should have its weights created as it '
                    'has been built')
    sample_input = array_ops.ones((1, 10, 10, 1))
    output = model(sample_input)
    self.assertEqual(output.shape, (1, 3))

  @test_util.run_in_graph_and_eager_modes()
  def testNoneInShapeWithCompoundModel(self):
    """A model nesting another subclassed model builds with None dims."""

    class BasicBlock(keras.Model):

      def __init__(self):
        super(BasicBlock, self).__init__()
        self.conv1 = keras.layers.Conv2D(8, 3)
        self.pool = keras.layers.GlobalAveragePooling2D()
        self.dense = keras.layers.Dense(3)

      def call(self, x):
        x = self.conv1(x)
        x = self.pool(x)
        x = self.dense(x)
        return x

    class CompoundModel(keras.Model):

      def __init__(self):
        super(CompoundModel, self).__init__()
        self.block = BasicBlock()

      def call(self, x):
        x = self.block(x)  # pylint: disable=not-callable
        return x

    model = CompoundModel()
    model.build(tensor_shape.TensorShape((None, None, None, 1)))
    self.assertTrue(model.built, 'Model should be built')
    self.assertTrue(model.weights,
                    'Model should have its weights created as it '
                    'has been built')
    sample_input = array_ops.ones((1, 10, 10, 1))
    output = model(sample_input)  # pylint: disable=not-callable
    self.assertEqual(output.shape, (1, 3))

  @test_util.run_in_graph_and_eager_modes()
  def testNoneInShapeWithFunctinalAPI(self):
    """A subclassed model used as a layer inside a functional-API model."""

    class BasicBlock(keras.Model):
      # A keras.Model subclass called as a layer inside a model created
      # with the functional API below.

      def __init__(self):
        super(BasicBlock, self).__init__()
        self.conv1 = keras.layers.Conv2D(8, 3)

      def call(self, x):
        x = self.conv1(x)
        return x

    input_layer = keras.layers.Input(shape=(None, None, 1))
    x = BasicBlock()(input_layer)
    x = keras.layers.GlobalAveragePooling2D()(x)
    output_layer = keras.layers.Dense(3)(x)

    model = keras.Model(inputs=input_layer, outputs=output_layer)

    model.build(tensor_shape.TensorShape((None, None, None, 1)))
    self.assertTrue(model.built, 'Model should be built')
    self.assertTrue(model.weights,
                    'Model should have its weights created as it '
                    'has been built')
    sample_input = array_ops.ones((1, 10, 10, 1))
    output = model(sample_input)
    self.assertEqual(output.shape, (1, 3))

  @keras_parameterized.run_all_keras_modes
  def test_sequential_as_downstream_of_masking_layer(self):
    """A TimeDistributed Sequential downstream of Masking trains and masks."""
    inputs = keras.layers.Input(shape=(3, 4))
    x = keras.layers.Masking(mask_value=0., input_shape=(3, 4))(inputs)

    s = keras.Sequential()
    s.add(keras.layers.Dense(5, input_shape=(4,)))

    x = keras.layers.wrappers.TimeDistributed(s)(x)
    model = keras.Model(inputs=inputs, outputs=x)
    model.compile(
        optimizer='rmsprop',
        loss='mse',
        run_eagerly=testing_utils.should_run_eagerly())

    # Zero out trailing timesteps in the first few samples so the Masking
    # layer has something to mask (mask_value is 0.).
    model_input = np.random.randint(
        low=1, high=5, size=(10, 3, 4)).astype('float32')
    for i in range(4):
      model_input[i, i:, :] = 0.
    model.fit(model_input,
              np.random.random((10, 3, 5)), epochs=1, batch_size=6)

    if not context.executing_eagerly():
      # Note: this doesn't work in eager due to DeferredTensor/ops compatibility
      # issue.
      mask_outputs = [model.layers[1].compute_mask(model.layers[1].input)]
      mask_outputs += [model.layers[2].compute_mask(
          model.layers[2].input, mask_outputs[-1])]
      func = keras.backend.function([model.input], mask_outputs)
      mask_outputs_val = func([model_input])
      self.assertAllClose(mask_outputs_val[0], np.any(model_input, axis=-1))
      self.assertAllClose(mask_outputs_val[1], np.any(model_input, axis=-1))

  @test_util.run_in_graph_and_eager_modes()
  def test_external_keras_serialization_compat(self):
    """Config format stays compatible with external Keras/TF.js consumers."""
    inputs = keras.Input(shape=(10,))
    outputs = keras.layers.Dense(1)(inputs)
    model = keras.Model(inputs, outputs)
    config = model.get_config()
    # Checks that single inputs and outputs are still saved as 1-element lists.
    # Saving as 1-element lists or not is equivalent in TF Keras, but only the
    # 1-element list format is supported in TF.js and keras-team/Keras.
    self.assertLen(config['input_layers'], 1)
    self.assertLen(config['output_layers'], 1)
1194
1195
class GraphUtilsTest(test.TestCase):
  """Tests for keras.utils.tf_utils graph-traversal helpers."""

  @test_util.run_deprecated_v1
  def testGetReachableFromInputs(self):
    """get_reachable_from_inputs returns all downstream tensors and ops."""
    with self.cached_session():
      pl_1 = array_ops.placeholder(shape=None, dtype='float32')
      pl_2 = array_ops.placeholder(shape=None, dtype='float32')
      pl_3 = array_ops.placeholder(shape=None, dtype='float32')
      x_1 = pl_1 + pl_2
      x_2 = pl_2 * 2
      x_3 = pl_3 + 1
      x_4 = x_1 + x_2
      x_5 = x_3 * pl_1

      reachable = keras.utils.tf_utils.get_reachable_from_inputs
      self.assertEqual(
          reachable([pl_1]),
          {pl_1, x_1, x_4, x_5, x_1.op, x_4.op, x_5.op})
      self.assertEqual(
          reachable([pl_1, pl_2]),
          {pl_1, pl_2, x_1, x_2, x_4, x_5, x_1.op, x_2.op, x_4.op, x_5.op})
      self.assertEqual(
          reachable([pl_3]),
          {pl_3, x_3, x_5, x_3.op, x_5.op})
      self.assertEqual(
          reachable([x_3]),
          {x_3, x_5, x_5.op})
1223
1224
@test_util.run_all_in_graph_and_eager_modes
class NestedNetworkTest(test.TestCase):
  """Networks whose inputs or outputs are nested structures (dicts/lists)."""

  def test_nested_inputs_network(self):
    """A Network built from a dict of inputs survives a config round-trip."""
    inputs = {'x1': keras.Input(shape=(1,)), 'x2': keras.Input(shape=(1,))}
    outputs = keras.layers.Add()([inputs['x1'], inputs['x2']])
    network = keras.engine.network.Network(inputs, outputs)

    network = keras.engine.network.Network.from_config(network.get_config())

    # Feed values keyed by the declared input names. (The keys previously
    # used here ('x'/'y') did not match the inputs and only worked because
    # dict inputs are flattened in sorted-key order.)
    result_tensor = network({
        'x1': array_ops.ones((1, 1), 'float32'),
        'x2': array_ops.ones((1, 1), 'float32')
    })
    result = self.evaluate(result_tensor)
    self.assertAllEqual(result, [[2.]])

    # TODO(b/122726584): Investigate why concrete batch is flaky in some builds.
    output_shape = network.compute_output_shape({
        'x1': (None, 1),
        'x2': (None, 1)
    })
    self.assertListEqual(output_shape.as_list(), [None, 1])

  def test_nested_outputs_network(self):
    """A Network may return a dict of named output tensors."""
    inputs = keras.Input(shape=(1,))
    outputs = {
        'x+x': keras.layers.Add()([inputs, inputs]),
        'x*x': keras.layers.Multiply()([inputs, inputs])
    }

    network = keras.engine.network.Network(inputs, outputs)

    network = keras.engine.network.Network.from_config(network.get_config())

    result_tensor = network(array_ops.ones((1, 1), 'float32'))
    result = self.evaluate(result_tensor)
    self.assertAllEqual(result['x+x'], [[2.]])
    self.assertAllEqual(result['x*x'], [[1.]])

    output_shape = network.compute_output_shape((None, 1))
    self.assertListEqual(output_shape['x+x'].as_list(), [None, 1])
    self.assertListEqual(output_shape['x*x'].as_list(), [None, 1])

  def test_nested_network_inside_network(self):
    """A dict-structured Network nests inside an outer list-input Network."""
    inner_inputs = {
        'x1': keras.Input(shape=(1,)),
        'x2': keras.Input(shape=(1,))
    }
    inner_outputs = {
        'x1+x2':
            keras.layers.Add()([inner_inputs['x1'], inner_inputs['x2']]),
        'x1*x2':
            keras.layers.Multiply()([inner_inputs['x1'], inner_inputs['x2']])
    }
    inner_network = keras.engine.network.Network(inner_inputs, inner_outputs)

    inputs = [keras.Input(shape=(1,)), keras.Input(shape=(1,))]
    middle = inner_network({'x1': inputs[0], 'x2': inputs[1]})
    outputs = keras.layers.Add()([middle['x1+x2'], middle['x1*x2']])
    network = keras.engine.network.Network(inputs, outputs)

    network = keras.engine.network.Network.from_config(network.get_config())

    # Computes: `(x1+x2) + (x1*x2)`
    result_tensor = network(
        [array_ops.ones((1, 1), 'float32'),
         array_ops.ones((1, 1), 'float32')])
    result = self.evaluate(result_tensor)
    self.assertAllEqual(result, [[3.]])

    output_shape = network.compute_output_shape([(None, 1), (None, 1)])
    self.assertListEqual(output_shape.as_list(), [None, 1])

  @test_util.run_in_graph_and_eager_modes
  def test_updates_with_direct_call(self):
    """Updates are tracked for tensors passed by calling the model directly."""
    inputs = keras.Input(shape=(10,))
    x = keras.layers.BatchNormalization()(inputs)
    x = keras.layers.Dense(10)(x)
    model = keras.Model(inputs, x)

    ph = keras.backend.placeholder(shape=(10, 10))
    model(ph)

    # Two updates are expected, conditional on the `ph` input; none are
    # unconditional.
    self.assertLen(model.get_updates_for(ph), 2)
    self.assertLen(model.get_updates_for(None), 0)
1311
1312
if __name__ == '__main__':
  # Run all test cases in this module via the TensorFlow test runner.
  test.main()
1315