# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
15"""Test configs for conv followed with bias Add and activations."""
16from __future__ import absolute_import
17from __future__ import division
18from __future__ import print_function
19
20import numpy as np
21import tensorflow.compat.v1 as tf
22from tensorflow.lite.testing.zip_test_utils import create_tensor_data
23from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
24from tensorflow.lite.testing.zip_test_utils import register_make_test_function
25
26
27def make_conv_bias_activation_tests(activation_op):
28  """Make a set of tests to do convolution with activation and bias.
29
30  This test will create multiple consecutive convolutions with NCHW layout to
31  make sure that the tranformations to NHWC works as expected. Note this
32  doesn't check any performance so manual checking of the generated model is
33  advised.
34
35  Args:
36    activation_op: The activation op to be used in the test.
37
38  Returns:
39    The function that creates the test.
40  """

  def create_test(options):
    """Actual function that generates examples."""
    test_parameters = [
        {
            "input_shape": [[1, 3, 4, 3]],
            "filter_shape": [[2, 3], [3, 3]],
            "filter_2_shape": [[2, 1, 1, 3]],
            "strides": [[1, 1, 1, 1]],
            "dilations": [[1, 1, 1, 1]],
            "data_format": ["NCHW"],
            "channel_multiplier": [1, 2],
            "fully_quantize": [False],
            "dynamic_range_quantize": [False],
        },
    ]
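    # With data_format "NCHW", the "input_shape" above reads as
    # [batch, channels, height, width]; each "filter_shape" entry is
    # [filter_height, filter_width], and get_tensor_shapes below appends
    # input_shape[3] and the channel multiplier to form the 4-D filter shape.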

    def get_tensor_shapes(parameters):
      """Returns the input shape and the completed first filter shape."""
      input_shape = parameters["input_shape"]
      filter_size = parameters["filter_shape"]
      filter_shape = filter_size + [
          input_shape[3], parameters["channel_multiplier"]
      ]
      return [input_shape, filter_shape]

    # TF CPU doesn't support convolutions with NCHW layout. Instead, compile
    # the op with XLA, which doesn't have the same restriction.
    @tf.function(jit_compile=True)
    def add_conv(input_tensor, filter_input, parameters):
      out = tf.nn.conv2d(
          input_tensor,
          filter_input,
          strides=parameters["strides"],
          dilations=parameters["dilations"],
          padding="VALID",
          data_format=parameters["data_format"])
      return out

    def add_bias_add(data_input, filter_shape):
      """Adds a random constant bias of length filter_shape[-1]."""
      bias_input = create_tensor_data(np.float32, (filter_shape[-1],))
      out = tf.nn.bias_add(data_input, bias_input, data_format="NHWC")
      return out

    def build_graph(parameters):
      """Build a conv graph given `parameters`."""
      input_shape, filter_shape = get_tensor_shapes(parameters)
      input_tensor = tf.compat.v1.placeholder(
          dtype=tf.float32, name="input", shape=input_shape)

      filter_input = create_tensor_data(
          np.float32, filter_shape, min_value=-10, max_value=10)
      input_tensors = [input_tensor]

      if parameters["data_format"] == "NCHW":
        out = add_conv(input_tensor, filter_input, parameters)
      else:
        out = tf.nn.conv2d(
            input_tensor,
            filter_input,
            strides=parameters["strides"],
            dilations=parameters["dilations"],
            padding="VALID",
            data_format=parameters["data_format"])
      out = add_bias_add(out, filter_shape)
      out = activation_op(out)

      # Add another conv + bias_add + activation.

      # Create constant filter for the second conv2d.
      filter_input_2 = create_tensor_data(
          np.float32, parameters["filter_2_shape"], min_value=-10, max_value=10)
      if parameters["data_format"] == "NCHW":
        out = add_conv(out, filter_input_2, parameters)
      else:
        out = tf.nn.conv2d(
            out,
            filter_input_2,
            strides=parameters["strides"],
            dilations=parameters["dilations"],
            padding="VALID",
            data_format=parameters["data_format"])
      out = add_bias_add(out, filter_shape)
      out = activation_op(out)
      return input_tensors, [out]

    def build_inputs(parameters, sess, inputs, outputs):
      """Build inputs for conv with activation."""

      input_shape, _ = get_tensor_shapes(parameters)
      values = [
          create_tensor_data(
              np.float32, input_shape, min_value=-1, max_value=1)
      ]
      return values, sess.run(outputs, feed_dict=dict(zip(inputs, values)))

    make_zip_of_tests(
        options,
        test_parameters,
        build_graph,
        build_inputs,
        expected_tf_failures=2)

  return create_test


@register_make_test_function()
def make_conv_bias_relu6_tests(options):
  """Make a set of tests to do conv_bias_relu6."""
  return make_conv_bias_activation_tests(tf.nn.relu6)(options)
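

# The factory above can cover other activation ops by registering additional
# make_* functions. The commented-out sketch below is illustrative only (it is
# not part of the original test suite): it shows how a plain-ReLU variant
# would be wired up, mirroring the relu6 registration above and using only
# names already defined or imported in this module.
#
# @register_make_test_function()
# def make_conv_bias_relu_tests(options):
#   """Make a set of tests to do conv_bias_relu (illustrative sketch)."""
#   return make_conv_bias_activation_tests(tf.nn.relu)(options)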