# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for gelu."""
import functools

import tensorflow as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function


def _tflite_convert_verify_op(tflite_convert_function, *args, **kwargs):
  """Verifies that the result of the conversion contains a GELU op."""
  result = tflite_convert_function(*args, **kwargs)
  tflite_model_binary = result[0]
  if not result[0]:
    tf.compat.v1.logging.error(result[1])  # stderr from running tflite_convert.
    raise RuntimeError("Failed to build model: \n\n" + result[1])
  interpreter = tf.lite.Interpreter(model_content=tflite_model_binary)
  interpreter.allocate_tensors()
  for op in interpreter._get_ops_details():  # pylint: disable=protected-access
    if op["op_name"] == "GELU":
      return result
  raise RuntimeError("Expected to generate GELU op node in graph.")


@register_make_test_function()
def make_gelu_tests(options):
  """Makes a set of tests for gelu."""

  test_parameters = [{
      "input_dtype": [tf.float32],
      "input_shape": [[], [1], [2, 3], [1, 1, 1, 1], [1, 3, 4, 3],
                      [3, 15, 14, 3], [3, 1, 2, 4, 6], [2, 2, 3, 4, 5, 6]],
      "fully_quantize": [False, True],
      "input_range": [(-10, 10)],
      "approximate": [True, False],
  }]
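  # A note on the sweep: "approximate" mirrors the flag of tf.nn.gelu, so the
  # tests cover both the exact erf-based GELU and the tanh-based approximation.
  # "fully_quantize" additionally runs each case through the quantized
  # conversion path, with "input_range" giving the expected input value range.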

  def build_graph(parameters):
    """Builds the gelu op testing graph."""
    input_tensor = tf.compat.v1.placeholder(
        dtype=parameters["input_dtype"],
        name="input",
        shape=parameters["input_shape"])

    out = tf.nn.gelu(input_tensor, approximate=parameters["approximate"])
    return [input_tensor], [out]

  def build_inputs(parameters, sess, inputs, outputs):
    values = [
        create_tensor_data(
            parameters["input_dtype"],
            parameters["input_shape"],
            min_value=-8,
            max_value=8)
    ]
    return values, sess.run(outputs, feed_dict=dict(zip(inputs, values)))

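  # For non-Flex runs, wrap the conversion function so that every generated
  # model is verified to contain a builtin GELU op (see
  # _tflite_convert_verify_op above).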
  if not options.run_with_flex:
    options.tflite_convert_function = functools.partial(
        _tflite_convert_verify_op,
        options.tflite_convert_function)
  make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
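
# --- Illustrative reference (not part of the test harness) -------------------
# A minimal NumPy sketch of the two GELU variants that the "approximate"
# parameter above selects between: the exact erf-based form and the tanh-based
# approximation. The helper name below is hypothetical and unused by the
# harness; it is only meant to make the expected numerics explicit.


def _reference_gelu(x, approximate=False):
  """Computes GELU with NumPy for comparison; illustrative only."""
  import math

  import numpy as np

  x = np.asarray(x, dtype=np.float32)
  if approximate:
    # Tanh approximation: 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715*x^3))).
    return 0.5 * x * (1.0 + np.tanh(
        math.sqrt(2.0 / math.pi) * (x + 0.044715 * np.power(x, 3))))
  # Exact form: 0.5 * x * (1 + erf(x / sqrt(2))).
  erf = np.vectorize(math.erf)
  return 0.5 * x * (1.0 + erf(x / math.sqrt(2.0)))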