# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras activation functions."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.python import keras
from tensorflow.python.framework import test_util
from tensorflow.python.ops import nn_ops as nn
from tensorflow.python.platform import test


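# Numerically stable reference softmax: subtracting the max before
# exponentiating avoids overflow without changing the result.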
def _ref_softmax(values):
  m = np.max(values)
  e = np.exp(values - m)
  return e / np.sum(e)


@test_util.run_all_in_graph_and_eager_modes
class KerasActivationsTest(test.TestCase):

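  # Each built-in activation name should resolve to the matching function in
  # keras.activations and round-trip through serialize()/deserialize().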
  def test_serialization(self):
    all_activations = ['softmax', 'relu', 'elu', 'tanh',
                       'sigmoid', 'hard_sigmoid', 'linear',
                       'softplus', 'softsign', 'selu']
    for name in all_activations:
      fn = keras.activations.get(name)
      ref_fn = getattr(keras.activations, name)
      assert fn == ref_fn
      config = keras.activations.serialize(fn)
      fn = keras.activations.deserialize(config)
      assert fn == ref_fn

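  # Passing the v2 op nn.softmax_v2 through get() should serialize and
  # deserialize back to a function named 'softmax'.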
  def test_serialization_v2(self):
    activation_map = {nn.softmax_v2: 'softmax'}
    for fn_v2_key in activation_map:
      fn_v2 = keras.activations.get(fn_v2_key)
      config = keras.activations.serialize(fn_v2)
      fn = keras.activations.deserialize(config)
      assert fn.__name__ == activation_map[fn_v2_key]

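  # A layer used as an activation (here LeakyReLU) should survive layer
  # serialization when it is supplied via custom_objects.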
  def test_serialization_with_layers(self):
    activation = keras.layers.LeakyReLU(alpha=0.1)
    layer = keras.layers.Dense(3, activation=activation)
    config = keras.layers.serialize(layer)
    deserialized_layer = keras.layers.deserialize(
        config, custom_objects={'LeakyReLU': activation})
    self.assertEqual(deserialized_layer.__class__.__name__,
                     layer.__class__.__name__)
    self.assertEqual(deserialized_layer.activation.__class__.__name__,
                     activation.__class__.__name__)

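  # softmax over the last axis of a 2D input, checked row-wise against the
  # NumPy reference; a rank-1 input should raise a ValueError.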
  def test_softmax(self):
    x = keras.backend.placeholder(ndim=2)
    f = keras.backend.function([x], [keras.activations.softmax(x)])
    test_values = np.random.random((2, 5))

    result = f([test_values])[0]
    expected = _ref_softmax(test_values[0])
    self.assertAllClose(result[0], expected, rtol=1e-05)

    with self.assertRaises(ValueError):
      x = keras.backend.placeholder(ndim=1)
      keras.activations.softmax(x)

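  # On a 3D (batch, time, features) input, softmax should normalize the last
  # axis independently at every timestep.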
  def test_temporal_softmax(self):
    x = keras.backend.placeholder(shape=(2, 2, 3))
    f = keras.backend.function([x], [keras.activations.softmax(x)])
    test_values = np.random.random((2, 2, 3)) * 10
    result = f([test_values])[0]
    expected = _ref_softmax(test_values[0, 0])
    self.assertAllClose(result[0, 0], expected, rtol=1e-05)

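  # selu(x) = scale * x for x > 0 and scale * alpha * (exp(x) - 1) otherwise,
  # with alpha and scale fixed to the self-normalizing constants below.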
  def test_selu(self):
    x = keras.backend.placeholder(ndim=2)
    f = keras.backend.function([x], [keras.activations.selu(x)])
    alpha = 1.6732632423543772848170429916717
    scale = 1.0507009873554804934193349852946

    positive_values = np.array([[1, 2]], dtype=keras.backend.floatx())
    result = f([positive_values])[0]
    self.assertAllClose(result, positive_values * scale, rtol=1e-05)

    negative_values = np.array([[-1, -2]], dtype=keras.backend.floatx())
    result = f([negative_values])[0]
    true_result = (np.exp(negative_values) - 1) * scale * alpha
    self.assertAllClose(result, true_result)

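  # softplus(x) = log(1 + exp(x)).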
  def test_softplus(self):
    def softplus(x):
      return np.log(np.ones_like(x) + np.exp(x))

    x = keras.backend.placeholder(ndim=2)
    f = keras.backend.function([x], [keras.activations.softplus(x)])
    test_values = np.random.random((2, 5))
    result = f([test_values])[0]
    expected = softplus(test_values)
    self.assertAllClose(result, expected, rtol=1e-05)

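  # softsign(x) = x / (1 + |x|).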
  def test_softsign(self):
    def softsign(x):
      return np.divide(x, np.ones_like(x) + np.absolute(x))

    x = keras.backend.placeholder(ndim=2)
    f = keras.backend.function([x], [keras.activations.softsign(x)])
    test_values = np.random.random((2, 5))
    result = f([test_values])[0]
    expected = softsign(test_values)
    self.assertAllClose(result, expected, rtol=1e-05)

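  # The reference sigmoid uses the numerically stable piecewise form:
  # 1 / (1 + exp(-x)) for x >= 0, exp(x) / (1 + exp(x)) otherwise.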
  def test_sigmoid(self):
    def ref_sigmoid(x):
      if x >= 0:
        return 1 / (1 + np.exp(-x))
      else:
        z = np.exp(x)
        return z / (1 + z)
    sigmoid = np.vectorize(ref_sigmoid)

    x = keras.backend.placeholder(ndim=2)
    f = keras.backend.function([x], [keras.activations.sigmoid(x)])
    test_values = np.random.random((2, 5))
    result = f([test_values])[0]
    expected = sigmoid(test_values)
    self.assertAllClose(result, expected, rtol=1e-05)

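  # hard_sigmoid(x) = clip(0.2 * x + 0.5, 0, 1), a piecewise-linear
  # approximation of the sigmoid.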
  def test_hard_sigmoid(self):
    def ref_hard_sigmoid(x):
      x = (x * 0.2) + 0.5
      z = 0.0 if x <= 0 else (1.0 if x >= 1 else x)
      return z
    hard_sigmoid = np.vectorize(ref_hard_sigmoid)

    x = keras.backend.placeholder(ndim=2)
    f = keras.backend.function([x], [keras.activations.hard_sigmoid(x)])
    test_values = np.random.random((2, 5))
    result = f([test_values])[0]
    expected = hard_sigmoid(test_values)
    self.assertAllClose(result, expected, rtol=1e-05)

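  # relu should pass positive inputs through unchanged and map negative
  # inputs to zero.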
  def test_relu(self):
    x = keras.backend.placeholder(ndim=2)
    f = keras.backend.function([x], [keras.activations.relu(x)])
    positive_values = np.random.random((2, 5))
    result = f([positive_values])[0]
    self.assertAllClose(result, positive_values, rtol=1e-05)

    negative_values = np.random.uniform(-1, 0, (2, 5))
    result = f([negative_values])[0]
    expected = np.zeros((2, 5))
    self.assertAllClose(result, expected, rtol=1e-05)

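  # elu(x, alpha) = x for x > 0 and alpha * (exp(x) - 1) otherwise; exercised
  # here with alpha = 0.5.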
  def test_elu(self):
    x = keras.backend.placeholder(ndim=2)
    f = keras.backend.function([x], [keras.activations.elu(x, 0.5)])
    test_values = np.random.random((2, 5))
    result = f([test_values])[0]
    self.assertAllClose(result, test_values, rtol=1e-05)

    negative_values = np.array([[-1, -2]], dtype=keras.backend.floatx())
    result = f([negative_values])[0]
    true_result = (np.exp(negative_values) - 1) / 2
    self.assertAllClose(result, true_result)

  def test_tanh(self):
    test_values = np.random.random((2, 5))
    x = keras.backend.placeholder(ndim=2)
    exp = keras.activations.tanh(x)
    f = keras.backend.function([x], [exp])
    result = f([test_values])[0]
    expected = np.tanh(test_values)
    self.assertAllClose(result, expected, rtol=1e-05)

  def test_exponential(self):
    test_values = np.random.random((2, 5))
    x = keras.backend.placeholder(ndim=2)
    exp = keras.activations.exponential(x)
    f = keras.backend.function([x], [exp])
    result = f([test_values])[0]
    expected = np.exp(test_values)
    self.assertAllClose(result, expected, rtol=1e-05)

  def test_linear(self):
    x = np.random.random((10, 5))
    self.assertAllClose(x, keras.activations.linear(x))

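  # get() should raise ValueError for unknown activation names.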
  def test_invalid_usage(self):
    with self.assertRaises(ValueError):
      keras.activations.get('unknown')

    # The following should be possible but should raise a warning:
    keras.activations.get(keras.layers.LeakyReLU())


if __name__ == '__main__':
  test.main()