# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Softplus and SoftplusGrad."""

import numpy as np

from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import nn_ops
import tensorflow.python.ops.nn_grad  # pylint: disable=unused-import
from tensorflow.python.platform import test


class SoftplusTest(test.TestCase):

  def _npSoftplus(self, np_features):
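    # NumPy reference implementation: softplus(x) = log(1 + exp(x)).
    # np.logaddexp(0, x) evaluates this in a numerically stable way, so the
    # reference itself cannot overflow for large positive inputs.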
    np_features = np.asarray(np_features)
    zero = np.asarray(0).astype(np_features.dtype)
    return np.logaddexp(zero, np_features)

  def _testSoftplus(self, np_features, use_gpu=False):
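    # Compares the TF op against the NumPy reference, then checks two
    # invariants: softplus output is strictly positive and keeps the input's
    # shape.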
    np_softplus = self._npSoftplus(np_features)
    with self.cached_session(use_gpu=use_gpu):
      softplus = nn_ops.softplus(np_features)
      tf_softplus = self.evaluate(softplus)
    self.assertAllCloseAccordingToType(np_softplus, tf_softplus)
    self.assertTrue(np.all(tf_softplus > 0))
    self.assertShapeEqual(np_softplus, softplus)

  def testNumbers(self):
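    # Exercises float16/float32/float64 on both CPU and GPU, first on
    # ordinary values and then near the dtype's precision boundary.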
    for t in [np.float16, np.float32, np.float64]:
      self._testSoftplus(
          np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
          use_gpu=False)
      self._testSoftplus(
          np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
          use_gpu=True)
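      # Values around +/-log(eps) straddle the thresholds where a piecewise
      # softplus kernel typically switches between its exp(x), log1p(exp(x)),
      # and identity regimes, so they are the accuracy-critical inputs.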
      log_eps = np.log(np.finfo(t).eps)
      one = t(1)
      ten = t(10)
      self._testSoftplus(
          [
              log_eps, log_eps - one, log_eps + one, log_eps - ten,
              log_eps + ten, -log_eps, -log_eps - one, -log_eps + one,
              -log_eps - ten, -log_eps + ten
          ],
          use_gpu=False)
      self._testSoftplus(
          [
              log_eps, log_eps - one, log_eps + one, log_eps - ten,
              log_eps + ten, -log_eps, -log_eps - one, -log_eps + one,
              -log_eps - ten, -log_eps + ten
          ],
          use_gpu=True)

  @test_util.run_deprecated_v1
  def testGradient(self):
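    # Gradient-checks the first derivative: compute_gradient_error compares
    # the registered analytic gradient (softplus'(x) = sigmoid(x)) against a
    # numeric finite-difference Jacobian and returns the max difference.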
    with self.cached_session():
      x = constant_op.constant(
          [-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
          shape=[2, 5],
          name="x")
      y = nn_ops.softplus(x, name="softplus")
      x_init = np.asarray(
          [[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
          dtype=np.float32,
          order="F")
      err = gradient_checker.compute_gradient_error(
          x, [2, 5], y, [2, 5], x_init_value=x_init)
    print("softplus (float) gradient err = ", err)
    self.assertLess(err, 1e-4)

  @test_util.run_deprecated_v1
  def testGradGrad(self):
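    # Differentiates the gradient itself, which exercises the gradient
    # registered for the SoftplusGrad op rather than for Softplus.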
    with self.cached_session():
      x = constant_op.constant(
          [-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
          shape=[2, 5],
          name="x")
      y = nn_ops.softplus(x, name="softplus")
      (grad,) = gradients_impl.gradients(y, x)
      x_init = np.asarray(
          [[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
          dtype=np.float32,
          order="F")
      err = gradient_checker.compute_gradient_error(
          x, [2, 5], grad, [2, 5], x_init_value=x_init)
    print("softplus (float) gradient of gradient err = ", err)
    self.assertLess(err, 5e-5)

  @test_util.run_deprecated_v1
  def testGradGradGrad(self):
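    # Same pattern one order higher: numerically differentiates the second
    # derivative, covering third-order gradients of softplus.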
    with self.cached_session():
      x = constant_op.constant(
          [-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
          shape=[2, 5],
          name="x")
      y = nn_ops.softplus(x, name="softplus")
      (grad,) = gradients_impl.gradients(y, x)
      (grad_grad,) = gradients_impl.gradients(grad, x)
      x_init = np.asarray(
          [[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
          dtype=np.float32,
          order="F")
      err = gradient_checker.compute_gradient_error(
          x, [2, 5], grad_grad, [2, 5], x_init_value=x_init)
    print("softplus (float) third-order gradient err = ", err)
    self.assertLess(err, 5e-5)

  @test_util.run_deprecated_v1
  def testNoInts(self):
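    # Softplus only accepts floating-point dtypes; an int32 input must be
    # rejected with a TypeError naming the allowed values.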
    with self.cached_session():
      with self.assertRaisesRegex(
          TypeError,
          "'features' has DataType int32 not in list of allowed values"):
        nn_ops.softplus(constant_op.constant(42)).eval()


if __name__ == "__main__":
  test.main()