# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Layers that act as activation functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.engine.input_spec import InputSpec
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import math_ops
from tensorflow.python.util.tf_export import keras_export


@keras_export('keras.layers.LeakyReLU')
class LeakyReLU(Layer):
  """Leaky version of a Rectified Linear Unit.

  It allows a small gradient when the unit is not active:
  `f(x) = alpha * x for x < 0`,
  `f(x) = x for x >= 0`.

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Arguments:
    alpha: Float >= 0. Negative slope coefficient.

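  Usage (a minimal sketch; assumes the `Sequential` model and `Dense` layer
  from the public `tf.keras` API):

  ```python
  import tensorflow as tf

  model = tf.keras.Sequential([
      tf.keras.layers.Dense(16, input_shape=(8,)),
      tf.keras.layers.LeakyReLU(alpha=0.3),  # slope of 0.3 for x < 0
  ])
  ```
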
  """

  def __init__(self, alpha=0.3, **kwargs):
    super(LeakyReLU, self).__init__(**kwargs)
    self.supports_masking = True
    self.alpha = K.cast_to_floatx(alpha)

  def call(self, inputs):
    return K.relu(inputs, alpha=self.alpha)

  def get_config(self):
    config = {'alpha': float(self.alpha)}
    base_config = super(LeakyReLU, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape


@keras_export('keras.layers.PReLU')
class PReLU(Layer):
  """Parametric Rectified Linear Unit.

  It follows:
  `f(x) = alpha * x for x < 0`,
  `f(x) = x for x >= 0`,
  where `alpha` is a learned array with the same shape as `x`.

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Arguments:
    alpha_initializer: Initializer function for the weights.
    alpha_regularizer: Regularizer for the weights.
    alpha_constraint: Constraint for the weights.
    shared_axes: The axes along which to share learnable parameters for the
      activation function. For example, if the incoming feature maps are
      from a 2D convolution with output shape
      `(batch, height, width, channels)`, and you wish to share parameters
      across space so that each filter only has one set of parameters, set
      `shared_axes=[1, 2]`, as in the usage sketch below.
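
  Usage (a minimal sketch of the `shared_axes` case described above; assumes
  `Conv2D` and `Sequential` from the public `tf.keras` API):

  ```python
  import tensorflow as tf

  model = tf.keras.Sequential([
      tf.keras.layers.Conv2D(32, 3, input_shape=(28, 28, 1)),
      # One learned slope per channel, shared across height and width.
      tf.keras.layers.PReLU(shared_axes=[1, 2]),
  ])
  ```
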
  """

  def __init__(self,
               alpha_initializer='zeros',
               alpha_regularizer=None,
               alpha_constraint=None,
               shared_axes=None,
               **kwargs):
    super(PReLU, self).__init__(**kwargs)
    self.supports_masking = True
    self.alpha_initializer = initializers.get(alpha_initializer)
    self.alpha_regularizer = regularizers.get(alpha_regularizer)
    self.alpha_constraint = constraints.get(alpha_constraint)
    if shared_axes is None:
      self.shared_axes = None
    elif not isinstance(shared_axes, (list, tuple)):
      self.shared_axes = [shared_axes]
    else:
      self.shared_axes = list(shared_axes)

  @tf_utils.shape_type_conversion
  def build(self, input_shape):
    param_shape = list(input_shape[1:])
    if self.shared_axes is not None:
      for i in self.shared_axes:
        param_shape[i - 1] = 1
    self.alpha = self.add_weight(
        shape=param_shape,
        name='alpha',
        initializer=self.alpha_initializer,
        regularizer=self.alpha_regularizer,
        constraint=self.alpha_constraint)
    # Set input spec
    axes = {}
    if self.shared_axes:
      for i in range(1, len(input_shape)):
        if i not in self.shared_axes:
          axes[i] = input_shape[i]
    self.input_spec = InputSpec(ndim=len(input_shape), axes=axes)
    self.built = True

  def call(self, inputs):
    pos = K.relu(inputs)
    neg = -self.alpha * K.relu(-inputs)
    return pos + neg

  def get_config(self):
    config = {
        'alpha_initializer': initializers.serialize(self.alpha_initializer),
        'alpha_regularizer': regularizers.serialize(self.alpha_regularizer),
        'alpha_constraint': constraints.serialize(self.alpha_constraint),
        'shared_axes': self.shared_axes
    }
    base_config = super(PReLU, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape


@keras_export('keras.layers.ELU')
class ELU(Layer):
  """Exponential Linear Unit.

  It follows:
  `f(x) = alpha * (exp(x) - 1.) for x < 0`,
  `f(x) = x for x >= 0`.

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Arguments:
    alpha: Scale for the negative factor.
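
  Usage (a minimal sketch with the functional API; assumes `Input`, `Dense`
  and `Model` from the public `tf.keras` API):

  ```python
  import tensorflow as tf

  inputs = tf.keras.Input(shape=(10,))
  x = tf.keras.layers.Dense(32)(inputs)
  outputs = tf.keras.layers.ELU(alpha=1.0)(x)  # alpha * (exp(x) - 1) for x < 0
  model = tf.keras.Model(inputs, outputs)
  ```
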
  """

  def __init__(self, alpha=1.0, **kwargs):
    super(ELU, self).__init__(**kwargs)
    self.supports_masking = True
    self.alpha = K.cast_to_floatx(alpha)

  def call(self, inputs):
    return K.elu(inputs, self.alpha)

  def get_config(self):
    config = {'alpha': float(self.alpha)}
    base_config = super(ELU, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape


@keras_export('keras.layers.ThresholdedReLU')
class ThresholdedReLU(Layer):
  """Thresholded Rectified Linear Unit.

  It follows:
  `f(x) = x for x > theta`,
  `f(x) = 0 otherwise`.

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Arguments:
    theta: Float >= 0. Threshold location of activation.
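
  Usage (a minimal sketch; assumes `Input` and `Model` from the public
  `tf.keras` API):

  ```python
  import tensorflow as tf

  inputs = tf.keras.Input(shape=(4,))
  # Values at or below theta=1.0 are zeroed; larger values pass through.
  outputs = tf.keras.layers.ThresholdedReLU(theta=1.0)(inputs)
  model = tf.keras.Model(inputs, outputs)
  ```
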
  """

  def __init__(self, theta=1.0, **kwargs):
    super(ThresholdedReLU, self).__init__(**kwargs)
    self.supports_masking = True
    self.theta = K.cast_to_floatx(theta)

  def call(self, inputs):
    return inputs * math_ops.cast(
        math_ops.greater(inputs, self.theta), K.floatx())

  def get_config(self):
    config = {'theta': float(self.theta)}
    base_config = super(ThresholdedReLU, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape


@keras_export('keras.layers.Softmax')
class Softmax(Layer):
  """Softmax activation function.

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Arguments:
    axis: Integer, axis along which the softmax normalization is applied.
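
  Usage (a minimal sketch of a classifier head; assumes `Sequential` and
  `Dense` from the public `tf.keras` API):

  ```python
  import tensorflow as tf

  model = tf.keras.Sequential([
      tf.keras.layers.Dense(10, input_shape=(64,)),
      # Normalize the 10 logits into probabilities over the last axis.
      tf.keras.layers.Softmax(axis=-1),
  ])
  ```
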
  """

  def __init__(self, axis=-1, **kwargs):
    super(Softmax, self).__init__(**kwargs)
    self.supports_masking = True
    self.axis = axis

  def call(self, inputs):
    return K.softmax(inputs, axis=self.axis)

  def get_config(self):
    config = {'axis': self.axis}
    base_config = super(Softmax, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape


@keras_export('keras.layers.ReLU')
class ReLU(Layer):
  """Rectified Linear Unit activation function.

  With default values, it returns element-wise `max(x, 0)`.

  Otherwise, it follows:
  `f(x) = max_value` for `x >= max_value`,
  `f(x) = x` for `threshold <= x < max_value`,
  `f(x) = negative_slope * (x - threshold)` otherwise.

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Arguments:
    max_value: Float >= 0. Maximum activation value.
    negative_slope: Float >= 0. Negative slope coefficient.
    threshold: Float. Threshold value for thresholded activation.
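
  Usage (a minimal sketch; assumes `Sequential` and `Dense` from the public
  `tf.keras` API):

  ```python
  import tensorflow as tf

  model = tf.keras.Sequential([
      tf.keras.layers.Dense(16, input_shape=(8,)),
      # Cap activations at 6.0 and keep a small slope of 0.1 below the
      # threshold of 0.0.
      tf.keras.layers.ReLU(max_value=6.0, negative_slope=0.1, threshold=0.0),
  ])
  ```
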
  """

  def __init__(self, max_value=None, negative_slope=0, threshold=0, **kwargs):
    super(ReLU, self).__init__(**kwargs)
    if max_value is not None and max_value < 0.:
      raise ValueError('max_value of Relu layer '
                       'cannot be a negative value: ' + str(max_value))
    if negative_slope < 0.:
      raise ValueError('negative_slope of Relu layer '
                       'cannot be a negative value: ' + str(negative_slope))

    self.supports_masking = True
    if max_value is not None:
      max_value = K.cast_to_floatx(max_value)
    self.max_value = max_value
    self.negative_slope = K.cast_to_floatx(negative_slope)
    self.threshold = K.cast_to_floatx(threshold)

  def call(self, inputs):
    # `K.relu` names its leaky-slope argument `alpha`, so `negative_slope`
    # is passed through that argument.
    return K.relu(inputs,
                  alpha=self.negative_slope,
                  max_value=self.max_value,
                  threshold=self.threshold)

  def get_config(self):
    config = {
        'max_value': self.max_value,
        'negative_slope': self.negative_slope,
        'threshold': self.threshold
    }
    base_config = super(ReLU, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape