# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

16"""Utilities for calculating gradients."""
17from __future__ import absolute_import
18from __future__ import division
19from __future__ import print_function
20
21import enum
22
23from tensorflow.python.util.tf_export import tf_export
24
25
@tf_export("UnconnectedGradients")
class UnconnectedGradients(enum.Enum):
  """Controls how gradient computation behaves when y does not depend on x.

  The gradient of y with respect to x can be zero in two different ways: there
  could be no differentiable path in the graph connecting x to y (and so we can
  statically prove that the gradient is zero) or it could be that runtime values
  of tensors in a particular execution lead to a gradient of zero (say, if a
  relu unit happens to not be activated). To allow you to distinguish between
  these two cases you can choose what value gets returned for the gradient when
  there is no path in the graph from x to y:

  * `NONE`: Indicates that [None] will be returned if there is no path from x
    to y
  * `ZERO`: Indicates that a zero tensor will be returned in the shape of x.
  """
  # String values (not auto()) so the enum round-trips through the public
  # string-based APIs that accept "none"/"zero" directly.
  NONE = "none"
  ZERO = "zero"
44