• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#     http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ==============================================================================
15"""Utilities related to distributed training."""
16# pylint:disable=protected-access
17from __future__ import absolute_import
18from __future__ import division
19from __future__ import print_function
20
21from tensorflow.python.distribute import distribution_strategy_context as ds_context
22from tensorflow.python.keras import backend
23
24
25# TODO(b/118776054): Currently we support global batch size for TPUStrategy and
26# core MirroredStrategy only. Remove this check when contrib MirroredStrategy is
27# no longer needed.
def global_batch_size_supported(distribution_strategy):
  """Returns whether `distribution_strategy` supports a global batch size.

  Args:
    distribution_strategy: A `tf.distribute.Strategy` instance.

  Returns:
    The strategy's `extended._global_batch_size` flag — truthy when the
    strategy accepts a global (rather than per-replica) batch size.
  """
  extended = distribution_strategy.extended
  return extended._global_batch_size  # pylint: disable=protected-access
30
31
def call_replica_local_fn(fn, *args, **kwargs):
  """Call a function that uses replica-local variables.

  This function correctly handles calling `fn` in a cross-replica
  context.

  Args:
    fn: The function to call.
    *args: Positional arguments to `fn`.
    **kwargs: Keyword arguments to `fn`. A `strategy` key, if present, is
      consumed here (not forwarded to `fn`) and overrides the ambient
      distribution strategy.

  Returns:
    The result of calling `fn`.
  """
  # TODO(b/132666209): Remove this function when we support assign_*
  # for replica-local variables.
  # Sentinel distinguishes "no strategy kwarg" from an explicit
  # `strategy=None`, which must NOT fall back to the ambient strategy.
  no_strategy = object()
  strategy = kwargs.pop('strategy', no_strategy)
  if strategy is no_strategy:
    strategy = (
        ds_context.get_strategy() if ds_context.has_strategy() else None)

  # TODO(b/120571621): TPUStrategy does not implement replica-local variables.
  on_tpu = backend.is_tpu_strategy(strategy)
  fan_out = (not on_tpu) and strategy and ds_context.in_cross_replica_context()
  if fan_out:
    with strategy.scope():
      return strategy.extended.call_for_each_replica(fn, args, kwargs)
  return fn(*args, **kwargs)
61