
Searched refs:distribution_strategy (Results 1 – 20 of 20) sorted by relevance

/external/tensorflow/tensorflow/python/keras/benchmarks/
distribution_util.py:86 def get_distribution_strategy(distribution_strategy="mirrored", argument
108 distribution_strategy = distribution_strategy.lower()
110 if distribution_strategy == "off":
116 if distribution_strategy == "multi_worker_mirrored":
120 if distribution_strategy == "one_device":
128 if distribution_strategy == "mirrored":
138 distribution_strategy)
benchmark_util.py:109 distribution_strategy='off'): argument
163 distribution_strategy=distribution_strategy, num_gpus=num_gpus)
215 'distribution_strategy': distribution_strategy,
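
The get_distribution_strategy helper above lowercases the requested name and maps it onto a tf.distribute strategy ("off", "one_device", "mirrored", "multi_worker_mirrored"), and benchmark_util.py forwards its own distribution_strategy/num_gpus arguments to that helper and echoes the setting back in the reported metadata. A rough, self-contained sketch of the same dispatch, assuming current public tf.distribute class names (the in-tree GPU and device handling is more involved):

    import tensorflow as tf

    def get_strategy(distribution_strategy="mirrored", num_gpus=1):
        """Map a strategy name onto a tf.distribute.Strategy (simplified sketch)."""
        name = distribution_strategy.lower()
        if name == "off":
            return None  # caller runs without an explicit strategy
        if name == "multi_worker_mirrored":
            return tf.distribute.MultiWorkerMirroredStrategy()
        if name == "one_device":
            device = "/gpu:0" if num_gpus >= 1 else "/cpu:0"
            return tf.distribute.OneDeviceStrategy(device)
        if name == "mirrored":
            return tf.distribute.MirroredStrategy()
        raise ValueError("Unrecognized distribution_strategy: %s" % distribution_strategy)

    # benchmark_util.py-style plumbing: resolve the strategy, then report the
    # setting alongside the measured results.
    strategy = get_strategy("mirrored", num_gpus=1)
    metadata = {
        "distribution_strategy": "mirrored",
        "num_replicas": strategy.num_replicas_in_sync if strategy else 1,
    }
    print(metadata)
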
/external/tensorflow/tensorflow/python/keras/distribute/
distributed_training_utils_v1.py:55 def set_weights(distribution_strategy, dist_model, weights): argument
76 assign_ops.append(distribution_strategy.unwrap(sw.assign(w)))
83 def unwrap_values(distribution_strategy, grouped_inputs, grouped_outputs, argument
113 all_inputs = flatten_per_replica_values(distribution_strategy,
115 all_outputs = unwrap_outputs(distribution_strategy, grouped_outputs,
119 all_updates = flatten_per_replica_values(distribution_strategy,
129 distribution_strategy, grouped_feed_dict)
134 distribution_strategy, grouped_fetches)
172 def unwrap_outputs(distribution_strategy, grouped_outputs, argument
194 return flatten_per_replica_values(distribution_strategy,
[all …]
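
set_weights and unwrap_values above take grouped, per-replica values and flatten them to one concrete value per replica via unwrap/flatten_per_replica_values. A minimal sketch of the same idea using the public experimental_local_results API (unwrap is the older spelling this file still uses); the variable and computation are placeholders:

    import tensorflow as tf

    strategy = tf.distribute.MirroredStrategy()

    with strategy.scope():
        v = tf.Variable(1.0)

    # Run a computation on every replica; the result is a per-replica value.
    per_replica = strategy.run(lambda: v + 1.0)

    # Flatten the per-replica value into one tensor per replica, analogous to
    # what flatten_per_replica_values/unwrap do for grouped inputs and outputs.
    local_results = strategy.experimental_local_results(per_replica)
    print([r.numpy() for r in local_results])
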
distributed_training_utils.py:28 def global_batch_size_supported(distribution_strategy): argument
29 return distribution_strategy.extended._global_batch_size # pylint: disable=protected-access
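
global_batch_size_supported simply reads a protected flag on the strategy's extended object, so the check below mirrors the indexed line rather than any public API; it reports whether the strategy's input pipelines are batched by the global batch size:

    import tensorflow as tf

    strategy = tf.distribute.MirroredStrategy()
    # Same check as distributed_training_utils.global_batch_size_supported.
    supported = strategy.extended._global_batch_size  # pylint: disable=protected-access
    print(supported)
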
/external/tensorflow/tensorflow/python/keras/benchmarks/keras_examples_benchmarks/
mnist_conv_custom_training_benchmark_test.py:90 batch_size, distribution_strategy): argument
105 per_replica_losses = distribution_strategy.run(
114 return distribution_strategy.reduce(
123 distribution_strategy=None, argument
158 if distribution_strategy is not None:
162 distribution_strategy)
181 distribution_strategy=None): argument
209 if distribution_strategy is not None and \
214 if distribution_strategy is None and \
236 distribution_strategy, batch_size)
[all …]
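
The custom-training benchmark above runs its train step through distribution_strategy.run and sums the per-replica losses with distribution_strategy.reduce. A self-contained sketch of that pattern; the model, optimizer, and data here are placeholders, not the benchmark's MNIST setup:

    import tensorflow as tf

    strategy = tf.distribute.MirroredStrategy()
    global_batch_size = 64

    with strategy.scope():
        model = tf.keras.Sequential([tf.keras.layers.Dense(1)])
        optimizer = tf.keras.optimizers.SGD()
        loss_obj = tf.keras.losses.MeanSquaredError(
            reduction=tf.keras.losses.Reduction.NONE)

    def replica_step(x, y):
        # Per-replica step: scale the loss by the *global* batch size.
        with tf.GradientTape() as tape:
            pred = model(x, training=True)
            loss = tf.nn.compute_average_loss(
                loss_obj(y, pred), global_batch_size=global_batch_size)
        grads = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(zip(grads, model.trainable_variables))
        return loss

    @tf.function
    def distributed_step(x, y):
        per_replica_losses = strategy.run(replica_step, args=(x, y))
        # Sum the already-scaled per-replica losses into a single scalar.
        return strategy.reduce(
            tf.distribute.ReduceOp.SUM, per_replica_losses, axis=None)

    dataset = tf.data.Dataset.from_tensor_slices(
        (tf.random.normal([256, 8]), tf.random.normal([256, 1]))
    ).batch(global_batch_size)
    for x, y in strategy.experimental_distribute_dataset(dataset):
        print(distributed_step(x, y).numpy())
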
bidirectional_lstm_benchmark_test.py:121 distribution_strategy='mirrored',
reuters_mlp_benchmark_test.py:126 distribution_strategy='mirrored',
mnist_conv_benchmark_test.py:126 distribution_strategy='mirrored',
mnist_hierarchical_rnn_benchmark_test.py:127 distribution_strategy='mirrored',
mnist_irnn_benchmark_test.py:124 distribution_strategy='mirrored',
antirectifier_benchmark_test.py:119 distribution_strategy="mirrored",
cifar10_cnn_benchmark_test.py:135 distribution_strategy='mirrored',
text_classification_transformer_benchmark_test.py:131 distribution_strategy='mirrored',
README.md:224 `distribution_strategy` and etc. You can check examples from
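
Each benchmark test listed above passes distribution_strategy='mirrored' (plus a num_gpus count) into the shared measurement helper in benchmark_util.py. The hand-rolled equivalent of that setting is to build and compile the model inside a MirroredStrategy scope and then call fit; a hedged sketch with a toy model and random data:

    import tensorflow as tf

    strategy = tf.distribute.MirroredStrategy()
    with strategy.scope():
        model = tf.keras.Sequential([
            tf.keras.layers.Dense(64, activation="relu"),
            tf.keras.layers.Dense(10, activation="softmax"),
        ])
        model.compile(optimizer="adam",
                      loss="sparse_categorical_crossentropy",
                      metrics=["accuracy"])

    x = tf.random.normal([1024, 20])
    y = tf.random.uniform([1024], maxval=10, dtype=tf.int32)
    # fit() replicates the step across the strategy's devices automatically.
    model.fit(x, y, batch_size=256, epochs=1)
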
/external/tensorflow/tensorflow/python/training/
slot_creator.py:150 distribution_strategy = distribution_strategy_context.get_strategy()
151 with distribution_strategy.extended.colocate_vars_with(primary):
209 distribution_strategy = distribution_strategy_context.get_strategy()
210 with distribution_strategy.extended.colocate_vars_with(primary):
sync_replicas_optimizer.py:261 distribution_strategy = distribution_strategy_context.get_strategy()
262 with distribution_strategy.extended.colocate_vars_with(local_anchor):
optimizer.py:821 distribution_strategy = distribute_ctx.get_strategy()
822 with distribution_strategy.extended.colocate_vars_with(colocate_with):
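
slot_creator.py, sync_replicas_optimizer.py, and optimizer.py all fetch the current strategy and open colocate_vars_with(primary) before creating slot or accumulator variables, so those variables land on the same device(s) as the variable they shadow. A small sketch of that pattern with the public tf.distribute API; the slot name and initializer are illustrative:

    import tensorflow as tf

    strategy = tf.distribute.MirroredStrategy()

    with strategy.scope():
        primary = tf.Variable(tf.zeros([10]), name="weights")
        # Colocate a slot-like variable with the primary variable, as
        # slot_creator.py does for optimizer slots such as momentum.
        with strategy.extended.colocate_vars_with(primary):
            slot = tf.Variable(tf.zeros_like(primary), name="weights_momentum")

    print(type(slot).__name__)
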
/external/tensorflow/tensorflow/python/keras/engine/
training_arrays_v1.py:536 def _get_iterator(inputs, distribution_strategy=None): argument
537 if distribution_strategy:
539 inputs, distribution_strategy)
543 def _reinitialize_iterator(iterator, distribution_strategy=None): argument
544 if distribution_strategy:
546 iterator, distribution_strategy)
data_adapter.py:1170 distribution_strategy=ds_context.get_strategy(),
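
The v1 training loop builds its iterators from the distribution strategy when one is present, and data_adapter.py picks up the ambient strategy via ds_context.get_strategy(). A sketch with the public equivalents, tf.distribute.get_strategy and experimental_distribute_dataset; the dataset is a placeholder:

    import tensorflow as tf

    strategy = tf.distribute.MirroredStrategy()
    dataset = tf.data.Dataset.from_tensor_slices(tf.range(16)).batch(4)

    with strategy.scope():
        # Inside the scope this returns the strategy the data adapter would use.
        print(type(tf.distribute.get_strategy()).__name__)

    # Roughly what _get_iterator does when a strategy is passed: distribute the
    # dataset and pull per-replica batches from an iterator over it.
    dist_dataset = strategy.experimental_distribute_dataset(dataset)
    iterator = iter(dist_dataset)
    first_batch = next(iterator)
    print(strategy.experimental_local_results(first_batch))
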
/external/tensorflow/tensorflow/python/keras/
backend.py:6504 def configure_and_create_distributed_session(distribution_strategy): argument
6507 def _create_session(distribution_strategy): argument
6517 if is_tpu_strategy(distribution_strategy):
6521 distribution_strategy.configure(session_config)
6522 …master = distribution_strategy.extended._tpu_cluster_resolver.master() # pylint: disable=protecte…
6535 distribution_strategy.configure(session_config)
6540 if distribution_strategy.extended._in_multi_worker_mode():
6543 distribution_strategy,
6546 _create_session(distribution_strategy)
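
configure_and_create_distributed_session is TF1 session plumbing: the strategy is given a chance to amend the session config, and for TPU strategies the session target is taken from the strategy's cluster resolver. A rough sketch of that flow using compat.v1 APIs; it mirrors the indexed code (including its protected-attribute access and the V1 strategy configure() call) and leaves out the multi-worker branch:

    import tensorflow as tf

    def create_session_sketch(strategy):
        # Roughly what backend.configure_and_create_distributed_session does.
        session_config = tf.compat.v1.ConfigProto(allow_soft_placement=True)
        # V1-style strategies (e.g. tf.compat.v1.distribute.MirroredStrategy)
        # expose configure() to add their own settings to the config.
        strategy.configure(session_config)
        master = ""
        if strategy.__class__.__name__.startswith("TPUStrategy"):
            # TPU sessions must talk to the TPU worker, not the local target.
            master = strategy.extended._tpu_cluster_resolver.master()  # pylint: disable=protected-access
        return tf.compat.v1.Session(target=master, config=session_config)
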