
Searched refs:has_chief (Results 1 – 14 of 14) sorted by relevance

/external/tensorflow/tensorflow/python/distribute/
combinations_test.py
45 "HasClusterParams", lambda: None, has_chief=True, num_workers=2),
48 def testClusterParams(self, distribution, has_chief, num_workers): argument
49 self.assertTrue(has_chief)
57 def testClusterParamsHasDefault(self, distribution, has_chief, num_workers): argument
58 self.assertFalse(has_chief)
64 def testClusterParamsNoStrategy(self, v, has_chief, num_workers): argument
65 self.assertFalse(has_chief)
71 "WithClusterParams", lambda: None, has_chief=True, num_workers=2),
82 "Strategy1", lambda: None, has_chief=True, num_workers=0),
84 "Strategy2", lambda: None, has_chief=False, num_workers=1),
[all …]
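
The snippets above show combinations.NamedDistribution carrying cluster parameters (has_chief, num_workers) into generated test methods. A minimal sketch of that pattern, assuming the combinations module shown above (the "MyStrategy" name and the no-op factory are illustrative, not from the source):

    from absl.testing import parameterized
    from tensorflow.python.distribute import combinations
    from tensorflow.python.platform import test

    # A named distribution whose cluster parameters are forwarded to tests.
    my_strategy = combinations.NamedDistribution(
        "MyStrategy", lambda: None, has_chief=True, num_workers=2)

    class ClusterParamsTest(test.TestCase, parameterized.TestCase):

      @combinations.generate(combinations.combine(distribution=[my_strategy]))
      def testClusterParams(self, distribution, has_chief, num_workers):
        # The generator injects the distribution's cluster params as kwargs,
        # matching the assertions at lines 48-49 above.
        self.assertTrue(has_chief)
        self.assertEqual(num_workers, 2)

    if __name__ == "__main__":
      test.main()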
combinations.py
86 if strategy is not None and _num_total_workers(v.has_chief,
93 has_chief = strategy.has_chief
96 if "has_chief" in kwargs and kwargs["has_chief"] != has_chief:
103 has_chief = kwargs.get("has_chief", False)
111 update["has_chief"] = has_chief
259 has_chief=False, argument
283 self.has_chief = has_chief
484 def decorator(self, has_chief, num_workers, runner, **kwargs): argument
485 if _num_total_workers(has_chief, num_workers) == 1 or _running_in_worker:
520 has_chief=has_chief,
[all …]
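
The guard at line 86 and the decorator at lines 484-485 both call _num_total_workers to decide whether a test needs multiple processes at all. A plausible reconstruction of that helper (an assumption based on these call sites and on the per-task arithmetic at multi_worker_test_base.py line 163, not the verbatim source):

    def _num_total_workers(has_chief, num_workers):
        # The chief is one extra task on top of the workers; with no chief,
        # at least one in-process worker always runs.
        if has_chief:
            return num_workers + 1
        return max(num_workers, 1)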
strategy_combinations.py
170 def _deferred_pool_runner(has_chief, num_workers, initializer=None): argument
190 has_chief=has_chief,
205 has_chief=True,
209 has_chief=True,
288 has_chief=True,
297 has_chief=True,
307 has_chief=True,
317 has_chief=True,
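
_deferred_pool_runner (line 170) is used at lines 205-317 to declare chief-plus-worker pools without paying the process-spawn cost at import time. A sketch of that deferred-creation pattern, assuming multi_process_runner.MultiProcessPoolRunner and multi_worker_test_base.create_cluster_spec (both referenced elsewhere in these results), not the verbatim source:

    from tensorflow.python.distribute import multi_process_runner
    from tensorflow.python.distribute import multi_worker_test_base

    def _deferred_pool_runner(has_chief, num_workers, initializer=None):
      """Returns a callable that creates the pool runner on first use."""
      container = []

      def get_or_create():
        if not container:
          cluster_spec = multi_worker_test_base.create_cluster_spec(
              has_chief=has_chief, num_workers=num_workers)
          container.append(multi_process_runner.MultiProcessPoolRunner(
              cluster_spec, initializer=initializer))
        return container[0]

      return get_or_create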
multi_process_runner_test.py
84 num_workers=2, num_ps=3, has_chief=True))
213 has_chief=True, num_workers=2, num_ps=2),
261 has_chief=True, num_workers=1),
294 has_chief=True, num_workers=1),
355 has_chief=True, num_workers=1),
373 has_chief=True, num_workers=1))
445 has_chief=True, num_workers=1),
460 has_chief=False, num_workers=2),
484 has_chief=False, num_workers=1),
563 has_chief=True, num_workers=2)
[all …]
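
These tests pair a cluster spec (has_chief, num_workers, num_ps) with a function run in every spawned task. A sketch of that usage, assuming the multi_process_runner.run and create_cluster_spec helpers referenced above (proc_fn is illustrative):

    from tensorflow.python.distribute import multi_process_runner
    from tensorflow.python.distribute import multi_worker_test_base

    def proc_fn():
      # Executed once in each task: the chief, each worker, and each ps.
      return "ok"

    cluster_spec = multi_worker_test_base.create_cluster_spec(
        has_chief=True, num_workers=2, num_ps=3)
    result = multi_process_runner.run(proc_fn, cluster_spec)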
multi_worker_test_base.py
90 has_chief=False, argument
112 if has_chief:
135 if has_chief:
158 has_chief=False, argument
163 gpu_mem_frac = 0.7 / (num_workers + int(has_chief) + int(has_eval))
175 if has_chief:
202 has_chief=has_chief,
355 has_chief=False, argument
359 has_chief=has_chief,
373 def create_cluster_spec(has_chief=False, argument
[all …]
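
create_cluster_spec (line 373) builds the standard TF cluster dict, and has_chief=True adds a dedicated "chief" job: lines 112-175 branch on it, and line 163 splits GPU memory across num_workers + int(has_chief) + int(has_eval) tasks. A sketch of the resulting shape (ports are illustrative; they are picked at runtime):

    # create_cluster_spec(has_chief=True, num_workers=2, num_ps=1) would
    # yield a dict of roughly this shape:
    {
        "chief": ["localhost:2222"],
        "worker": ["localhost:2223", "localhost:2224"],
        "ps": ["localhost:2225"],
    }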
multi_worker_test_base_test.py
33 num_workers=2, num_ps=1, has_chief=True, rpc_layer="grpc")
device_util_test.py
86 has_chief=False, num_workers=1, num_ps=0, has_eval=False))
collective_all_reduce_strategy_test.py
411 num_workers=3, num_ps=0, has_chief=True)
531 has_chief=False, num_workers=1)
distribute_coordinator_test.py
198 has_chief=False, argument
206 if has_chief:
parameter_server_strategy_v2_test.py
431 has_chief=True,
parameter_server_strategy_test.py
828 num_workers=3, num_ps=2, has_chief=True)
mirrored_strategy_test.py
1288 num_workers=2, num_ps=0, has_chief=True)
/external/tensorflow/tensorflow/tools/api/golden/v2/
tensorflow.__internal__.distribute.multi_process_runner.pbtxt
17 …argspec: "args=[\'has_chief\', \'num_workers\', \'num_ps\', \'has_eval\'], varargs=None, keywords=…
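
The golden file pins the exported signature: four args, has_chief first. Through the public endpoint the same helper is called as shown below (a sketch; the argument values are illustrative):

    import tensorflow as tf

    cluster_spec = tf.__internal__.distribute.multi_process_runner.create_cluster_spec(
        has_chief=True, num_workers=2, num_ps=1, has_eval=False)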
/external/tensorflow/tensorflow/python/kernel_tests/
collective_ops_multi_worker_test.py
139 has_chief=True, num_workers=1)