/external/tensorflow/tensorflow/python/distribute/

combinations_test.py
    45   "HasClusterParams", lambda: None, has_chief=True, num_workers=2),
    48   def testClusterParams(self, distribution, has_chief, num_workers):  argument
    49   self.assertTrue(has_chief)
    57   def testClusterParamsHasDefault(self, distribution, has_chief, num_workers):  argument
    58   self.assertFalse(has_chief)
    64   def testClusterParamsNoStrategy(self, v, has_chief, num_workers):  argument
    65   self.assertFalse(has_chief)
    71   "WithClusterParams", lambda: None, has_chief=True, num_workers=2),
    82   "Strategy1", lambda: None, has_chief=True, num_workers=0),
    84   "Strategy2", lambda: None, has_chief=False, num_workers=1),
    [all …]

combinations.py
    86   if strategy is not None and _num_total_workers(v.has_chief,
    93   has_chief = strategy.has_chief
    96   if "has_chief" in kwargs and kwargs["has_chief"] != has_chief:
    103  has_chief = kwargs.get("has_chief", False)
    111  update["has_chief"] = has_chief
    259  has_chief=False,  argument
    283  self.has_chief = has_chief
    484  def decorator(self, has_chief, num_workers, runner, **kwargs):  argument
    485  if _num_total_workers(has_chief, num_workers) == 1 or _running_in_worker:
    520  has_chief=has_chief,
    [all …]

strategy_combinations.py
    170  def _deferred_pool_runner(has_chief, num_workers, initializer=None):  argument
    190  has_chief=has_chief,
    205  has_chief=True,
    209  has_chief=True,
    288  has_chief=True,
    297  has_chief=True,
    307  has_chief=True,
    317  has_chief=True,

multi_process_runner_test.py
    84   num_workers=2, num_ps=3, has_chief=True))
    213  has_chief=True, num_workers=2, num_ps=2),
    261  has_chief=True, num_workers=1),
    294  has_chief=True, num_workers=1),
    355  has_chief=True, num_workers=1),
    373  has_chief=True, num_workers=1))
    445  has_chief=True, num_workers=1),
    460  has_chief=False, num_workers=2),
    484  has_chief=False, num_workers=1),
    563  has_chief=True, num_workers=2)
    [all …]

multi_worker_test_base.py
    90   has_chief=False,  argument
    112  if has_chief:
    135  if has_chief:
    158  has_chief=False,  argument
    163  gpu_mem_frac = 0.7 / (num_workers + int(has_chief) + int(has_eval))
    175  if has_chief:
    202  has_chief=has_chief,
    355  has_chief=False,  argument
    359  has_chief=has_chief,
    373  def create_cluster_spec(has_chief=False,  argument
    [all …]

multi_worker_test_base_test.py
    33   num_workers=2, num_ps=1, has_chief=True, rpc_layer="grpc")

device_util_test.py
    86   has_chief=False, num_workers=1, num_ps=0, has_eval=False))

collective_all_reduce_strategy_test.py
    411  num_workers=3, num_ps=0, has_chief=True)
    531  has_chief=False, num_workers=1)

distribute_coordinator_test.py
    198  has_chief=False,  argument
    206  if has_chief:

parameter_server_strategy_v2_test.py
    431  has_chief=True,

parameter_server_strategy_test.py
    828  num_workers=3, num_ps=2, has_chief=True)

mirrored_strategy_test.py
    1288  num_workers=2, num_ps=0, has_chief=True)

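The hits above show the two ways these tests describe a multi-worker topology: as cluster parameters attached to a named strategy combination (combinations.py, strategy_combinations.py) and as an explicit cluster spec built by create_cluster_spec() in multi_worker_test_base.py (line 373 above). A minimal sketch of both follows; the NamedDistribution class name and the dict return value of create_cluster_spec are assumptions, since neither is spelled out in the excerpts.

    # Sketch only: anything beyond the snippets above is an assumption.
    from tensorflow.python.distribute import combinations
    from tensorflow.python.distribute import multi_worker_test_base

    # Cluster parameters on a strategy combination: one chief plus two workers,
    # mirroring the "HasClusterParams" entry at combinations_test.py line 45.
    has_cluster_params = combinations.NamedDistribution(
        "HasClusterParams", lambda: None, has_chief=True, num_workers=2)

    # An explicit cluster spec, as built by create_cluster_spec() at line 373 of
    # multi_worker_test_base.py; assumed to be a dict mapping task types
    # ("chief", "worker", "ps") to lists of "host:port" addresses.
    cluster_spec = multi_worker_test_base.create_cluster_spec(
        has_chief=True, num_workers=2, num_ps=1)
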
/external/tensorflow/tensorflow/tools/api/golden/v2/

tensorflow.__internal__.distribute.multi_process_runner.pbtxt
    17   …argspec: "args=[\'has_chief\', \'num_workers\', \'num_ps\', \'has_eval\'], varargs=None, keywords=…

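The golden file pins the public argspec (has_chief, num_workers, num_ps, has_eval) for a helper exported under tf.__internal__.distribute.multi_process_runner. A hedged sketch of calling it through that path, assuming the member carrying this argspec is create_cluster_spec:

    import tensorflow as tf

    # Sketch: the golden argspec above lists has_chief, num_workers, num_ps and
    # has_eval; the member name create_cluster_spec is assumed, as is the dict
    # shape of the return value.
    mpr = tf.__internal__.distribute.multi_process_runner
    spec = mpr.create_cluster_spec(
        has_chief=True, num_workers=2, num_ps=0, has_eval=False)
    for task_type, addresses in spec.items():
        print(task_type, len(addresses))  # e.g. "chief 1", "worker 2"
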
/external/tensorflow/tensorflow/python/kernel_tests/

collective_ops_multi_worker_test.py
    139  has_chief=True, num_workers=1)
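
collective_ops_multi_worker_test.py builds a one-chief, one-worker spec at line 139. Tests in this family typically hand such a spec to multi_process_runner.run so the test body executes once per task in separate processes; a sketch under that assumption (the run(fn, cluster_spec) interface is not shown in the excerpts above):

    from tensorflow.python.distribute import multi_process_runner
    from tensorflow.python.distribute import multi_worker_test_base

    def proc_fn():
      # Placeholder body; the real tests exercise collective ops here.
      print("task started")

    # Assumed interface: run() launches one process per task named in the spec
    # and waits for all of them to finish.
    cluster_spec = multi_worker_test_base.create_cluster_spec(
        has_chief=True, num_workers=1)
    multi_process_runner.run(proc_fn, cluster_spec)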