/external/tensorflow/tensorflow/python/keras/utils/
D | dataset_creator.py
    55: def __init__(self, dataset_fn):  [argument]
    56: if not callable(dataset_fn):
    58: self.dataset_fn = dataset_fn
    63: dataset = self.dataset_fn(*args, **kwargs)
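`DatasetCreator` only checks that it was handed a callable, stores it, and invokes it later with whatever arguments Keras supplies (in practice a `tf.distribute.InputContext`). A minimal sketch of such a callable, assuming the public `tf.keras.utils.experimental.DatasetCreator` export and toy data made up for illustration:

    import numpy as np
    import tensorflow as tf

    def dataset_fn(input_context):
        # Derive the per-replica batch size from an illustrative global batch of 8.
        batch_size = input_context.get_per_replica_batch_size(8)
        x = np.arange(32, dtype=np.float32).reshape(-1, 1)
        y = 2.0 * x
        ds = tf.data.Dataset.from_tensor_slices((x, y))
        # Shard so each input pipeline reads a distinct slice of the data.
        ds = ds.shard(input_context.num_input_pipelines,
                      input_context.input_pipeline_id)
        return ds.batch(batch_size)

    # DatasetCreator just records the callable; Model.fit invokes it per worker.
    creator = tf.keras.utils.experimental.DatasetCreator(dataset_fn)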
D | dataset_creator_test.py
    40: dataset_fn = lambda: 3  [function]
    44: dataset_creator.DatasetCreator(dataset_fn)()
    46: dataset_fn = lambda: dataset_ops.DatasetV2.from_tensor_slices([1, 1])  [function]
    47: got = dataset_creator.DatasetCreator(dataset_fn)()
    64: def dataset_fn(input_context):  [function]
    75: dataset_creator.DatasetCreator(dataset_fn),
/external/tensorflow/tensorflow/python/distribute/
D | input_lib_test.py
    302: dataset_fn = lambda _: dataset_ops.DatasetV2.range(10)  [function]
    304: input_type, dataset_fn)
    340: dataset_fn = lambda _: dataset_ops.DatasetV1.range(10)  [function]
    345: dataset_fn(distribute_lib.InputContext()), input_workers, distribution)
    369: dataset_fn = lambda _: dataset_ops.Dataset.range(10)  [function]
    371: input_type, dataset_fn)
    392: dataset_fn = lambda _: dataset_ops.DatasetV1.range(10)  [function]
    394: input_type, dataset_fn)
    424: dataset_fn = lambda _: dataset_ops.Dataset.range(10)  [function]
    426: input_type, dataset_fn)
    [all …]
D | single_loss_example.py
    36: def dataset_fn():  [function]
    48: dataset_fn, loss_fn, optimizer, distribution, iterations_per_step)
    57: def dataset_fn():  [function]
    79: return model_fn, dataset_fn, layer
    89: def dataset_fn():  [function]
    121: return model_fn, dataset_fn, batchnorm
D | step_fn.py
    53: def __init__(self, dataset_fn, distribution):  [argument]
    55: self._iterator = distribution.make_input_fn_iterator(lambda _: dataset_fn())
    88: def __init__(self, dataset_fn, loss_fn, optimizer, distribution,  [argument]
    90: super(StandardSingleLossStep, self).__init__(dataset_fn, distribution)
D | one_device_strategy_test.py
    58: dataset_fn = lambda: dataset_ops.Dataset.range(10)  [function]
    61: dataset_fn,
    68: dataset_fn = lambda: dataset_ops.Dataset.range(10)  [function]
    71: dataset_fn,
D | distribute_lib_test.py
    91: def _distribute_datasets_from_function(self, dataset_fn, options):  [argument]
    92: return dataset_fn(distribute_lib.InputContext())
    520: dataset_fn = lambda _: dataset_ops.DatasetV2.range(10).batch(2)  [function]
    522: dataset_fn(distribute_lib.InputContext()))
    525: dataset_fn = lambda _: dataset_ops.DatasetV1.range(10).batch(2)  [function]
    527: dataset_fn(distribute_lib.InputContext()))
    537: dataset_fn = lambda _: dataset_ops.DatasetV2.range(10).batch(2)  [function]
    540: dataset_fn)
    544: dataset_fn = lambda _: dataset_ops.DatasetV2.range(10).batch(2)  [function]
    547: dataset_fn)
D | one_device_strategy.py
    114: dataset_fn, # pylint: disable=useless-super-delegation  [argument]
    154: self).distribute_datasets_from_function(dataset_fn, options)
    327: def _distribute_datasets_from_function(self, dataset_fn, options):  [argument]
    336: dataset_fn,
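`OneDeviceStrategy` forwards `dataset_fn` to the base `distribute_datasets_from_function`, which calls it once per input worker with an `InputContext`. A rough caller-side sketch, assuming a single CPU device:

    import tensorflow as tf

    strategy = tf.distribute.OneDeviceStrategy("/cpu:0")

    def dataset_fn(input_context):
        # With one device there is a single input pipeline, so no sharding is needed.
        return tf.data.Dataset.range(10).batch(2)

    dist_dataset = strategy.distribute_datasets_from_function(dataset_fn)
    for batch in dist_dataset:
        print(batch)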
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/
D | parallel_interleave_test.py
    59: def dataset_fn(self, input_values, cycle_length, block_length, sloppy,  [member in ParallelInterleaveTest]
    182: self.dataset_fn(
    203: self.dataset_fn(
    235: self.dataset_fn(
    275: self.dataset_fn(
    313: self.dataset_fn(
    354: self.dataset_fn(
    389: self.dataset_fn(
    406: self.dataset_fn(
    428: self.dataset_fn(
    [all …]
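The `dataset_fn` member builds pipelines around the legacy `tf.data.experimental.parallel_interleave` transformation; its `cycle_length`, `block_length`, and `sloppy` parameters mirror the arguments above. A hedged sketch of a comparable, much smaller pipeline:

    import tensorflow as tf

    input_values = tf.data.Dataset.from_tensor_slices(
        tf.constant([4, 5, 6], dtype=tf.int64))

    # Interleave three ranges, drawing one element at a time from two of them.
    interleaved = input_values.apply(
        tf.data.experimental.parallel_interleave(
            lambda x: tf.data.Dataset.range(x),
            cycle_length=2,
            block_length=1,
            sloppy=False))

    print(list(interleaved.as_numpy_iterator()))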
D | stats_dataset_ops_test.py
    342: def dataset_fn():  [function]
    348: dataset_fn, {"ParallelMapDataset"}, 10, function_processing_time=True)
    355: def dataset_fn():  [function]
    361: dataset_fn, {"ParallelMapDataset"}, 10, function_processing_time=True)
    368: def dataset_fn():  [function]
    379: self.parallelCallsStats(dataset_fn, {"ParallelInterleaveDatasetV2"}, 10)
    384: def dataset_fn():  [function]
    393: dataset_fn, {"MapAndBatchDataset"},
    410: def dataset_fn():  [function]
    424: dataset_fn, {"ParseExampleDatasetV2"},
    [all …]
D | optimize_dataset_test.py
    100: name, dataset_fn = y
    102: dataset_fn=combinations.NamedObject(name, dataset_fn))
    121: name, dataset_fn, expected_output = y
    123: dataset_fn=combinations.NamedObject(name, dataset_fn),
    252: def testOptimizationDisableIntraOpParallelism(self, dataset_fn,  [argument]
    255: dataset = dataset_fn()
    367: def testOptimizationWithCapturedRefVar(self, dataset_fn):  [argument]
    376: unoptimized_dataset = dataset_fn(variable)
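These tests parameterize over named `dataset_fn` factories and flip individual graph rewrites through `tf.data.Options`. A small sketch of how one such knob is applied (the choice of `map_parallelization` here is only an example):

    import tensorflow as tf

    def dataset_fn():
        # A map-heavy pipeline that the tf.data optimizer may rewrite.
        return tf.data.Dataset.range(10).map(lambda x: x * x)

    options = tf.data.Options()
    options.experimental_optimization.map_parallelization = True

    optimized = dataset_fn().with_options(options)
    print(list(optimized.as_numpy_iterator()))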
D | dense_to_sparse_batch_test.py
    95: def dataset_fn(input_tensor):  [function]
    100: get_next = self.getNext(dataset_fn([[1]]))
    106: get_next = self.getNext(dataset_fn(np.int32(range(13))))
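Here `dataset_fn` feeds `tf.data.experimental.dense_to_sparse_batch`, which packs variable-length rows into one `tf.sparse.SparseTensor` per batch. A self-contained sketch with synthetic ragged rows:

    import tensorflow as tf

    # Rows of increasing length: [1], [2, 2], [3, 3, 3], [4, 4, 4, 4], [5, 5, 5, 5, 5].
    rows = tf.data.Dataset.range(1, 6).map(lambda x: tf.fill([x], x))

    # Batch three ragged rows at a time into a SparseTensor with dense shape [3, 5].
    sparse_batches = rows.apply(
        tf.data.experimental.dense_to_sparse_batch(batch_size=3, row_shape=[5]))

    for st in sparse_batches:
        print(tf.sparse.to_dense(st).numpy())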
D | make_tf_record_dataset_test.py
    112: def dataset_fn():  [function]
    121: next_element = self.getNext(dataset_fn())
    129: next_element = self.getNext(dataset_fn())
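The helper exercised here (`make_tf_record_dataset`) lives in TensorFlow's experimental reader utilities and is not necessarily part of the stable public API, so a comparable pipeline is sketched below with the public `tf.data.TFRecordDataset` instead; the file path and record contents are made up:

    import tensorflow as tf

    path = "/tmp/example.tfrecord"  # hypothetical location
    with tf.io.TFRecordWriter(path) as writer:
        for i in range(5):
            writer.write("record-{}".format(i).encode("utf-8"))

    def dataset_fn():
        # Shuffle-and-batch pipeline roughly comparable to what the test builds.
        return (tf.data.TFRecordDataset([path])
                .shuffle(buffer_size=5, seed=0)
                .batch(2))

    for batch in dataset_fn():
        print(batch.numpy())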
/external/tensorflow/tensorflow/python/data/kernel_tests/
D | reduce_test.py
    144: def dataset_fn():  [function]
    152: _ = dataset_fn().reduce(np.int64(0), reduce_fn)
    163: def dataset_fn():  [function]
    172: _ = dataset_fn().reduce(np.int64(0), reduce_fn)
    183: def dataset_fn():  [function]
    196: _ = dataset_fn().reduce(np.int64(0), reduce1_fn)
    197: _ = dataset_fn().reduce(np.int64(0), reduce2_fn)
    212: def dataset_fn():  [function]
    217: for _ in dataset_fn():
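Each `dataset_fn` here feeds `Dataset.reduce`, which folds every element into a single state tensor. A minimal version of the pattern:

    import numpy as np
    import tensorflow as tf

    def dataset_fn():
        return tf.data.Dataset.range(10)

    # Sum the elements: the int64 zero initial state matches range()'s dtype.
    total = dataset_fn().reduce(np.int64(0), lambda state, value: state + value)
    print(total.numpy())  # 45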
D | text_line_dataset_test.py
    88: def dataset_fn(filenames, num_epochs, batch_size=None):  [function]
    98: dataset_fn([test_filenames[0]], 1), expected_output=expected_output)
    102: dataset_fn([test_filenames[1]], 1),
    109: dataset_fn(test_filenames, 1), expected_output=expected_output)
    115: dataset_fn(test_filenames, 10), expected_output=expected_output * 10)
    119: dataset_fn(test_filenames, 10, 5),
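The `dataset_fn` under test wraps `tf.data.TextLineDataset` over generated files, optionally repeating and batching. A self-contained sketch using a temporary file:

    import tempfile
    import tensorflow as tf

    tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False)
    tmp.write("line 0\nline 1\nline 2\n")
    tmp.close()

    def dataset_fn(filenames, num_epochs, batch_size=None):
        ds = tf.data.TextLineDataset(filenames).repeat(num_epochs)
        return ds.batch(batch_size) if batch_size else ds

    for line in dataset_fn([tmp.name], num_epochs=2):
        print(line.numpy())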
D | fixed_length_record_dataset_test.py
    80: def dataset_fn(filenames, num_epochs, batch_size=None):  [function]
    93: dataset_fn([test_filenames[0]], 1),
    100: dataset_fn([test_filenames[1]], 1),
    111: dataset_fn(test_filenames, 1), expected_output=expected_output)
    114: get_next = self.getNext(dataset_fn(test_filenames, 10))
    123: get_next = self.getNext(dataset_fn(test_filenames, 10, self._num_records))
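Here `dataset_fn` wraps `tf.data.FixedLengthRecordDataset`, which slices a binary file into equally sized records. A sketch with synthetic 4-byte records:

    import tempfile
    import tensorflow as tf

    # Write five fixed-size (4-byte) records to a temporary binary file.
    tmp = tempfile.NamedTemporaryFile(delete=False)
    tmp.write(b"".join(bytes([i]) * 4 for i in range(5)))
    tmp.close()

    def dataset_fn(filenames, num_epochs, batch_size=None):
        ds = tf.data.FixedLengthRecordDataset(filenames, record_bytes=4)
        ds = ds.repeat(num_epochs)
        return ds.batch(batch_size) if batch_size else ds

    for record in dataset_fn([tmp.name], num_epochs=1):
        print(record.numpy())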
D | cache_test.py
    60: def dataset_fn(count=5, filename=None):  [function]
    70: dataset_ops.get_legacy_output_shapes(dataset_fn()))
    72: get_next = self.getNext(dataset_fn())
    83: get_next = self.getNext(dataset_fn(filename=self.cache_prefix))
    93: get_next = self.getNext(dataset_fn(count=0, filename=self.cache_prefix))
    104: dataset_fn(count=0, filename=self.cache_prefix + "nonsense"))
    255: dataset_fn = lambda: dataset_ops.Dataset.range(5).cache()  [function]
    256: d1 = dataset_fn().map(lambda x: x + 1)
    257: d2 = dataset_fn().map(lambda x: x + 6)
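The cache tests cover both in-memory caching (`cache()`) and file-backed caching (`cache(filename)`), and lines 255-257 show two pipelines branching off one cached definition. The in-memory case in isolation:

    import tensorflow as tf

    dataset_fn = lambda: tf.data.Dataset.range(5).cache()

    # Two independent pipelines built from the same cached source definition.
    d1 = dataset_fn().map(lambda x: x + 1)
    d2 = dataset_fn().map(lambda x: x + 6)

    print(list(d1.as_numpy_iterator()))  # [1, 2, 3, 4, 5]
    print(list(d2.as_numpy_iterator()))  # [6, 7, 8, 9, 10]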
D | cardinality_test.py
    165: name, dataset_fn, expected_result = y
    167: dataset_fn=combinations.NamedObject(name, dataset_fn),
    190: def testCardinality(self, dataset_fn, expected_result):  [argument]
    191: dataset = dataset_fn()
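Each named `dataset_fn` is paired with an expected cardinality. A check of this kind with the public `Dataset.cardinality()` API and its sentinel constants:

    import tensorflow as tf

    dataset_fn = lambda: tf.data.Dataset.range(10).batch(2)

    dataset = dataset_fn()
    print(dataset.cardinality().numpy())  # 5

    # Repeating forever or filtering makes the cardinality infinite or unknown.
    print(dataset.repeat().cardinality().numpy() == tf.data.INFINITE_CARDINALITY)
    print(dataset.filter(lambda x: True).cardinality().numpy()
          == tf.data.UNKNOWN_CARDINALITY)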
D | shuffle_test.py
    51: def dataset_fn(count=5, buffer_size=None, seed=0):  [function]
    65: get_next = self.getNext(dataset_fn())
    74: get_next = self.getNext(dataset_fn(buffer_size=100, seed=37))
    85: get_next = self.getNext(dataset_fn(buffer_size=100, seed=37))
    95: get_next = self.getNext(dataset_fn(buffer_size=100, seed=137))
    108: get_next = self.getNext(dataset_fn(buffer_size=2, seed=37))
    118: get_next = self.getNext(dataset_fn(count=0, buffer_size=100, seed=37))
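This `dataset_fn` threads `count`, `buffer_size`, and `seed` through to `Dataset.shuffle`; rebuilding the pipeline with the same seed reproduces the same order, which is what the test relies on. A minimal version:

    import tensorflow as tf

    def dataset_fn(count=5, buffer_size=None, seed=0):
        ds = tf.data.Dataset.range(10).repeat(count)
        if buffer_size:
            ds = ds.shuffle(buffer_size=buffer_size, seed=seed)
        return ds

    # Same buffer size and seed -> the same shuffled order on both builds.
    first = list(dataset_fn(buffer_size=100, seed=37).as_numpy_iterator())
    second = list(dataset_fn(buffer_size=100, seed=37).as_numpy_iterator())
    print(first == second)  # True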
D | list_files_test.py
    94: def dataset_fn():  [function]
    106: next_element = self.getNext(dataset_fn(), requires_initialization=True)
    125: def dataset_fn():  [function]
    129: dataset_fn(),
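`Dataset.list_files` globs the real filesystem, so these helpers need files on disk. A self-contained sketch using a temporary directory (the file names are made up):

    import os
    import tempfile
    import tensorflow as tf

    tmp_dir = tempfile.mkdtemp()
    for name in ("a.txt", "b.txt", "c.txt"):
        open(os.path.join(tmp_dir, name), "w").close()

    def dataset_fn():
        # shuffle=False keeps the listing order deterministic.
        return tf.data.Dataset.list_files(
            os.path.join(tmp_dir, "*.txt"), shuffle=False)

    for path in dataset_fn():
        print(path.numpy())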
D | dataset_test.py
    133: dataset_fn = dataset_ops.Dataset.from_sparse_tensor_slices(
    138: self.assertEmpty(dataset_fn._inputs())
    140: def _testUnaryInputs(self, dataset_fn):  [argument]
    142: self.assertEqual([input_dataset], dataset_fn(input_dataset)._inputs())
    200: def _testInputsWithInterleaveFn(self, dataset_fn, interleave_parallelism):  [argument]
    223: def _testBinaryInputs(self, dataset_fn):  [argument]
    226: self.assertEqual([input1, input2], dataset_fn(input1, input2)._inputs())
    232: def _testVariadicInputs(self, dataset_fn, input_datasets):  [argument]
    235: dataset_fn(input_datasets)._inputs())
/external/tensorflow/tensorflow/python/keras/distribute/
D | test_example.py
    34: def dataset_fn():  [function]
    56: return model_fn, dataset_fn, layer
    66: def dataset_fn():  [function]
    98: return model_fn, dataset_fn, batchnorm
D | minimize_loss_test.py
    95: model_fn, dataset_fn, layer = minimize_loss_example(
    104: iterator = self._get_iterator(distribution, dataset_fn)
    138: model_fn, dataset_fn, layer = minimize_loss_example(
    141: iterator = self._get_iterator(distribution, dataset_fn)
    190: model_fn, dataset_fn, _ = minimize_loss_example(
    199: iterator = self._get_iterator(distribution, dataset_fn)
    251: model_fn, dataset_fn, batchnorm = batchnorm_example(
    267: iterator = self._get_iterator(distribution, dataset_fn)
    363: def dataset_fn():  [function]
    374: iterator = self._get_iterator(distribution, dataset_fn)
    [all …]
D | mirrored_strategy_test.py
    105: def dataset_fn(_):  [function]
    106: return test_utils_obj.dataset_fn(feature_mapper, label_mapper)
    128: dataset_fn)
/external/tensorflow/tensorflow/python/distribute/coordinator/
D | cluster_coordinator.py
    1155: def create_per_worker_dataset(self, dataset_fn):  [argument]
    1221: return _PerWorkerDistributedDataset(dataset_fn, input_workers, self)
    1307: def __init__(self, dataset_fn, input_workers, coordinator):  [argument]
    1319: if isinstance(dataset_fn, def_function.Function):
    1321: dataset_fn = dataset_fn.get_concrete_function()
    1322: elif not isinstance(dataset_fn, tf_function.ConcreteFunction):
    1324: dataset_fn = def_function.function(dataset_fn).get_concrete_function()
    1325: self._dataset_fn = dataset_fn
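`create_per_worker_dataset` accepts a plain callable, a `tf.function`, or a `ConcreteFunction`; plain callables are wrapped in `tf.function` before the dataset is replicated onto each worker. A rough usage sketch; it assumes a parameter-server cluster is already described by `TF_CONFIG`, so it is not runnable standalone:

    import tensorflow as tf

    # Assumes TF_CONFIG describes a running chief/worker/ps cluster.
    resolver = tf.distribute.cluster_resolver.TFConfigClusterResolver()
    strategy = tf.distribute.experimental.ParameterServerStrategy(resolver)
    coordinator = tf.distribute.experimental.coordinator.ClusterCoordinator(strategy)

    def dataset_fn():
        # Built once per worker; each worker iterates its own copy.
        return tf.data.Dataset.range(100).shuffle(100).batch(8).repeat()

    per_worker_dataset = coordinator.create_per_worker_dataset(dataset_fn)
    per_worker_iterator = iter(per_worker_dataset)

    @tf.function
    def train_step(iterator):
        batch = next(iterator)
        return tf.reduce_sum(batch)

    result = coordinator.schedule(train_step, args=(per_worker_iterator,))
    coordinator.join()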