/external/tensorflow/tensorflow/python/distribute/ |
D | input_lib_test.py |
    235  dataset_fn = lambda _: dataset_ops.DatasetV1.range(10)  function
    240  dataset_fn(distribute_lib.InputContext()), input_workers, distribution)
    260  dataset_fn = lambda _: dataset_ops.DatasetV2.range(10).batch(2)  function
    263  dataset_fn(distribute_lib.InputContext()), input_workers, distribution)
    284  dataset_fn = lambda _: dataset_ops.DatasetV2.range(10)  function
    286  dataset_fn = lambda _: dataset_ops.Dataset.range(10)  function
    288  input_type, dataset_fn)
    318  dataset_fn = lambda _: dataset_ops.DatasetV2.range(10)  function
    320  dataset_fn = lambda _: dataset_ops.Dataset.range(10)  function
    322  input_type, dataset_fn)
    [all …]
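The snippets above all follow the same contract: dataset_fn takes a tf.distribute.InputContext and returns a tf.data.Dataset. A minimal sketch of that contract using only the public API (the concrete dataset below is illustrative, not the test's):

    import tensorflow as tf

    # Illustrative dataset_fn: receives an InputContext and returns a dataset.
    # The test lambdas above simply ignore the context argument.
    def dataset_fn(input_context):
        return tf.data.Dataset.range(10).batch(2)

    ds = dataset_fn(tf.distribute.InputContext())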
|
D | single_loss_example.py |
    35   def dataset_fn():  function
    47   dataset_fn, loss_fn, optimizer, distribution, iterations_per_step)
    56   def dataset_fn():  function
    78   return model_fn, dataset_fn, layer
    88   def dataset_fn():  function
    120  return model_fn, dataset_fn, batchnorm
|
D | step_fn.py |
    53  def __init__(self, dataset_fn, distribution):  argument
    55  self._iterator = distribution.make_input_fn_iterator(lambda _: dataset_fn())
    88  def __init__(self, dataset_fn, loss_fn, optimizer, distribution,  argument
    90  super(StandardSingleLossStep, self).__init__(dataset_fn, distribution)
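Here a no-argument dataset_fn is adapted to the (input_context -> dataset) signature that make_input_fn_iterator expects. A rough sketch of that wrapping, assuming a TF release where Strategy.make_input_fn_iterator is still available (it was later deprecated):

    import tensorflow as tf

    def dataset_fn():
        return tf.data.Dataset.range(10).batch(2)

    strategy = tf.distribute.OneDeviceStrategy("/cpu:0")
    # Adapt the zero-argument dataset_fn to the one-argument input_fn signature.
    iterator = strategy.make_input_fn_iterator(lambda _: dataset_fn())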
|
D | one_device_strategy_test.py |
    52  dataset_fn = lambda: dataset_ops.Dataset.range(10)  function
    55  dataset_fn,
    62  dataset_fn = lambda: dataset_ops.Dataset.range(10)  function
    65  dataset_fn,
|
D | minimize_loss_test.py |
    92   model_fn, dataset_fn, layer = minimize_loss_example(
    101  iterator = self._get_iterator(distribution, dataset_fn)
    135  model_fn, dataset_fn, layer = minimize_loss_example(
    138  iterator = self._get_iterator(distribution, dataset_fn)
    187  model_fn, dataset_fn, _ = minimize_loss_example(
    196  iterator = self._get_iterator(distribution, dataset_fn)
    248  model_fn, dataset_fn, batchnorm = batchnorm_example(
    264  iterator = self._get_iterator(distribution, dataset_fn)
    360  def dataset_fn():  function
    371  iterator = self._get_iterator(distribution, dataset_fn)
    [all …]
|
D | distribute_lib_test.py |
    92   def _experimental_distribute_datasets_from_function(self, dataset_fn):  argument
    93   return dataset_fn(distribute_lib.InputContext())
    519  dataset_fn = lambda _: dataset_ops.DatasetV2.range(10).batch(2)  function
    521  dataset_fn(distribute_lib.InputContext()))
    524  dataset_fn = lambda _: dataset_ops.DatasetV1.range(10).batch(2)  function
    526  dataset_fn(distribute_lib.InputContext()))
    536  dataset_fn = lambda _: dataset_ops.DatasetV2.range(10).batch(2)  function
    539  dataset_fn)
    543  dataset_fn = lambda _: dataset_ops.DatasetV2.range(10).batch(2)  function
    546  dataset_fn)
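These tests exercise the hook behind Strategy.experimental_distribute_datasets_from_function, where dataset_fn builds the per-replica dataset from an InputContext. A hedged sketch of the public call; the batch size and sharding choices below are illustrative:

    import tensorflow as tf

    def dataset_fn(input_context):
        # Derive the per-replica batch size from an illustrative global batch of 64.
        batch_size = input_context.get_per_replica_batch_size(64)
        ds = tf.data.Dataset.range(1000)
        # Shard the input pipeline across workers, then batch per replica.
        ds = ds.shard(input_context.num_input_pipelines,
                      input_context.input_pipeline_id)
        return ds.batch(batch_size)

    strategy = tf.distribute.OneDeviceStrategy("/cpu:0")
    dist_ds = strategy.experimental_distribute_datasets_from_function(dataset_fn)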
|
D | one_device_strategy.py |
    110  def experimental_distribute_datasets_from_function(self, dataset_fn):  # pylint: disable=useless-s…  argument
    148  dataset_fn)
    300  def _experimental_distribute_datasets_from_function(self, dataset_fn):  argument
    302  dataset_fn,
|
D | central_storage_strategy.py |
    106  def experimental_distribute_datasets_from_function(self, dataset_fn):  # pylint: disable=useless-s…  argument
    146  self).experimental_distribute_datasets_from_function(dataset_fn)
|
D | keras_metrics_test.py |
    94  def _test_metric(self, distribution, dataset_fn, metric_init_fn, expected_fn):  argument
    98  iterator = distribution.make_input_fn_iterator(lambda _: dataset_fn())
|
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/ |
D | parallel_interleave_test.py |
    58   def dataset_fn(self, input_values, cycle_length, block_length, sloppy,  member in ParallelInterleaveTest
    181  self.dataset_fn(
    202  self.dataset_fn(
    234  self.dataset_fn(
    274  self.dataset_fn(
    312  self.dataset_fn(
    353  self.dataset_fn(
    388  self.dataset_fn(
    405  self.dataset_fn(
    427  self.dataset_fn(
    [all …]
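Here dataset_fn is a test helper that builds an interleaved pipeline from its parameters. A simplified sketch of such a helper, assuming the deprecated tf.data.experimental.parallel_interleave transformation (the input values and map function are illustrative, not the test's exact body):

    import tensorflow as tf

    def dataset_fn(input_values, cycle_length, block_length, sloppy):
        base = tf.data.Dataset.from_tensor_slices(
            tf.constant(input_values, dtype=tf.int64))
        # Each input value x expands into a sub-dataset that repeats x, x times.
        return base.apply(
            tf.data.experimental.parallel_interleave(
                lambda x: tf.data.Dataset.from_tensors(x).repeat(x),
                cycle_length=cycle_length,
                block_length=block_length,
                sloppy=sloppy))

    ds = dataset_fn([4, 5, 6], cycle_length=2, block_length=1, sloppy=False)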
|
D | stats_dataset_ops_test.py |
    342  def dataset_fn():  function
    348  dataset_fn, {"ParallelMapDataset"}, 10, function_processing_time=True)
    355  def dataset_fn():  function
    361  dataset_fn, {"ParallelMapDataset"}, 10, function_processing_time=True)
    368  def dataset_fn():  function
    379  self.parallelCallsStats(dataset_fn, {"ParallelInterleaveDatasetV2"}, 10)
    384  def dataset_fn():  function
    393  dataset_fn, {"MapAndBatchDataset"},
    410  def dataset_fn():  function
    424  dataset_fn, {"ParseExampleDataset"},
    [all …]
|
D | cardinality_test.py |
    156  name, dataset_fn, expected_result = y
    158  dataset_fn=combinations.NamedObject(name, dataset_fn),
    170  def testCardinality(self, dataset_fn, expected_result):  argument
    173  sess.run(cardinality.cardinality(dataset_fn())), expected_result)
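The parameterized cases compare tf.data.experimental.cardinality against an expected value for each named dataset_fn. A small sketch of the idea (the datasets are illustrative):

    import tensorflow as tf

    # Static cardinality is known for simple pipelines...
    print(tf.data.experimental.cardinality(tf.data.Dataset.range(5)).numpy())  # 5
    # ...but becomes UNKNOWN once a data-dependent transformation intervenes.
    dataset_fn = lambda: tf.data.Dataset.range(5).filter(lambda x: x < 3)
    print(tf.data.experimental.cardinality(dataset_fn()).numpy())  # -2 (UNKNOWN)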
|
D | dense_to_sparse_batch_test.py |
    95   def dataset_fn(input_tensor):  function
    100  get_next = self.getNext(dataset_fn([[1]]))
    106  get_next = self.getNext(dataset_fn(np.int32(range(13))))
|
D | optimize_dataset_test.py |
    99   name, dataset_fn = y
    101  dataset_fn=combinations.NamedObject(name, dataset_fn))
    209  def testOptimizationWithCapturedRefVar(self, dataset_fn):  argument
    218  unoptimized_dataset = dataset_fn(variable)
|
D | make_tf_record_dataset_test.py |
    112  def dataset_fn():  function
    121  next_element = self.getNext(dataset_fn())
    129  next_element = self.getNext(dataset_fn())
|
D | scan_test.py |
    57   def dataset_fn(start, step, take):  function
    63   next_element = self.getNext(dataset_fn(start_val, step_val, take_val))
    95   def dataset_fn(start, step, take):  function
    102  next_element = self.getNext(dataset_fn(start_val, step_val, take_val))
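The dataset_fn(start, step, take) helpers build arithmetic sequences with tf.data.experimental.scan. A sketch of one plausible shape for such a helper (not the test's exact body):

    import tensorflow as tf

    def dataset_fn(start, step, take):
        # State carries the running value; each step emits the old state and
        # advances it by `step`.
        return (tf.data.Dataset.from_tensors(0).repeat()
                .apply(tf.data.experimental.scan(
                    start, lambda state, _: (state + step, state)))
                .take(take))

    print(list(dataset_fn(0, 2, 5).as_numpy_iterator()))  # [0, 2, 4, 6, 8]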
|
D | map_and_batch_test.py |
    63   def dataset_fn(batch_size, count):  function
    75   dataset = dataset_fn(14, 28)
    93   get_next = self.getNext(dataset_fn(8, 14))
    113  self.assertDatasetProduces(dataset_fn(8, 0), expected_output=[])
    117  self.assertDatasetProduces(dataset_fn(0, 14), expected_output=[])
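dataset_fn(batch_size, count) parameterizes the fused map-and-batch pipeline under test. A sketch assuming the deprecated tf.data.experimental.map_and_batch transformation (the base dataset and map function are illustrative):

    import tensorflow as tf

    def dataset_fn(batch_size, count):
        return (tf.data.Dataset.range(100).repeat(count)
                .apply(tf.data.experimental.map_and_batch(
                    map_func=lambda x: x * 2, batch_size=batch_size)))

    for batch in dataset_fn(8, 1).take(2):
        print(batch.numpy())  # two batches of 8 doubled values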
|
/external/tensorflow/tensorflow/python/data/kernel_tests/ |
D | reduce_test.py |
    144  def dataset_fn():  function
    152  _ = dataset_fn().reduce(np.int64(0), reduce_fn)
    163  def dataset_fn():  function
    172  _ = dataset_fn().reduce(np.int64(0), reduce_fn)
    183  def dataset_fn():  function
    196  _ = dataset_fn().reduce(np.int64(0), reduce1_fn)
    197  _ = dataset_fn().reduce(np.int64(0), reduce2_fn)
    212  def dataset_fn():  function
    217  for _ in dataset_fn():
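The reduce tests fold a dataset into a single value; seeding the accumulator with np.int64(0) keeps its dtype aligned with Dataset.range, which yields int64 elements. A minimal sketch (the dataset body is illustrative):

    import numpy as np
    import tensorflow as tf

    def dataset_fn():
        return tf.data.Dataset.range(10)

    reduce_fn = lambda state, value: state + value
    total = dataset_fn().reduce(np.int64(0), reduce_fn)
    print(total.numpy())  # 45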
|
D | text_line_dataset_test.py |
    87   def dataset_fn(filenames, num_epochs, batch_size=None):  function
    97   dataset_fn([test_filenames[0]], 1), expected_output=expected_output)
    101  dataset_fn([test_filenames[1]], 1),
    108  dataset_fn(test_filenames, 1), expected_output=expected_output)
    114  dataset_fn(test_filenames, 10), expected_output=expected_output * 10)
    118  dataset_fn(test_filenames, 10, 5),
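The signature dataset_fn(filenames, num_epochs, batch_size=None) suggests a helper that reads text files, repeats them, and optionally batches. A sketch along those lines (the file path in the usage comment is illustrative):

    import tensorflow as tf

    def dataset_fn(filenames, num_epochs, batch_size=None):
        ds = tf.data.TextLineDataset(filenames).repeat(num_epochs)
        if batch_size is not None:
            ds = ds.batch(batch_size)
        return ds

    # e.g. dataset_fn(["/tmp/text_0.txt"], num_epochs=10, batch_size=5)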
|
D | fixed_length_record_dataset_test.py |
    79   def dataset_fn(filenames, num_epochs, batch_size=None):  function
    92   dataset_fn([test_filenames[0]], 1),
    99   dataset_fn([test_filenames[1]], 1),
    110  dataset_fn(test_filenames, 1), expected_output=expected_output)
    113  get_next = self.getNext(dataset_fn(test_filenames, 10))
    122  get_next = self.getNext(dataset_fn(test_filenames, 10, self._num_records))
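The fixed-length variant follows the same shape, built on tf.data.FixedLengthRecordDataset. The record, header, and footer sizes below are illustrative placeholders, not the test's constants:

    import tensorflow as tf

    RECORD_BYTES, HEADER_BYTES, FOOTER_BYTES = 4, 2, 2  # illustrative sizes

    def dataset_fn(filenames, num_epochs, batch_size=None):
        ds = tf.data.FixedLengthRecordDataset(
            filenames,
            record_bytes=RECORD_BYTES,
            header_bytes=HEADER_BYTES,
            footer_bytes=FOOTER_BYTES).repeat(num_epochs)
        if batch_size is not None:
            ds = ds.batch(batch_size)
        return ds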
|
D | cache_test.py |
    57   def dataset_fn(count=5, filename=None):  function
    67   dataset_ops.get_legacy_output_shapes(dataset_fn()))
    69   get_next = self.getNext(dataset_fn())
    80   get_next = self.getNext(dataset_fn(filename=self.cache_prefix))
    90   get_next = self.getNext(dataset_fn(count=0, filename=self.cache_prefix))
    101  dataset_fn(count=0, filename=self.cache_prefix + "nonsense"))
    254  dataset_fn = lambda: dataset_ops.Dataset.range(5).cache()  function
    255  d1 = dataset_fn().map(lambda x: x + 1)
    256  d2 = dataset_fn().map(lambda x: x + 6)
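dataset_fn(count=5, filename=None) covers both cache flavors: in-memory when no filename is given, file-backed otherwise. A sketch of that helper (the cache path is illustrative):

    import tensorflow as tf

    def dataset_fn(count=5, filename=None):
        ds = tf.data.Dataset.range(count)
        return ds.cache(filename) if filename else ds.cache()

    in_memory = dataset_fn()                            # cached in RAM
    on_disk = dataset_fn(filename="/tmp/cache_prefix")  # cached to files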
|
D | shuffle_test.py |
    49   def dataset_fn(count=5, buffer_size=None, seed=0):  function
    63   get_next = self.getNext(dataset_fn())
    72   get_next = self.getNext(dataset_fn(buffer_size=100, seed=37))
    83   get_next = self.getNext(dataset_fn(buffer_size=100, seed=37))
    93   get_next = self.getNext(dataset_fn(buffer_size=100, seed=137))
    106  get_next = self.getNext(dataset_fn(buffer_size=2, seed=37))
    116  get_next = self.getNext(dataset_fn(count=0, buffer_size=100, seed=37))
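dataset_fn(count=5, buffer_size=None, seed=0) parameterizes the shuffle under test: no buffer_size means no shuffling, and a fixed seed makes the permutation deterministic for that seed. A plausible sketch (not the test's exact body):

    import tensorflow as tf

    def dataset_fn(count=5, buffer_size=None, seed=0):
        ds = tf.data.Dataset.range(count).repeat(count)
        if buffer_size is not None:
            ds = ds.shuffle(buffer_size, seed=seed)
        return ds

    elements = list(dataset_fn(buffer_size=100, seed=37).as_numpy_iterator())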
|
D | dataset_test.py |
    132  dataset_fn = dataset_ops.Dataset.from_sparse_tensor_slices(
    137  self.assertEmpty(dataset_fn._inputs())
    139  def _testUnaryInputs(self, dataset_fn):  argument
    141  self.assertEqual([input_dataset], dataset_fn(input_dataset)._inputs())
    199  def _testInputsWithInterleaveFn(self, dataset_fn, interleave_parallelism):  argument
    222  def _testBinaryInputs(self, dataset_fn):  argument
    225  self.assertEqual([input1, input2], dataset_fn(input1, input2)._inputs())
    231  def _testVariadicInputs(self, dataset_fn, input_datasets):  argument
    234  dataset_fn(input_datasets)._inputs())
|
D | list_files_test.py |
    94   def dataset_fn():  function
    106  next_element = self.getNext(dataset_fn(), requires_initialization=True)
    125  def dataset_fn():  function
    129  dataset_fn(),
|
/external/tensorflow/tensorflow/python/keras/engine/ |
D | training_utils_test.py |
    146  def test_assert_not_batched(self, dataset_fn, expected_error=None):  argument
    148  training_utils.assert_not_batched(dataset_fn())
    151  training_utils.assert_not_batched(dataset_fn())
    193  def test_assert_not_shuffled(self, dataset_fn, expected_error=None):  argument
    195  training_utils.assert_not_shuffled(dataset_fn())
    198  training_utils.assert_not_shuffled(dataset_fn())
|