/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/ |
D | auto_shard_dataset_test.py |
    51  dataset = dataset_ops.Dataset.list_files(self.test_filenames, shuffle=True)
    52  dataset = dataset.flat_map(core_readers.TFRecordDataset)
    53  dataset = dataset.batch(5)
    54  dataset = distribute._AutoShardDataset(dataset, 5, 3)
    61  self.assertDatasetProduces(dataset, list(chunk(expected, 5)))
    71  dataset = dataset_ops.Dataset.zip((dataset1, dataset2))
    72  dataset = distribute._AutoShardDataset(dataset, 5, 3)
    80  self.assertDatasetProduces(dataset, expected)
    92  dataset = dataset1.concatenate(dataset2)
    93  dataset = distribute._AutoShardDataset(dataset, 5, 3)
    [all …]
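_AutoShardDataset is an internal transformation that rewrites a pipeline so each of N workers reads a distinct subset of the input. A rough public-API analogue of the 5-worker/index-3 case above, assuming a hypothetical /tmp/data file pattern:

    import tensorflow as tf

    # Shard the file list across 5 workers; worker 3 reads only its files.
    files = tf.data.Dataset.list_files("/tmp/data/*.tfrecord", shuffle=False)
    files = files.shard(num_shards=5, index=3)
    dataset = files.flat_map(tf.data.TFRecordDataset)
    dataset = dataset.batch(5)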
D | map_and_batch_test.py |
    69   dataset = dataset_ops.Dataset.from_tensor_slices(components).repeat(
    79   dataset = dataset.with_options(options)
    80   return dataset
    84   dataset = dataset_fn(14, 28)
    85   get_next = self.getNext(dataset)
    89   for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    135  dataset = (
    145  dataset = dataset.with_options(options)
    149  [4, 1], dataset_ops.get_legacy_output_shapes(dataset).as_list())
    152  [None, 1], dataset_ops.get_legacy_output_shapes(dataset).as_list())
    [all …]
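These tests exercise the fused map-and-batch transformation. A minimal sketch using the public TF 1.x experimental API; the map function and batch size of 14 (mirroring dataset_fn(14, 28) above) are illustrative:

    import tensorflow as tf

    # Fuses the map and batch steps into a single op.
    dataset = tf.data.Dataset.range(100)
    dataset = dataset.apply(
        tf.data.experimental.map_and_batch(
            map_func=lambda x: x * 2, batch_size=14, num_parallel_calls=2))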
D | rebatch_dataset_test.py |
    33  def _flat_shapes(dataset):  argument
    34  return nest.flatten(dataset_ops.get_legacy_output_shapes(dataset))
    43  dataset = dataset_ops.Dataset.range(1024).batch(
    45  rebatched_dataset = batching._RebatchDataset(dataset, num_workers=4)
    48  [ts.as_list() for ts in _flat_shapes(dataset)])
    57  dataset = dataset_ops.Dataset.range(1024)
    59  batching._RebatchDataset(dataset, num_workers=4)
    62  dataset = dataset_ops.Dataset.range(1024).batch(
    66  rebatched_dataset = batching._RebatchDataset(dataset, num_workers=5)
    71  dataset = (
    [all …]
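_RebatchDataset(dataset, num_workers=N) rewrites a batched pipeline so each of N workers sees batches of 1/N the original size; the unbatched case at line 57 and the non-divisible num_workers=5 case are the error paths being checked. An approximate public-API equivalent of the happy path (not the internal graph rewrite itself), with illustrative batch sizes:

    import tensorflow as tf

    # Turn global batches of 32 into per-worker batches of 8 (4 workers).
    dataset = tf.data.Dataset.range(1024).batch(32)
    dataset = dataset.apply(tf.data.experimental.unbatch()).batch(8)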
D | stats_dataset_ops_test.py |
    38  def function_set_stats_aggregator(dataset,  argument
    42  return dataset.apply(
    46  def function_apply_options(dataset, aggregator, prefix="", counter_prefix=""):  argument
    52  return dataset.with_options(options)
    64  dataset = dataset_ops.Dataset.range(100).map(
    67  dataset = dataset_transformation(dataset, aggregator)
    68  next_element = self.getNext(dataset, requires_initialization=True)
    86  dataset = dataset_ops.Dataset.range(100).apply(
    88  dataset = dataset_transformation(dataset, aggregator)
    89  next_element = self.getNext(dataset, requires_initialization=True)
    [all …]
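The two helpers at lines 38 and 46 attach a StatsAggregator to a pipeline either via apply() or via dataset options; the tests then collect latency statistics from a range pipeline. A hedged sketch of the options-based wiring (TF 1.x experimental API; attribute names shifted between releases):

    import tensorflow as tf

    aggregator = tf.data.experimental.StatsAggregator()
    dataset = tf.data.Dataset.range(100).apply(
        tf.data.experimental.latency_stats("record_latency"))
    options = tf.data.Options()
    options.experimental_stats.aggregator = aggregator
    dataset = dataset.with_options(options)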
/external/tensorflow/tensorflow/python/data/kernel_tests/ |
D | from_tensors_test.py |
    46  dataset = dataset_ops.Dataset.from_tensors(components)
    50  nest.flatten(dataset_ops.get_legacy_output_shapes(dataset)))
    52  self.assertDatasetProduces(dataset, expected_output=[components])
    65  dataset = dataset_ops.Dataset.from_tensors(components)
    69  [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    70  self.assertDatasetProduces(dataset, expected_output=[components])
    84  dataset = dataset_ops.Dataset.from_tensors(components)
    88  ], [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    90  self.assertDatasetProduces(dataset, expected_output=[components])
    98  dataset = dataset_ops.Dataset.from_tensors(components)
    [all …]
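Dataset.from_tensors wraps its whole argument as a single element, which is why every assertion above expects exactly [components]. A minimal sketch with illustrative components:

    import tensorflow as tf

    # One element: the whole (scalar, vector) tuple, not three slices.
    components = (tf.constant(37.0), tf.constant([1, 2, 3]))
    dataset = tf.data.Dataset.from_tensors(components)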
D | filter_test_base.py |
    47  dataset = dataset_ops.Dataset.from_tensor_slices(components).map(
    50  dataset = self.apply_filter(
    51  dataset, lambda x, _y, _z: math_ops.equal(
    56  [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    57  get_next = self.getNext(dataset)
    73  dataset = dataset_ops.Dataset.range(4)
    74  dataset = self.apply_filter(
    75  dataset, lambda x: math_ops.not_equal(math_ops.mod(x, 3), 2))
    76  self.assertDatasetProduces(dataset, expected_output=[0, 1, 3])
    79  dataset = dataset_ops.Dataset.range(10).map(
    [all …]
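Lines 73-76 are the clearest case: filter keeps the elements whose predicate is true, so range(4) with the predicate x % 3 != 2 yields 0, 1, 3. The same case in public-API form:

    import tensorflow as tf

    dataset = tf.data.Dataset.range(4)
    dataset = dataset.filter(
        lambda x: tf.math.not_equal(tf.math.mod(x, 3), 2))
    # yields 0, 1, 3 — element 2 fails the predicate.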
D | shard_test.py |
    31  dataset = dataset_ops.Dataset.range(10).shard(5, 2)
    32  self.assertDatasetProduces(dataset, expected_output=[2, 7])
    37  dataset = dataset_ops.Dataset.zip((dataset_a, dataset_b)).shard(5, 2)
    38  self.assertDatasetProduces(dataset, expected_output=[(2, 8), (7, 3)])
    41  dataset = dataset_ops.Dataset.range(10).shard(5, 0)
    42  self.assertDatasetProduces(dataset, expected_output=[0, 5])
    46  dataset = dataset_ops.Dataset.range(10).shard(5, 7)
    47  self.evaluate(self.getNext(dataset)())
    51  dataset = dataset_ops.Dataset.range(10).shard(5, -3)
    52  self.evaluate(self.getNext(dataset)())
    [all …]
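shard(num_shards, index) keeps every element whose position modulo num_shards equals index, so range(10).shard(5, 2) yields [2, 7]; an out-of-range index (line 46) or a negative one (line 51) is rejected. The first case in public-API form:

    import tensorflow as tf

    dataset = tf.data.Dataset.range(10).shard(num_shards=5, index=2)
    # yields 2, 7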
D | map_test.py |
    82   dataset = dataset_ops.Dataset.range(num_elements).map(
    84   return dataset, coordination_events
    96   dataset = dataset_ops.Dataset.from_tensor_slices(components).map(
    100  [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    101  return dataset
    161  dataset = dataset_ops.Dataset.from_tensor_slices(components).map(
    167  [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    168  return dataset
    251  dataset = self._buildParallelMapDataset(components, 1000, 100, 100)
    253  dataset = dataset.prefetch(100)
    [all …]
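These tests build map pipelines both sequentially and with parallel calls plus a trailing prefetch (line 253). A minimal public-API sketch with illustrative values:

    import tensorflow as tf

    dataset = tf.data.Dataset.from_tensor_slices([1, 2, 3])
    dataset = dataset.map(lambda x: x * 2, num_parallel_calls=2)
    dataset = dataset.prefetch(100)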
D | range_test.py |
    31  dataset = dataset_ops.Dataset.range(5)
    32  self.assertDatasetProduces(dataset, expected_output=range(5))
    36  dataset = dataset_ops.Dataset.range(start, stop)
    37  self.assertDatasetProduces(dataset, expected_output=range(2, 5))
    41  dataset = dataset_ops.Dataset.range(start, stop, step)
    42  self.assertDatasetProduces(dataset, expected_output=range(2, 10, 2))
    47  dataset = dataset_ops.Dataset.range(start, stop, step)
    48  self.evaluate(dataset._variant_tensor)
    52  dataset = dataset_ops.Dataset.range(start, stop, step)
    53  self.assertDatasetProduces(dataset, expected_output=range(2, 10, -1))
    [all …]
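The three Dataset.range signatures under test, summarized:

    import tensorflow as tf

    tf.data.Dataset.range(5)         # 0, 1, 2, 3, 4
    tf.data.Dataset.range(2, 5)      # 2, 3, 4
    tf.data.Dataset.range(2, 10, 2)  # 2, 4, 6, 8
    # A negative step with start < stop (line 53) matches range(2, 10, -1):
    # valid, but it produces no elements.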
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/optimization/ |
D | optimize_dataset_test.py |
    107  dataset = dataset_ops.Dataset.range(
    111  dataset = dataset.with_options(options)
    112  get_next = self.getNext(dataset)
    118  dataset = dataset_ops.Dataset.from_tensors(input_t)
    121  dataset = dataset.with_options(options)
    122  iterator = dataset_ops.make_initializable_iterator(dataset)
    133  dataset = dataset_ops.Dataset.from_tensor_slices(input_t)
    136  dataset = dataset.with_options(options)
    137  iterator = dataset_ops.make_initializable_iterator(dataset)
    148  dataset = dataset_ops.Dataset.from_tensors(0)
    [all …]
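The pattern throughout is: build a dataset, enable an optimization via options, and check the optimized pipeline still yields the right elements. A hedged sketch of the options plumbing; the specific experimental_optimization field is illustrative and varies by TF version:

    import tensorflow as tf

    options = tf.data.Options()
    # Field name is illustrative; this tree exposes several such toggles.
    options.experimental_optimization.noop_elimination = True
    dataset = tf.data.Dataset.range(10).with_options(options)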
D | choose_fastest_branch_dataset_test.py |
    40  dataset = dataset_ops.Dataset.from_tensor_slices([0, 1, 2, 3, 4])
    42  def branch(dataset):  argument
    43  return dataset.map(lambda x: x)
    46  dataset, [branch, branch])
    51  expected_shapes=dataset.output_shapes)
    54  dataset = dataset_ops.Dataset.range(10)
    59  def branch_0(dataset):  argument
    60  return dataset.map(lambda x: x + const_64)
    62  def branch_1(dataset):  argument
    63  return dataset.map(lambda x: x + math_ops.cast(const_32, dtypes.int64))
    [all …]
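The internal ChooseFastestBranch dataset takes an input dataset plus several functions, each mapping a dataset to a semantically equivalent dataset, and keeps whichever branch runs fastest. The branch signature, sketched with the public API only (constants are illustrative):

    import tensorflow as tf

    def branch_0(ds):
      # Equivalent pipelines: add the constant as int64 directly...
      return ds.map(lambda x: x + tf.constant(1, tf.int64))

    def branch_1(ds):
      # ...or add an int32 constant after casting, as at line 63.
      return ds.map(lambda x: x + tf.cast(tf.constant(1, tf.int32), tf.int64))

    # The internal op would race these; calling either directly is equivalent.
    dataset = branch_0(tf.data.Dataset.range(10))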
/external/tensorflow/tensorflow/python/keras/engine/ |
D | training_dataset_test.py |
    63   dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    64   dataset = dataset.repeat(100)
    65   dataset = dataset.batch(10)
    66   iterator = dataset_ops.make_one_shot_iterator(dataset)
    124  dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    125  dataset = dataset.repeat(2)
    126  dataset = dataset.batch(10)
    127  iterator = dataset_ops.make_one_shot_iterator(dataset)
    155  dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    156  dataset = dataset.repeat(100)
    [all …]
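These Keras tests feed model.fit a repeated, batched dataset (or, in the older code path, a one-shot iterator over it). A toy end-to-end sketch; the model and shapes are illustrative, not the ones in the test:

    import numpy as np
    import tensorflow as tf

    model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(3,))])
    model.compile(optimizer="sgd", loss="mse")

    inputs, targets = np.ones((100, 3)), np.ones((100, 1))
    dataset = tf.data.Dataset.from_tensor_slices((inputs, targets))
    dataset = dataset.repeat(100).batch(10)
    model.fit(dataset, epochs=1, steps_per_epoch=10)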
/external/tensorflow/tensorflow/python/distribute/ |
D | input_ops_test.py |
    92   def _verifySimpleShardingOutput(self, dataset, record_fn):  argument
    93   iterator = dataset.make_one_shot_iterator()
    104  dataset = readers.TFRecordDataset(self._createTFRecordFiles())
    105  dataset = input_ops.auto_shard_dataset(
    106  dataset, self._num_shards, self._shard_index)
    108  self._verifySimpleShardingOutput(dataset, self._record)
    112  dataset = dataset_ops.Dataset.from_tensor_slices(
    114  dataset = dataset.flat_map(readers.TFRecordDataset)
    115  dataset = input_ops.auto_shard_dataset(
    116  dataset, self._num_shards, self._shard_index)
    [all …]
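input_ops.auto_shard_dataset tries to shard at the file level (the flat_map over TFRecord files at line 114) before falling back to element-level sharding. The element-level fallback in public-API terms, with hypothetical file names:

    import tensorflow as tf

    dataset = tf.data.TFRecordDataset(["/tmp/f0.tfrecord", "/tmp/f1.tfrecord"])
    dataset = dataset.shard(num_shards=2, index=0)  # keeps every 2nd record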
/external/tensorflow/tensorflow/contrib/tensor_forest/kernels/v4/ |
D | decision_node_evaluator_test.cc |
    37  std::unique_ptr<tensorflow::tensorforest::TensorDataSet> dataset(  in TEST() local
    41  ASSERT_EQ(eval->Decide(dataset, 2), 0);  in TEST()
    42  ASSERT_EQ(eval->Decide(dataset, 3), 0);  in TEST()
    43  ASSERT_EQ(eval->Decide(dataset, 4), 1);  in TEST()
    54  std::unique_ptr<tensorflow::tensorforest::TensorDataSet> dataset(  in TEST() local
    58  ASSERT_EQ(eval->Decide(dataset, 2), 0);  in TEST()
    59  ASSERT_EQ(eval->Decide(dataset, 3), 1);  in TEST()
    60  ASSERT_EQ(eval->Decide(dataset, 4), 1);  in TEST()
    71  std::unique_ptr<tensorflow::tensorforest::TensorDataSet> dataset(  in TEST() local
    75  ASSERT_EQ(eval->Decide(dataset, 2), 1);  in TEST()
    [all …]
/external/tensorflow/tensorflow/contrib/eager/python/examples/revnet/ |
D | imagenet_input.py |
    160  dataset = tf.data.Dataset.list_files(file_pattern, shuffle=self.is_training)
    163  dataset = dataset.repeat()
    167  dataset = tf.data.TFRecordDataset(filename, buffer_size=buffer_size)
    168  return dataset
    171  dataset = dataset.apply(
    175  dataset = dataset.cache().apply(
    178  dataset = dataset.shuffle(1024)
    190  dataset = dataset.apply(
    199  dataset = dataset.map(
    204  dataset = dataset.map(functools.partial(self.set_shapes, batch_size))
    [all …]
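This is the classic tf.data input recipe: list files, repeat, read records via a parallel interleave, cache/shuffle, then map and batch. A condensed sketch of the file-reading stage, assuming a hypothetical file pattern and illustrative buffer and cycle sizes:

    import tensorflow as tf

    files = tf.data.Dataset.list_files("/tmp/imagenet/train-*", shuffle=True)
    files = files.repeat()
    dataset = files.apply(
        tf.data.experimental.parallel_interleave(
            lambda f: tf.data.TFRecordDataset(f, buffer_size=8 << 20),
            cycle_length=16))
    dataset = dataset.shuffle(1024)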
/external/tensorflow/tensorflow/python/data/experimental/benchmarks/ |
D | autotune_benchmark.py |
    42   dataset = dataset_ops.Dataset.from_tensors((np.random.rand(1, 4 * k),
    45   dataset = dataset.map(
    50   dataset = dataset.with_options(options)
    51   iterator = dataset_ops.make_one_shot_iterator(dataset)
    78   dataset = dataset_ops.Dataset.from_tensors((np.random.rand(1, 4 * k),
    81   dataset = dataset.apply(
    89   dataset = dataset.with_options(options)
    90   iterator = dataset_ops.make_one_shot_iterator(dataset)
    116  dataset = dataset_ops.Dataset.from_tensors((np.random.rand(1, 4 * k),
    119  dataset = dataset.map(math_ops.matmul)
    [all …]
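The benchmark measures how well the autotuner picks a degree of parallelism for a matmul-heavy map. The key ingredient is num_parallel_calls=AUTOTUNE; a sketch with illustrative matrix sizes (the benchmark's 4*k dimension is elided here):

    import numpy as np
    import tensorflow as tf

    # Each element is an (a, b) pair; map unpacks it into matmul(a, b).
    dataset = tf.data.Dataset.from_tensors(
        (np.random.rand(1, 8), np.random.rand(8, 1))).repeat()
    dataset = dataset.map(
        tf.linalg.matmul, num_parallel_calls=tf.data.experimental.AUTOTUNE)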
D | unbatch_benchmark.py |
    40  dataset = dataset_ops.Dataset.from_tensors("element").repeat(None)
    42  dataset = dataset.batch(batch_size_placeholder)
    43  dataset = dataset.apply(batching.unbatch())
    44  dataset = dataset.skip(elems_per_trial)
    47  dataset = dataset.with_options(options)
    48  iterator = dataset_ops.make_initializable_iterator(dataset)
    77  dataset = dataset_ops.Dataset.from_tensors("element").repeat(None)
    79  dataset = dataset.batch(batch_size_placeholder)
    80  dataset = dataset.flat_map(dataset_ops.Dataset.from_tensor_slices)
    81  dataset = dataset.skip(elems_per_trial)
    [all …]
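The two unbatching strategies being compared, side by side: the fused unbatch transformation (line 43) versus a naive flat_map over tensor slices (line 80). With an illustrative fixed batch size in place of the benchmark's placeholder:

    import tensorflow as tf

    batched = tf.data.Dataset.from_tensors("element").repeat(100).batch(10)
    fused = batched.apply(tf.data.experimental.unbatch())
    naive = batched.flat_map(tf.data.Dataset.from_tensor_slices)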
D | csv_dataset_benchmark.py |
    64   def _runBenchmark(self, dataset, num_cols, prefix):  argument
    65   dataset = dataset.skip(self._num_per_iter - 1)
    68   dataset = dataset.with_options(options)
    71   next_element = dataset_ops.make_one_shot_iterator(dataset).get_next()
    95   dataset = core_readers.TextLineDataset(self._filenames[i]).repeat()
    96   …dataset = dataset.map(lambda l: parsing_ops.decode_csv(l, **kwargs))  # pylint: disable=cell-var-f…
    97   self._runBenchmark(dataset, num_cols, 'csv_float_map_decode_csv')
    105  dataset = core_readers.TextLineDataset(self._filenames[i]).repeat()
    106  …dataset = dataset.map(lambda l: parsing_ops.decode_csv(l, **kwargs))  # pylint: disable=cell-var-f…
    107  self._runBenchmark(dataset, num_cols, 'csv_strings_map_decode_csv')
    [all …]
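The benchmark compares per-line decode_csv mapping against the fused CsvDataset reader. Both forms, with a hypothetical file path and an illustrative all-float schema:

    import tensorflow as tf

    record_defaults = [[0.0]] * 4  # four float columns
    lines = tf.data.TextLineDataset("/tmp/data.csv").repeat()
    mapped = lines.map(
        lambda l: tf.io.decode_csv(l, record_defaults=record_defaults))
    fused = tf.data.experimental.CsvDataset(
        "/tmp/data.csv", record_defaults=record_defaults)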
/external/tensorflow/tensorflow/python/data/benchmarks/ |
D | meta_benchmark.py |
    44  dataset = self.setup_fast_dataset()
    45  self.run_benchmark_with_only_cpp_iterations(dataset)
    48  dataset = self.setup_fast_dataset()
    49  self.run_benchmark_with_session_run(dataset)
    52  dataset = self.setup_fast_dataset()
    53  self.run_benchmark_with_session_run(dataset, make_callable=True)
    57  dataset = self.setup_fast_dataset()
    58  self.run_benchmark_in_eager(dataset)
    61  dataset = self.setup_fast_dataset()
    64  return dataset.apply(sleep.sleep(1000))
    [all …]
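The meta-benchmark times the same dataset three ways: pure C++ iteration, session.run, and eager iteration. A sketch of the shape of the eager-mode timing loop, assuming eager execution is enabled and an illustrative element count:

    import time
    import tensorflow as tf

    dataset = tf.data.Dataset.range(10000)
    start = time.time()
    for _ in dataset:  # eager iteration drives the C++ iterator directly
      pass
    print("wall time:", time.time() - start)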
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/serialization/ |
D | choose_fastest_branch_dataset_serialization_test.py |
    36  dataset = dataset_ops.Dataset.range(size)
    38  def branch_0(dataset):  argument
    39  return dataset.map(lambda x: x).batch(10)
    41  def branch_1(dataset):  argument
    42  return dataset.batch(10).map(lambda x: x)
    45  dataset, [branch_0, branch_1],
    54  dataset = dataset_ops.Dataset.range(10)
    58  def branch_0(dataset):  argument
    59  return dataset.map(lambda x: x + const_64)
    61  def branch_1(dataset):  argument
    [all …]
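The two branches at lines 38 and 41 are interchangeable because batching commutes with an elementwise map, which is what makes them safe for ChooseFastestBranch to race. The equivalence, in public-API form:

    import tensorflow as tf

    ds = tf.data.Dataset.range(10)
    a = ds.map(lambda x: x).batch(10)  # map, then batch
    b = ds.batch(10).map(lambda x: x)  # batch, then map
    # Both yield the single batch [0, 1, ..., 9].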
/external/tensorflow/tensorflow/python/grappler/ |
D | datasets_test.py |
    50   dataset = dataset_ops.Dataset.from_tensors(test_case['tensor'])
    51   iterator = dataset_ops.make_one_shot_iterator(dataset)
    75   dataset = dataset_ops.Dataset.from_tensor_slices(test_case['tensor'])
    76   iterator = dataset_ops.make_one_shot_iterator(dataset)
    108  dataset = dataset_ops.Dataset.from_generator(
    112  iterator = dataset_ops.make_one_shot_iterator(dataset)
    124  dataset = dataset_ops.Dataset.range(42)
    125  iterator = dataset_ops.make_one_shot_iterator(dataset)
    149  dataset = dataset_ops.Dataset.from_tensors(test_case['tensor'])
    150  dataset = fn(dataset, test_case['tensor'], test_case['shape'])
    [all …]
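Besides from_tensors, from_tensor_slices, and range, the grappler test covers Dataset.from_generator (line 108), whose element shapes grappler must infer. A minimal from_generator sketch with an illustrative generator:

    import tensorflow as tf

    def gen():
      for i in range(42):
        yield i

    dataset = tf.data.Dataset.from_generator(gen, output_types=tf.int64)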
/external/tensorflow/tensorflow/contrib/distribute/python/ |
D | keras_backward_compat_test.py |
    100  dataset = dataset_ops.Dataset.from_tensor_slices((x_train, y_train))
    101  dataset = dataset.batch(32)
    102  return dataset
    114  dataset = dataset_ops.Dataset.from_tensor_slices((x_test, y_test))
    115  dataset = dataset.batch(32)
    116  return dataset
    162  def batch_wrapper(dataset, batch_size, distribution, repeat=None):  argument
    164  dataset = dataset.repeat(repeat)
    168  return dataset.batch(batch_size, drop_remainder=True)
    170  return dataset.batch(batch_size)
    [all …]
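batch_wrapper (line 162) exists because some distribution strategies need statically shaped batches; the elided condition between lines 164 and 168 selects drop_remainder=True for those (TPU-style) strategies. The distinction in public-API terms, with illustrative shapes:

    import tensorflow as tf

    dataset = tf.data.Dataset.from_tensor_slices(
        (tf.zeros((100, 3)), tf.zeros((100, 1)))).repeat(2)
    static = dataset.batch(32, drop_remainder=True)  # batch dim always 32
    default = dataset.batch(32)                      # last batch may be short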
/external/tensorflow/tensorflow/python/data/experimental/ops/ |
D | readers.py |
    206  dataset, num_epochs, shuffle, shuffle_buffer_size, shuffle_seed):  argument
    210  return dataset.apply(
    214  return dataset.shuffle(shuffle_buffer_size, shuffle_seed)
    216  return dataset.repeat(num_epochs)
    217  return dataset
    281  dataset = core_readers.TFRecordDataset(
    287  dataset = _maybe_shuffle_and_repeat(
    288  dataset, num_epochs, shuffle, shuffle_buffer_size, shuffle_seed)
    297  dataset = dataset.batch(batch_size, drop_remainder=drop_final_batch)
    301  dataset = dataset.apply(batching.map_and_batch(
    [all …]
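_maybe_shuffle_and_repeat (whose signature tail appears at line 206) prefers the fused shuffle-and-repeat transformation when both are requested, falling back to plain shuffle or repeat otherwise. Fused versus unfused, with illustrative sizes:

    import tensorflow as tf

    num_epochs, buffer_size = 10, 10000
    dataset = tf.data.Dataset.range(100)
    fused = dataset.apply(
        tf.data.experimental.shuffle_and_repeat(buffer_size, count=num_epochs))
    unfused = dataset.shuffle(buffer_size).repeat(num_epochs)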
D | batching.py |
    46   def batch_window(dataset):  argument
    55   dataset_output_classes = dataset_ops.get_legacy_output_classes(dataset)
    59   return _batch_dense_window(dataset)
    61   return _batch_sparse_window(dataset)
    66   def _batch_dense_window(dataset):  argument
    82   dataset_output_shapes = dataset_ops.get_legacy_output_shapes(dataset)
    86   first_element = get_single_element.get_single_element(dataset.take(1))
    90   dataset.apply(grouping.group_by_reducer(key_fn, shape_reducer)))
    95   batch_shape, dtype=dataset_ops.get_legacy_output_types(dataset))
    102  dataset.apply(grouping.group_by_reducer(key_fn, batch_reducer)))
    [all …]
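_batch_dense_window hand-rolls something padded batching gives for free: it reduces over the window to find a common batch shape, then fills the batch. A loose public-API counterpart for a single window of variable-length elements (illustrative data; the internal reducer-based version differs in mechanics):

    import tensorflow as tf

    # Elements [0], [1, 1], [2, 2, 2] are padded to the longest and stacked.
    window = tf.data.Dataset.range(3).map(lambda x: tf.fill([x + 1], x))
    batched = window.padded_batch(3, padded_shapes=[None])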
/external/tensorflow/tensorflow/core/kernels/data/ |
D | reader_dataset_ops.cc |
    166  if (current_file_index_ == dataset()->filenames_.size()) {  in GetNextInternal()
    220  if (current_file_index_ >= dataset()->filenames_.size()) {  in SetupStreamsLocked()
    223  " >= filenames_.size():", dataset()->filenames_.size());  in SetupStreamsLocked()
    228  dataset()->filenames_[current_file_index_], &file_));  in SetupStreamsLocked()
    232  if (dataset()->use_compression_) {  in SetupStreamsLocked()
    234  input_stream_.get(), dataset()->options_.input_buffer_size,  in SetupStreamsLocked()
    235  dataset()->options_.input_buffer_size, dataset()->options_);  in SetupStreamsLocked()
    237  zlib_input_stream_.get(), dataset()->options_.input_buffer_size,  in SetupStreamsLocked()
    241  input_stream_.get(), dataset()->options_.input_buffer_size,  in SetupStreamsLocked()
    423  input_buffer_->ReadNBytes(dataset()->record_bytes_, &record));  in GetNextInternal()
    [all …]
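This kernel backs the Python-level file readers; the members visible above map to constructor arguments on the Python side: use_compression_ and the zlib stream to compression_type, options_.input_buffer_size to buffer_size, and record_bytes_ to the fixed-length reader's record_bytes. In Python terms, with hypothetical paths:

    import tensorflow as tf

    lines = tf.data.TextLineDataset(
        "/tmp/log.txt.gz", compression_type="GZIP", buffer_size=256 * 1024)
    fixed = tf.data.FixedLengthRecordDataset("/tmp/data.bin", record_bytes=4)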