/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/optimization/
inject_prefetch_test.py
    32  def _enable_autotune_buffers(self, dataset):  [argument]
    35  return dataset.with_options(options)
    39  dataset = dataset_ops.Dataset.range(100)
    40  dataset = dataset.apply(
    42  dataset = dataset.map(
    44  dataset = dataset.take(50)
    45  dataset = self._enable_autotune_buffers(dataset)
    46  self.assertDatasetProduces(dataset, range(1, 51))
    50  dataset = dataset_ops.Dataset.range(100)
    51  dataset = dataset.apply(
    [all …]
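Note: a minimal sketch of the pattern this test exercises, using the public TF 2.x surface instead of the test-internal helpers (the autotune option names are version-dependent, so treat them as an assumption):

    import tensorflow as tf

    def enable_autotune_buffers(dataset):
        # Opt the pipeline in to autotuned prefetch buffer sizes, as the
        # test helper at line 32 does with the internal API.
        options = tf.data.Options()
        options.experimental_optimization.autotune = True
        options.experimental_optimization.autotune_buffers = True
        return dataset.with_options(options)

    dataset = tf.data.Dataset.range(100).map(lambda x: x + 1).take(50)
    dataset = enable_autotune_buffers(dataset)  # then iterate as usual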
choose_fastest_branch_dataset_test.py
    39  dataset = dataset_ops.Dataset.from_tensor_slices([0, 1, 2, 3, 4])
    41  def branch(dataset):  [argument]
    42  return dataset.map(lambda x: x)
    45  dataset, [branch, branch])
    50  expected_shapes=dataset_ops.get_legacy_output_shapes(dataset))
    54  dataset = dataset_ops.Dataset.range(10)
    59  def branch_0(dataset):  [argument]
    60  return dataset.map(lambda x: x + const_64)
    62  def branch_1(dataset):  [argument]
    63  return dataset.map(lambda x: x + math_ops.cast(const_32, dtypes.int64))
    [all …]
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/
auto_shard_dataset_test.py
    52  def getAllDatasetElements(self, dataset):  [argument]
    54  next_fn = self.getNext(dataset)
    62  def assertDatasetProducesWithShuffle(self, dataset, expected, batch,  [argument]
    66  next_fn = self.getNext(dataset)
    78  self.assertDatasetProduces(dataset, list(chunk(expected, batch)))
    85  dataset = dataset_ops.Dataset.list_files(
    87  dataset = dataset.flat_map(core_readers.TFRecordDataset)
    88  dataset = dataset.batch(5)
    89  dataset = distribute._AutoShardDataset(dataset, 5, 3)
    96  self.assertDatasetProducesWithShuffle(dataset, expected, 5, 4, shuffle)
    [all …]
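Note: distribute._AutoShardDataset(dataset, 5, 3) is internal; a hedged public-API sketch of the file-level sharding it aims for, with a hypothetical glob path:

    import tensorflow as tf

    # Shard by file before reading, so worker 3 of 5 reads a disjoint
    # subset of the input files.
    files = tf.data.Dataset.list_files("/tmp/data/*.tfrecord", shuffle=False)
    files = files.shard(num_shards=5, index=3)
    dataset = files.flat_map(tf.data.TFRecordDataset)
    dataset = dataset.batch(5)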
prefetch_with_slack_test.py
    40  dataset = dataset_ops.Dataset.range(10)
    41  dataset = dataset.prefetch(1)
    44  dataset = dataset.with_options(options)
    46  dataset, ["/cpu:1", "/cpu:2"])
    47  dataset = multi_device_iterator._dataset  # pylint: disable=protected-access
    48  self.assertIn("slack", dataset.options()._graph_rewrites())
    50  dataset.options()._graph_rewrite_configs())
    67  dataset = dataset_ops.Dataset.range(10)
    68  dataset = dataset.prefetch(1)
    71  dataset = dataset.with_options(options)
    [all …]
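Note: the options configured around lines 41-44 enable the slack rewrite that the test then asserts on. With the public API this is a one-flag opt-in (TF 2.x option name):

    import tensorflow as tf

    dataset = tf.data.Dataset.range(10).prefetch(1)
    options = tf.data.Options()
    # Ask the final prefetch to introduce slack, which helps a
    # MultiDeviceIterator feeding several devices stay ahead.
    options.experimental_slack = True
    dataset = dataset.with_options(options)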
non_serializable_test.py
    33  dataset = dataset_ops.Dataset.from_tensors(0)
    34  dataset = dataset.apply(testing.assert_next(["FiniteSkip"]))
    35  dataset = dataset.skip(0)  # Should not be removed by noop elimination
    36  dataset = dataset.apply(testing.non_serializable())
    37  dataset = dataset.apply(testing.assert_next(["MemoryCacheImpl"]))
    38  dataset = dataset.skip(0)  # Should be removed by noop elimination
    39  dataset = dataset.cache()
    43  dataset = dataset.with_options(options)
    44  self.assertDatasetProduces(dataset, expected_output=[0])
    49  dataset = dataset_ops.Dataset.from_tensors(0)
    [all …]
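Note: skip(0) is a no-op the optimizer may delete, unless a non-serializable op blocks the rewrite, which is what this test checks. A sketch of opting in via public options (the noop_elimination option name is an assumption about this TF version):

    import tensorflow as tf

    dataset = tf.data.Dataset.from_tensors(0)
    dataset = dataset.skip(0)   # a candidate for noop elimination
    dataset = dataset.cache()
    options = tf.data.Options()
    options.experimental_optimization.noop_elimination = True
    dataset = dataset.with_options(options)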
snapshot_test.py
    94  dataset = dataset_ops.Dataset.range(1000)
    95  dataset = dataset.apply(snapshot.snapshot(tmpdir))
    96  self.assertDatasetProduces(dataset, list(range(1000)))
    98  dataset = dataset_ops.Dataset.range(1001)
    99  dataset = dataset.apply(snapshot.snapshot(tmpdir))
   100  self.assertDatasetProduces(dataset, list(range(1001)))
   153  dataset = dataset_ops.Dataset.range(1000)
   154  dataset = dataset.apply(snapshot.snapshot(tmpdir, compression=compression))
   155  self.assertDatasetProduces(dataset, list(range(1000)))
   163  dataset = dataset_ops.Dataset.range(10)
    [all …]
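Note: a sketch using the public successor of the vendored snapshot op, tf.data.experimental.snapshot (available from roughly TF 2.3; the path and compression mirror the test):

    import tensorflow as tf

    path = "/tmp/snapshot_dir"  # stands in for the test's tmpdir
    dataset = tf.data.Dataset.range(1000)
    # The first full pass writes the snapshot; later runs of the same
    # pipeline read the materialized copy back instead of recomputing.
    dataset = dataset.apply(
        tf.data.experimental.snapshot(path, compression="GZIP"))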
rebatch_dataset_test.py
    48  def _flat_shapes(dataset):  [argument]
    49  return nest.flatten(dataset_ops.get_legacy_output_shapes(dataset))
    58  dataset = dataset_ops.Dataset.range(1024).batch(
    60  rebatched_dataset = distribute._RebatchDataset(dataset, num_replicas=4)
    69  dataset = dataset_ops.Dataset.from_tensors("xxx")
    72  dataset = dataset.map(image_ops.decode_image)
    73  self.assertEqual([tensor_shape.TensorShape(None)], _flat_shapes(dataset))
    74  rebatched_dataset = distribute._RebatchDataset(dataset, num_replicas=4)
    81  dataset = dataset_ops.Dataset.range(1000)
    82  dataset = dataset.batch(10, drop_remainder=False)
    [all …]
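Note: _RebatchDataset(dataset, num_replicas=4) divides the batch dimension so four replicas together still consume the original global batch. A rough public-API approximation (the real op also copes with statically unknown batch dims, which the decode_image case above probes):

    import tensorflow as tf

    global_batch, num_replicas = 32, 4
    dataset = tf.data.Dataset.range(1024).batch(global_batch,
                                                drop_remainder=True)
    # Split each global batch of 32 into per-replica batches of 8.
    per_replica = dataset.unbatch().batch(global_batch // num_replicas)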
optimize_dataset_test.py
   110  dataset = dataset_ops.Dataset.range(
   114  dataset = dataset.with_options(options)
   115  get_next = self.getNext(dataset)
   122  dataset = dataset_ops.Dataset.from_tensors(input_t)
   125  dataset = dataset.with_options(options)
   126  iterator = dataset_ops.make_initializable_iterator(dataset)
   138  dataset = dataset_ops.Dataset.from_tensor_slices(input_t)
   141  dataset = dataset.with_options(options)
   142  iterator = dataset_ops.make_initializable_iterator(dataset)
   154  dataset = dataset_ops.Dataset.from_tensors(0)
    [all …]
/external/tensorflow/tensorflow/python/data/experimental/benchmarks/
snapshot_dataset_benchmark.py
    50  dataset = dataset_ops.Dataset.from_tensor_slices([1.0])
    51  dataset = dataset.map(
    53  dataset = dataset.repeat(num_elems)
    54  dataset = dataset.apply(snapshot.snapshot(tmp_dir, compression=compression))
    56  return dataset
    58  def _consumeDataset(self, dataset, num_elems):  [argument]
    59  dataset = dataset.skip(num_elems)
    60  next_element = dataset_ops.make_one_shot_iterator(dataset).get_next()
    69  dataset = self._createSimpleDataset(
    72  self.run_and_report_benchmark(dataset, num_elems, "write_gzip",
    [all …]
autotune_benchmark.py
    34  def _run_benchmark(self, dataset, autotune, autotune_buffers,  [argument]
    40  dataset = dataset.with_options(options)
    41  iterator = dataset_ops.make_one_shot_iterator(dataset)
    76  dataset = dataset_ops.Dataset.from_tensors(
    78  dataset = dataset.map(
    81  dataset,
    98  dataset = dataset_ops.Dataset.from_tensors(
   100  dataset = dataset.map(
   102  dataset = dataset.batch(batch_size=batch_size)
   104  dataset,
    [all …]
unbatch_benchmark.py
    39  dataset = dataset_ops.Dataset.from_tensors("element").repeat(None)
    41  dataset = dataset.batch(batch_size_placeholder)
    42  dataset = dataset.unbatch()
    43  dataset = dataset.skip(elems_per_trial)
    46  dataset = dataset.with_options(options)
    47  iterator = dataset_ops.make_initializable_iterator(dataset)
    76  dataset = dataset_ops.Dataset.from_tensors("element").repeat(None)
    78  dataset = dataset.batch(batch_size_placeholder)
    79  dataset = dataset.flat_map(dataset_ops.Dataset.from_tensor_slices)
    80  dataset = dataset.skip(elems_per_trial)
    [all …]
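Note: the two benchmarked pipelines split batches back into elements in different ways but yield identical streams. A sketch of both:

    import tensorflow as tf

    batched = tf.data.Dataset.from_tensors("element").repeat().batch(128)

    # Native unbatch kernel (what lines 39-42 time):
    native = batched.unbatch()

    # The older rewrite it is measured against (lines 76-79):
    rewrite = batched.flat_map(tf.data.Dataset.from_tensor_slices)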
/external/tensorflow/tensorflow/python/distribute/
input_ops_test.py
    47  def _getNext(self, dataset):  [argument]
    49  iterator = iter(dataset)
    52  iterator = dataset_ops.make_one_shot_iterator(dataset)
   103  def _verifySimpleShardingOutput(self, dataset, record_fn):  [argument]
   104  next_element_fn = self._getNext(dataset)
   114  dataset = readers.TFRecordDataset(self._createTFRecordFiles())
   115  dataset = input_ops.auto_shard_dataset(
   116  dataset, self._num_shards, self._shard_index)
   118  self._verifySimpleShardingOutput(dataset, self._record)
   122  dataset = dataset_ops.Dataset.from_tensor_slices(
    [all …]
/external/tensorflow/tensorflow/python/data/kernel_tests/
map_test.py
    63  def new_map_fn(dataset, *args, **kwargs):  [argument]
    64  return dataset.map(*args, **kwargs)
    66  def legacy_map_fn(dataset, *args, **kwargs):  [argument]
    67  return dataset.map_with_legacy_function(*args, **kwargs)
    84  def new_map_fn(dataset, *args, **kwargs):  [argument]
    85  return dataset.map(*args, **kwargs)
   148  dataset = dataset_ops.Dataset.range(num_elements)
   149  dataset = apply_map(dataset, map_fn, num_parallel_calls).with_options(options)
   150  return dataset, coordination_events
   160  dataset = dataset_ops.Dataset.from_tensor_slices(components)
    [all …]
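Note: new_map_fn and legacy_map_fn let one test body cover both map() and the 1.x-era map_with_legacy_function(). The parallel public form looks like this (tf.data.AUTOTUNE is the TF 2.4+ spelling; older releases use tf.data.experimental.AUTOTUNE):

    import tensorflow as tf

    dataset = tf.data.Dataset.range(100)
    # Apply the function on multiple elements concurrently, letting the
    # runtime pick the parallelism.
    dataset = dataset.map(lambda x: x * 2,
                          num_parallel_calls=tf.data.AUTOTUNE)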
filter_test.py
    35  def filter_fn(dataset, predicate):  [argument]
    36  return dataset.filter(predicate)
    38  def legacy_filter_fn(dataset, predicate):  [argument]
    39  return dataset.filter_with_legacy_function(predicate)
    68  dataset = dataset_ops.Dataset.from_tensor_slices(components).map(
    71  dataset = apply_filter(
    72  dataset,
    77  [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    78  get_next = self.getNext(dataset)
    95  dataset = dataset_ops.Dataset.range(4)
    [all …]
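Note: as in map_test, the wrappers run each case against filter() and its legacy-function twin. The public behavior in brief:

    import tensorflow as tf

    # Keep only elements for which the predicate is true.
    dataset = tf.data.Dataset.range(4).filter(lambda x: x % 2 == 0)
    print(list(dataset.as_numpy_iterator()))  # [0, 2]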
from_tensors_test.py
    48  dataset = dataset_ops.Dataset.from_tensors(components)
    52  nest.flatten(dataset_ops.get_legacy_output_shapes(dataset)))
    54  self.assertDatasetProduces(dataset, expected_output=[components])
    59  dataset = dataset_ops.Dataset.from_tensors(dataset_ops.Dataset.range(10))
    60  dataset = dataset.flat_map(lambda x: x)
    61  self.assertDatasetProduces(dataset, expected_output=range(10))
    70  dataset = dataset_ops.Dataset.from_tensors(components)
    73  dataset, expected_output=[[1.0, 2.0]], requires_initialization=True)
    87  dataset = dataset_ops.Dataset.from_tensors(components)
    91  [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    [all …]
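Note: these cases hinge on from_tensors() treating its argument as a single element, including a whole Dataset as one element at line 59. A sketch of the distinction:

    import tensorflow as tf

    one = tf.data.Dataset.from_tensors([1.0, 2.0])         # 1 element: [1.0, 2.0]
    many = tf.data.Dataset.from_tensor_slices([1.0, 2.0])  # 2 elements: 1.0, 2.0

    # A dataset whose single element is itself a dataset, flattened back out:
    nested = tf.data.Dataset.from_tensors(tf.data.Dataset.range(10))
    flat = nested.flat_map(lambda ds: ds)                  # yields 0..9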
shard_test.py
    33  dataset = dataset_ops.Dataset.range(10).shard(5, 2)
    34  self.assertDatasetProduces(dataset, expected_output=[2, 7])
    40  dataset = dataset_ops.Dataset.zip((dataset_a, dataset_b)).shard(5, 2)
    41  self.assertDatasetProduces(dataset, expected_output=[(2, 8), (7, 3)])
    45  dataset = dataset_ops.Dataset.range(10).shard(5, 0)
    46  self.assertDatasetProduces(dataset, expected_output=[0, 5])
    51  dataset = dataset_ops.Dataset.range(10).shard(5, 7)
    52  self.evaluate(self.getNext(dataset)())
    57  dataset = dataset_ops.Dataset.range(10).shard(5, -3)
    58  self.evaluate(self.getNext(dataset)())
    [all …]
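Note: the expected outputs follow from shard()'s rule that element i is kept iff i % num_shards == index; an index outside [0, num_shards), such as 7 or -3 above, only fails once the iterator is evaluated, hence the getNext() calls in the error cases. A sketch:

    import tensorflow as tf

    dataset = tf.data.Dataset.range(10).shard(num_shards=5, index=2)
    print(list(dataset.as_numpy_iterator()))  # [2, 7]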
dataset_test.py
    53  dataset = dataset_ops.Dataset.range(10)
    55  self.evaluate(dataset._as_serialized_graph()))
    59  dataset = dataset_ops.Dataset.range(10).map(
    63  dataset._as_serialized_graph(external_state_policy=distribute_options
    93  def _testNumInputs(self, dataset, num_inputs):  [argument]
    94  self.assertLen(dataset._inputs(), num_inputs)
    98  dataset = readers.FixedLengthRecordDataset("", 42)
    99  self._testNumInputs(dataset, 0)
   106  dataset = dataset_ops.Dataset.from_generator(gen, dtypes.int32)
   107  self._testNumInputs(dataset, 1)
    [all …]
/external/tensorflow/tensorflow/python/keras/engine/
training_dataset_test.py
    71  dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    72  dataset = dataset.repeat(100)
    73  dataset = dataset.batch(10)
    76  model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
    77  validation_data=dataset, validation_steps=2)
    78  model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
    79  validation_data=dataset, validation_steps=2)
    97  dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    98  dataset = dataset.repeat()  # Infinite dataset.
    99  dataset = dataset.batch(10)
    [all …]
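Note: repeat() with no count makes the dataset infinite, so fit() needs steps_per_epoch and validation_steps to bound each epoch. A self-contained sketch (the toy model and shapes are illustrative, not the test's):

    import numpy as np
    import tensorflow as tf

    model = tf.keras.Sequential([tf.keras.layers.Dense(1)])
    model.compile(optimizer="sgd", loss="mse")

    inputs = np.ones((100, 4), dtype=np.float32)
    targets = np.ones((100, 1), dtype=np.float32)
    dataset = tf.data.Dataset.from_tensor_slices((inputs, targets))
    dataset = dataset.repeat().batch(10)   # infinite, as in the test

    model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
              validation_data=dataset, validation_steps=2)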
/external/protobuf/benchmarks/cpp/
cpp_benchmark.cc
    54  Fixture(const BenchmarkDataset& dataset, const std::string& suffix) {  [in Fixture(), argument]
    55  for (int i = 0; i < dataset.payload_size(); i++) {  [in Fixture()]
    56  payloads_.push_back(dataset.payload(i));  [in Fixture()]
    61  dataset.message_name());  [in Fixture()]
    64  std::cerr << "Couldn't find message named '" << dataset.message_name()  [in Fixture()]
    69  SetName((dataset.name() + suffix).c_str());  [in Fixture()]
    97  ParseNewFixture(const BenchmarkDataset& dataset)  [in ParseNewFixture(), argument]
    98  : Fixture(dataset, "_parse_new") {}  [in ParseNewFixture()]
   118  ParseNewArenaFixture(const BenchmarkDataset& dataset)  [in ParseNewArenaFixture(), argument]
   119  : Fixture(dataset, "_parse_newarena") {}  [in ParseNewArenaFixture()]
    [all …]
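Note: from the accessors used above, BenchmarkDataset carries a name, a message_name, and repeated payload bytes. A hypothetical Python analogue of the fixture setup (the benchmarks_pb2 module name and the symbol-database lookup are assumptions; the generated module must already be imported for GetSymbol to resolve the type):

    from google.protobuf import symbol_database

    def load_payloads(dataset):
        # dataset: a benchmarks_pb2.BenchmarkDataset instance.
        # Resolve the message type by its full name, as the C++ fixture
        # does via the message factory; raises KeyError if unknown.
        msg_class = symbol_database.Default().GetSymbol(dataset.message_name)
        parsed = []
        for payload in dataset.payload:
            msg = msg_class()
            msg.ParseFromString(payload)
            parsed.append(msg)
        return parsed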
/external/tensorflow/tensorflow/python/data/benchmarks/
meta_benchmark.py
    45  dataset = self.setup_fast_dataset()
    46  self.run_benchmark_with_only_cpp_iterations(dataset)
    49  dataset = self.setup_fast_dataset()
    50  self.run_benchmark_with_session_run(dataset)
    53  dataset = self.setup_fast_dataset()
    54  self.run_benchmark_with_session_run(dataset, make_callable=True)
    58  dataset = self.setup_fast_dataset()
    59  self.run_benchmark_in_eager(dataset)
    62  dataset = self.setup_fast_dataset()
    65  return dataset.apply(testing.sleep(1000))
    [all …]
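Note: the meta-benchmark drives one dataset through session.run, a made-callable session, C++-only iteration, and eager mode to measure client-side overhead. A minimal eager-mode timing loop in the same spirit (a sketch, not the harness itself):

    import time
    import tensorflow as tf

    def time_eager_iteration(dataset, num_elements=10000):
        start = time.time()
        for n, _ in enumerate(dataset.take(num_elements)):
            pass
        wall = time.time() - start
        print(f"{(n + 1) / wall:.0f} elements/sec")

    time_eager_iteration(tf.data.Dataset.range(10000))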
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/serialization/
choose_fastest_branch_dataset_serialization_test.py
    41  dataset = dataset_ops.Dataset.range(size)
    43  def branch_0(dataset):  [argument]
    44  return dataset.map(lambda x: x).batch(10)
    46  def branch_1(dataset):  [argument]
    47  return dataset.batch(10).map(lambda x: x)
    50  dataset, [branch_0, branch_1],
    60  dataset = dataset_ops.Dataset.range(10)
    64  def branch_0(dataset):  [argument]
    65  return dataset.map(lambda x: x + const_64)
    67  def branch_1(dataset):  [argument]
    [all …]
/external/guava/guava/src/com/google/common/math/
Quantiles.java
   232  public double compute(Collection<? extends Number> dataset) {  [in compute(), argument]
   233  return computeInPlace(Doubles.toArray(dataset));  [in compute()]
   243  public double compute(double... dataset) {  [in compute(), argument]
   244  return computeInPlace(dataset.clone());  [in compute()]
   255  public double compute(long... dataset) {  [in compute(), argument]
   256  return computeInPlace(longsToDoubles(dataset));  [in compute()]
   266  public double compute(int... dataset) {  [in compute(), argument]
   267  return computeInPlace(intsToDoubles(dataset));  [in compute()]
   277  public double computeInPlace(double... dataset) {  [in computeInPlace(), argument]
   278  checkArgument(dataset.length > 0, "Cannot calculate quantiles of an empty dataset");  [in computeInPlace()]
    [all …]
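Note: Guava places the k-th q-quantile at scaled index (N - 1) * k / q and interpolates linearly between the two surrounding order statistics; computeInPlace() selects in place rather than sorting, but sorting keeps a sketch short. In Python:

    def quantile(k, q, data):
        # k-th q-quantile with linear interpolation, mirroring Guava's rule.
        xs = sorted(data)
        if not xs:
            raise ValueError("Cannot calculate quantiles of an empty dataset")
        pos = (len(xs) - 1) * k / q
        lo = int(pos)
        frac = pos - lo
        if frac == 0:
            return float(xs[lo])
        return xs[lo] + frac * (xs[lo + 1] - xs[lo])

    quantile(1, 2, [3.0, 1.0, 2.0])   # median -> 2.0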
/external/guava/android/guava/src/com/google/common/math/
Quantiles.java
   232  public double compute(Collection<? extends Number> dataset) {  [in compute(), argument]
   233  return computeInPlace(Doubles.toArray(dataset));  [in compute()]
   243  public double compute(double... dataset) {  [in compute(), argument]
   244  return computeInPlace(dataset.clone());  [in compute()]
   255  public double compute(long... dataset) {  [in compute(), argument]
   256  return computeInPlace(longsToDoubles(dataset));  [in compute()]
   266  public double compute(int... dataset) {  [in compute(), argument]
   267  return computeInPlace(intsToDoubles(dataset));  [in compute()]
   277  public double computeInPlace(double... dataset) {  [in computeInPlace(), argument]
   278  checkArgument(dataset.length > 0, "Cannot calculate quantiles of an empty dataset");  [in computeInPlace()]
    [all …]
/external/tensorflow/tensorflow/python/data/experimental/ops/
readers.py
   207  dataset, num_epochs, shuffle, shuffle_buffer_size, shuffle_seed):  [argument]
   210  dataset = dataset.shuffle(shuffle_buffer_size, shuffle_seed)
   212  dataset = dataset.repeat(num_epochs)
   213  return dataset
   286  dataset = core_readers.TFRecordDataset(
   292  dataset = _maybe_shuffle_and_repeat(
   293  dataset, num_epochs, shuffle, shuffle_buffer_size, shuffle_seed)
   302  dataset = dataset.batch(batch_size, drop_remainder=drop_final_batch)
   304  dataset = dataset.map(
   306  dataset = dataset.batch(batch_size, drop_remainder=drop_final_batch)
    [all …]
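Note: the helper whose signature continues at line 207 centralizes the optional shuffle-then-repeat prologue of the reader pipelines. A simplified reconstruction (the guarding if-statements fall in the elided lines, so their exact shape is an assumption):

    import tensorflow as tf

    def maybe_shuffle_and_repeat(dataset, num_epochs, shuffle,
                                 shuffle_buffer_size, shuffle_seed):
        if shuffle:
            dataset = dataset.shuffle(shuffle_buffer_size, shuffle_seed)
        if num_epochs != 1:
            dataset = dataset.repeat(num_epochs)
        return dataset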
/external/tensorflow/tensorflow/core/kernels/data/
fixed_length_record_dataset_op.cc
   140  input_buffer_->ReadNBytes(dataset()->record_bytes_, &record));  [in GetNextInternal()]
   142  dataset()->record_bytes_);  [in GetNextInternal()]
   160  if (current_file_index_ == dataset()->filenames_.size()) {  [in GetNextInternal()]
   168  dataset()->filenames_[current_file_index_], &file_size));  [in GetNextInternal()]
   169  file_pos_limit_ = file_size - dataset()->footer_bytes_;  [in GetNextInternal()]
   172  file_size - (dataset()->header_bytes_ + dataset()->footer_bytes_);  [in GetNextInternal()]
   174  if (body_size % dataset()->record_bytes_ != 0) {  [in GetNextInternal()]
   176  "Excluding the header (", dataset()->header_bytes_,  [in GetNextInternal()]
   177  " bytes) and footer (", dataset()->footer_bytes_,  [in GetNextInternal()]
   179  dataset()->filenames_[current_file_index_], "\" has body length ",  [in GetNextInternal()]
    [all …]
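Note: the arithmetic being enforced is that, after stripping the header and footer, a file's body must divide evenly into fixed-size records. The same check, sketched in Python:

    def num_records(file_size, header_bytes, footer_bytes, record_bytes):
        # Bytes available for records once the header and footer are excluded.
        body_size = file_size - (header_bytes + footer_bytes)
        if body_size % record_bytes != 0:
            raise ValueError(
                f"body length {body_size} is not a multiple of "
                f"record size {record_bytes}")
        return body_size // record_bytes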