/external/tensorflow/tensorflow/python/keras/engine/
  base_preprocessing_layer_test.py
    140: input_dataset = {"foo": 0}
    145: layer.adapt(input_dataset)
    148: layer.adapt(input_dataset)
    152: input_dataset = dataset_ops.Dataset.from_tensor_slices(
    158: layer.adapt(input_dataset)
    162: layer.adapt(input_dataset)
    166: input_dataset = np.array([1, 2, 3, 4, 5])
    170: updates = combiner.extract(combiner.compute(input_dataset))
    189: input_dataset = np.array([1, 2, 3, 4, 5])
    192: layer.adapt(input_dataset)
    [all …]
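These hits exercise adapt() on Keras preprocessing layers, which computes layer state from sample data before training. A minimal sketch, using the Normalization layer from this era of the API (an illustrative choice; any adaptable preprocessing layer follows the same pattern):

    import numpy as np
    import tensorflow as tf

    # adapt() computes the layer's state (here the mean and variance) from the
    # data it is shown, before the layer is used in a model.
    layer = tf.keras.layers.experimental.preprocessing.Normalization()
    layer.adapt(np.array([[1.0], [2.0], [3.0], [4.0], [5.0]]))
    print(layer(np.array([[3.0]])))  # ~0.0, since the adapted mean is 3.0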
/external/tensorflow/tensorflow/python/data/experimental/ops/
  distribute.py
    69: def __init__(self, input_dataset, num_workers, index, num_replicas=None):
    70: self._input_dataset = input_dataset
    72: self._element_spec = input_dataset.element_spec
    78: input_dataset.options().experimental_distribute.auto_shard_policy),
    81: super(_AutoShardDataset, self).__init__(input_dataset, variant_tensor)
    88: def _AutoShardDatasetV1(input_dataset, num_workers, index, num_replicas=None):  # pylint: disable=i…
    90: _AutoShardDataset(input_dataset, num_workers, index, num_replicas))
    120: def __init__(self, input_dataset, batch_sizes, drop_remainder=False):
    133: self._input_dataset = input_dataset
    143: dataset_ops.get_structure(input_dataset))
    [all …]
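For context, the auto-shard policy that _AutoShardDataset reads off input_dataset.options() (line 78 above) is normally set by the user through tf.data.Options. A minimal sketch, assuming the TF 2.x names for the option and the policy enum:

    import tensorflow as tf

    # Ask tf.data's auto-sharding to split by elements rather than by files;
    # _AutoShardDataset picks this policy up from the dataset's options.
    dataset = tf.data.Dataset.range(8)
    options = tf.data.Options()
    options.experimental_distribute.auto_shard_policy = (
        tf.data.experimental.AutoShardPolicy.DATA)
    dataset = dataset.with_options(options)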
  testing.py
    85: def __init__(self, input_dataset, transformations):
    87: self._input_dataset = input_dataset
    97: super(_AssertNextDataset, self).__init__(input_dataset, variant_tensor)
    103: def __init__(self, input_dataset):
    105: self._input_dataset = input_dataset
    110: super(_NonSerializableDataset, self).__init__(input_dataset, variant_tensor)
    116: def __init__(self, input_dataset, sleep_microseconds):
    117: self._input_dataset = input_dataset
    123: super(_SleepDataset, self).__init__(input_dataset, variant_tensor)
  batching.py
    300: def __init__(self, input_dataset, batch_size, row_shape):
    303: dataset_ops.get_legacy_output_types(input_dataset), dtypes.DType):
    306: dataset_ops.get_legacy_output_types(input_dataset))
    307: self._input_dataset = input_dataset
    312: dataset_ops.get_legacy_output_types(input_dataset))
    319: super(_DenseToSparseBatchDataset, self).__init__(input_dataset,
    330: def __init__(self, input_dataset, map_func, batch_size, num_parallel_calls,
    332: self._input_dataset = input_dataset
    337: dataset=input_dataset,
    370: super(_MapAndBatchDataset, self).__init__(input_dataset, variant_tensor)
    [all …]
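_DenseToSparseBatchDataset and _MapAndBatchDataset back the public transformations tf.data.experimental.dense_to_sparse_batch and tf.data.experimental.map_and_batch. A minimal sketch of the former, which batches variable-length rows into one SparseTensor per batch:

    import tensorflow as tf

    # Rows of length 0, 1, 2, 3 are combined into a single sparse batch whose
    # dense shape is bounded by row_shape.
    ds = tf.data.Dataset.range(4).map(lambda t: tf.fill([t], t))
    ds = ds.apply(
        tf.data.experimental.dense_to_sparse_batch(batch_size=4, row_shape=[3]))
    for batch in ds:
        print(batch.dense_shape)  # [4, 3]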
  grouping.py
    275: def __init__(self, input_dataset, key_func, reducer):
    277: self._input_dataset = input_dataset
    278: self._make_key_func(key_func, input_dataset)
    280: self._make_reduce_func(reducer.reduce_func, input_dataset)
    293: super(_GroupByReducerDataset, self).__init__(input_dataset, variant_tensor)
    295: def _make_key_func(self, key_func, input_dataset):
    298: key_func, self._transformation_name(), dataset=input_dataset)
    313: def _make_reduce_func(self, reduce_func, input_dataset):
    328: input_structure=(self._state_structure, input_dataset.element_spec),
    398: def __init__(self, input_dataset, key_func, reduce_func, window_size_func):
    [all …]
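_GroupByReducerDataset is the implementation behind tf.data.experimental.group_by_reducer: key_func buckets the elements of input_dataset, and a Reducer folds each bucket into a single value. A minimal sketch:

    import tensorflow as tf

    # Sum the even and the odd numbers separately; the key (x % 2) selects the
    # bucket, and the Reducer accumulates a per-bucket running total.
    reducer = tf.data.experimental.Reducer(
        init_func=lambda _: tf.constant(0, dtype=tf.int64),
        reduce_func=lambda state, x: state + x,
        finalize_func=lambda state: state)
    ds = tf.data.Dataset.range(10).apply(
        tf.data.experimental.group_by_reducer(
            key_func=lambda x: x % 2, reducer=reducer))
    print(list(ds.as_numpy_iterator()))  # the two sums, 20 and 25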
  snapshot.py
    40: input_dataset,
    81: self._input_dataset = input_dataset
    103: super(_LegacySnapshotDataset, self).__init__(input_dataset, variant_tensor)
    178: input_dataset=dataset,
    201: input_dataset,
    215: self._input_dataset = input_dataset
    224: dataset_ops.DatasetSpec(input_dataset.element_spec)),
    229: dataset=input_dataset,
    240: input_dataset._variant_tensor,  # pylint: disable=protected-access
    248: super(_SnapshotDataset, self).__init__(input_dataset, variant_tensor)
    [all …]
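Both the legacy and the current snapshot datasets here back tf.data.experimental.snapshot, which persists the output of an input pipeline so later runs can read it from disk instead of recomputing it. A minimal sketch, assuming the TF 2.3+ signature and a hypothetical path:

    import tensorflow as tf

    # The first run writes the mapped elements under /tmp/my_snapshot (a made-up
    # path); subsequent runs read the snapshot back instead of re-running map().
    ds = tf.data.Dataset.range(100).map(lambda x: x * 2)
    ds = ds.apply(tf.data.experimental.snapshot("/tmp/my_snapshot"))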
  unique.py
    54: def __init__(self, input_dataset):
    56: self._input_dataset = input_dataset
    57: if dataset_ops.get_legacy_output_types(input_dataset) not in (
    65: super(_UniqueDataset, self).__init__(input_dataset, variant_tensor)
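_UniqueDataset backs tf.data.experimental.unique, which drops duplicate elements; the check at line 57 restricts it to a small set of scalar dtypes. A minimal sketch:

    import tensorflow as tf

    # Duplicates are removed while the first-seen order is preserved.
    ds = tf.data.Dataset.from_tensor_slices([1, 2, 1, 3, 3, 2])
    ds = ds.apply(tf.data.experimental.unique())
    print(list(ds.as_numpy_iterator()))  # [1, 2, 3]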
  sleep.py
    27: def __init__(self, input_dataset, sleep_microseconds):
    28: self._input_dataset = input_dataset
    34: super(_SleepDataset, self).__init__(input_dataset, variant_tensor)
  prefetching_ops.py
    89: def __init__(self, input_dataset, target_device, source_device="/cpu:0"):
    97: self._input_dataset = input_dataset
    212: super(_CopyToDeviceDataset, self).__init__(input_dataset, variant_tensor)
    231: def __init__(self, input_dataset, map_func, use_inter_op_parallelism=True):
    233: self._input_dataset = input_dataset
    239: dataset=input_dataset,
    247: super(_MapOnGpuDataset, self).__init__(input_dataset, variant_tensor)
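_CopyToDeviceDataset backs tf.data.experimental.copy_to_device, which moves the elements of input_dataset from the source device to a target device. A minimal sketch; the "/gpu:0" target is hypothetical and requires a visible GPU:

    import tensorflow as tf

    # Copy elements to the GPU, then prefetch there so the copy overlaps with
    # the consumer's work.
    ds = tf.data.Dataset.range(10)
    ds = ds.apply(tf.data.experimental.copy_to_device("/gpu:0"))
    with tf.device("/gpu:0"):
        ds = ds.prefetch(1)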
  error_ops.py
    63: def __init__(self, input_dataset, log_warning):
    65: self._input_dataset = input_dataset
    77: super(_IgnoreErrorsDataset, self).__init__(input_dataset, variant_tensor)
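_IgnoreErrorsDataset backs tf.data.experimental.ignore_errors: elements of input_dataset whose computation raises an error are silently dropped. A minimal sketch:

    import tensorflow as tf

    # 1.0 / 0.0 produces inf, check_numerics turns that into an error, and
    # ignore_errors() drops the offending element instead of failing iteration.
    ds = tf.data.Dataset.from_tensor_slices([1.0, 2.0, 0.0, 4.0])
    ds = ds.map(lambda x: tf.debugging.check_numerics(1.0 / x, "error"))
    ds = ds.apply(tf.data.experimental.ignore_errors())
    print(list(ds.as_numpy_iterator()))  # [1.0, 0.5, 0.25]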
/external/tensorflow/tensorflow/python/data/ops/
  dataset_ops.py
    216: for input_dataset in self._inputs():
    217: input_options = input_dataset.options()
    341: …[input_dataset._has_captured_ref() for input_dataset in self._inputs()])  # pylint: disable=protec…
    3165: def __init__(self, input_dataset, variant_tensor):
    3166: self._input_dataset = input_dataset
    3176: def __init__(self, input_dataset, variant_tensor):
    3177: self._input_dataset = input_dataset
    3179: input_dataset, variant_tensor)
    3724: def __init__(self, input_dataset, dataset_to_concatenate):
    3726: self._input_dataset = input_dataset
    [all …]
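The hits at lines 216-217 are the logic that merges a dataset's options with those of its input datasets, so options set upstream are visible on downstream transformations. A minimal sketch, assuming the TF 2.x-era option name experimental_deterministic:

    import tensorflow as tf

    # Options attached with with_options() are merged up the input chain, so the
    # mapped dataset reports the value that was set on its input.
    options = tf.data.Options()
    options.experimental_deterministic = False
    ds = tf.data.Dataset.range(10).with_options(options).map(lambda x: x + 1)
    print(ds.options().experimental_deterministic)  # False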
/external/tensorflow/tensorflow/python/data/kernel_tests/
  concatenate_test.py
    45: input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    48: concatenated = input_dataset.concatenate(dataset_to_concatenate)
    77: input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    80: concatenated = input_dataset.concatenate(dataset_to_concatenate)
    109: input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    114: input_dataset.concatenate(dataset_to_concatenate)
    127: input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    132: input_dataset.concatenate(dataset_to_concatenate)
    143: input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    148: input_dataset.concatenate(dataset_to_concatenate)
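The API being exercised is Dataset.concatenate, which requires both datasets to have compatible element structure (the hits around lines 109-148 are the error cases that pass incompatible ones). A short sketch:

    import tensorflow as tf

    # Concatenation appends the second dataset's elements after the first's.
    a = tf.data.Dataset.range(3)        # 0, 1, 2
    b = tf.data.Dataset.range(10, 13)   # 10, 11, 12
    print(list(a.concatenate(b).as_numpy_iterator()))  # [0, 1, 2, 10, 11, 12]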
/external/tensorflow/tensorflow/python/data/experimental/benchmarks/
  choose_fastest_branch_benchmark.py
    30: input_dataset,
    35: ds_0 = branch_0(input_dataset)
    36: ds_1 = branch_1(input_dataset)
    38: input_dataset, [branch_0, branch_1],
    87: input_dataset = make_dataset(1000 * 1000,
    104: input_dataset, slow_branch, fast_branch, 1, num_elements_per_branch=2)
    106: input_dataset, fast_branch, slow_branch, 1, num_elements_per_branch=2)
/external/tensorflow/tensorflow/core/api_def/base_api/
  api_def_DatasetCardinality.pbtxt
    5: name: "input_dataset"
    13: The cardinality of `input_dataset`. Named constants are used to represent
    17: summary: "Returns the cardinality of `input_dataset`."
    19: Returns the cardinality of `input_dataset`.
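This op backs tf.data.experimental.cardinality; the named constants mentioned at line 13 are INFINITE_CARDINALITY and UNKNOWN_CARDINALITY. A short sketch:

    import tensorflow as tf

    ds = tf.data.Dataset.range(5)
    # A plain finite dataset reports its exact size.
    print(tf.data.experimental.cardinality(ds).numpy())  # 5
    # repeat() makes it infinite, filter() makes the size statically unknown.
    print(tf.data.experimental.cardinality(ds.repeat()) ==
          tf.data.experimental.INFINITE_CARDINALITY)      # True
    print(tf.data.experimental.cardinality(ds.filter(lambda x: x > 1)) ==
          tf.data.experimental.UNKNOWN_CARDINALITY)       # True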
  api_def_ExperimentalDatasetCardinality.pbtxt
    5: name: "input_dataset"
    13: The cardinality of `input_dataset`. Named constants are used to represent
    17: summary: "Returns the cardinality of `input_dataset`."
    19: Returns the cardinality of `input_dataset`.
  api_def_ExperimentalGroupByReducerDataset.pbtxt
    5: name: "input_dataset"
    20: A function mapping an element of `input_dataset`, concatenated
    48: A function mapping the current reducer state and an element of `input_dataset`,
    65: summary: "Creates a dataset that computes a group-by on `input_dataset`."
    67: Creates a dataset that computes a group-by on `input_dataset`.
  api_def_GroupByReducerDataset.pbtxt
    5: name: "input_dataset"
    20: A function mapping an element of `input_dataset`, concatenated
    48: A function mapping the current reducer state and an element of `input_dataset`,
    65: summary: "Creates a dataset that computes a group-by on `input_dataset`."
    67: Creates a dataset that computes a group-by on `input_dataset`.
  api_def_TakeDataset.pbtxt
    7: A scalar representing the number of elements from the `input_dataset`
    8: that should be taken. A value of `-1` indicates that all of `input_dataset`
    12: summary: "Creates a dataset that contains `count` elements from the `input_dataset`."
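The Python-level counterpart is Dataset.take; per the description above, a count of -1 keeps every element. A short sketch:

    import tensorflow as tf

    ds = tf.data.Dataset.range(10)
    print(list(ds.take(3).as_numpy_iterator()))   # [0, 1, 2]
    print(list(ds.take(-1).as_numpy_iterator()))  # all ten elements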
  api_def_DatasetToGraph.pbtxt
    5: name: "input_dataset"
    16: summary: "Returns a serialized GraphDef representing `input_dataset`."
    18: Returns a graph representation for `input_dataset`.
  api_def_DatasetToGraphV2.pbtxt
    5: name: "input_dataset"
    16: summary: "Returns a serialized GraphDef representing `input_dataset`."
    18: Returns a graph representation for `input_dataset`.
  api_def_OptimizeDataset.pbtxt
    5: name: "input_dataset"
    16: summary: "Creates a dataset by applying optimizations to `input_dataset`."
    18: Creates a dataset by applying optimizations to `input_dataset`.
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/
  tf_record_writer_test.py
    46: input_dataset = readers.TFRecordDataset([filename], compression_type)
    48: compression_type).write(input_dataset)
    98: input_dataset = dataset_ops.Dataset.from_tensors(10)
    100: writers.TFRecordWriter(self._outputFilename(), "").write(input_dataset)
    104: input_dataset = dataset_ops.Dataset.from_tensors([["hello"], ["world"]])
    106: writers.TFRecordWriter(self._outputFilename(), "").write(input_dataset)
    111: input_dataset = readers.TFRecordDataset(self._createFile())
    112: return writers.TFRecordWriter(self._outputFilename()).write(input_dataset)
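The writer under test is tf.data.experimental.TFRecordWriter, which consumes a dataset of scalar strings and writes one record per element (the hits at lines 98-106 feed non-string or non-scalar elements). A minimal round-trip sketch with a hypothetical output path:

    import tensorflow as tf

    # Write two string elements as TFRecords, then read them back.
    ds = tf.data.Dataset.from_tensor_slices([b"hello", b"world"])
    tf.data.experimental.TFRecordWriter("/tmp/out.tfrecord").write(ds)
    readback = tf.data.TFRecordDataset("/tmp/out.tfrecord")
    print(list(readback.as_numpy_iterator()))  # [b'hello', b'world']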
/external/tensorflow/tensorflow/core/ops/compat/ops_history_v1/
  DatasetToGraph.pbtxt
    4: name: "input_dataset"
    15: name: "input_dataset"
    35: name: "input_dataset"
    62: name: "input_dataset"
/external/tensorflow/tensorflow/python/data/benchmarks/
  from_tensor_slices_benchmark.py
    33: def __init__(self, input_dataset, map_func):
    35: self._input_dataset = input_dataset
    39: dataset=input_dataset,
    43: input_dataset._variant_tensor,  # pylint: disable=protected-access
    47: super(SingleThreadedFlatMapDataset, self).__init__(input_dataset,
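SingleThreadedFlatMapDataset is a single-threaded flat_map variant defined for this benchmark; the public counterpart is Dataset.flat_map, which maps each element of input_dataset to a dataset and flattens the results. A short sketch of the public API:

    import tensorflow as tf

    # Each row becomes its own dataset of scalars, then the rows are flattened.
    ds = tf.data.Dataset.from_tensor_slices([[1, 2, 3], [4, 5, 6]])
    ds = ds.flat_map(tf.data.Dataset.from_tensor_slices)
    print(list(ds.as_numpy_iterator()))  # [1, 2, 3, 4, 5, 6]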
/external/tensorflow/tensorflow/core/ops/compat/ops_history_v2/
  PrefetchDataset.pbtxt
    4: name: "input_dataset"
    32: name: "input_dataset"
    59: name: "input_dataset"
    100: name: "input_dataset"