
Searched refs: input_dataset (Results 1 – 25 of 304), sorted by relevance


/external/tensorflow/tensorflow/python/keras/engine/
base_preprocessing_layer_test.py
140 input_dataset = {"foo": 0}
145 layer.adapt(input_dataset)
148 layer.adapt(input_dataset)
152 input_dataset = dataset_ops.Dataset.from_tensor_slices(
158 layer.adapt(input_dataset)
162 layer.adapt(input_dataset)
166 input_dataset = np.array([1, 2, 3, 4, 5])
170 updates = combiner.extract(combiner.compute(input_dataset))
189 input_dataset = np.array([1, 2, 3, 4, 5])
192 layer.adapt(input_dataset)
[all …]
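For context, a minimal runnable sketch of the adapt() pattern these test hits exercise, using the public Normalization preprocessing layer (my choice of layer for illustration, not the combiner under test):

import numpy as np
import tensorflow as tf

# adapt() accepts a NumPy array or a tf.data.Dataset, the two
# input_dataset forms seen in the hits above.
layer = tf.keras.layers.experimental.preprocessing.Normalization()
input_dataset = np.array([[1.], [2.], [3.], [4.], [5.]])
layer.adapt(input_dataset)      # learns mean and variance from the data
print(layer(np.array([[3.]])))  # ~0.0: 3.0 is the mean of the adapted data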
/external/tensorflow/tensorflow/python/data/experimental/ops/
distribute.py
69 def __init__(self, input_dataset, num_workers, index, num_replicas=None):
70 self._input_dataset = input_dataset
72 self._element_spec = input_dataset.element_spec
78 input_dataset.options().experimental_distribute.auto_shard_policy),
81 super(_AutoShardDataset, self).__init__(input_dataset, variant_tensor)
88 def _AutoShardDatasetV1(input_dataset, num_workers, index, num_replicas=None): # pylint: disable=i…
90 _AutoShardDataset(input_dataset, num_workers, index, num_replicas))
120 def __init__(self, input_dataset, batch_sizes, drop_remainder=False):
133 self._input_dataset = input_dataset
143 dataset_ops.get_structure(input_dataset))
[all …]
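_AutoShardDataset itself is internal, but the auto_shard_policy it reads (line 78 above) is set through the public options API. A hedged sketch of how a pipeline opts into data-based sharding; the dataset and batch size are illustrative:

import tensorflow as tf

dataset = tf.data.Dataset.range(100).batch(10)

# tf.distribute consults this option when it wraps the dataset
# in the auto-shard transformation seen above.
options = tf.data.Options()
options.experimental_distribute.auto_shard_policy = (
    tf.data.experimental.AutoShardPolicy.DATA)
dataset = dataset.with_options(options)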
testing.py
85 def __init__(self, input_dataset, transformations):
87 self._input_dataset = input_dataset
97 super(_AssertNextDataset, self).__init__(input_dataset, variant_tensor)
103 def __init__(self, input_dataset):
105 self._input_dataset = input_dataset
110 super(_NonSerializableDataset, self).__init__(input_dataset, variant_tensor)
116 def __init__(self, input_dataset, sleep_microseconds):
117 self._input_dataset = input_dataset
123 super(_SleepDataset, self).__init__(input_dataset, variant_tensor)
batching.py
300 def __init__(self, input_dataset, batch_size, row_shape):
303 dataset_ops.get_legacy_output_types(input_dataset), dtypes.DType):
306 dataset_ops.get_legacy_output_types(input_dataset))
307 self._input_dataset = input_dataset
312 dataset_ops.get_legacy_output_types(input_dataset))
319 super(_DenseToSparseBatchDataset, self).__init__(input_dataset,
330 def __init__(self, input_dataset, map_func, batch_size, num_parallel_calls,
332 self._input_dataset = input_dataset
337 dataset=input_dataset,
370 super(_MapAndBatchDataset, self).__init__(input_dataset, variant_tensor)
[all …]
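_DenseToSparseBatchDataset backs the public dense_to_sparse_batch transformation. A small runnable sketch; the ragged input data is illustrative:

import tensorflow as tf

# Variable-length rows, the case this transformation is built for.
dataset = tf.data.Dataset.from_tensor_slices([[1, 2, 0], [3, 0, 0], [4, 5, 6]])
dataset = dataset.map(lambda row: tf.boolean_mask(row, row > 0))

# Batches the ragged rows into tf.sparse.SparseTensor values.
dataset = dataset.apply(
    tf.data.experimental.dense_to_sparse_batch(batch_size=2, row_shape=[3]))

for batch in dataset:
    print(tf.sparse.to_dense(batch).numpy())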
grouping.py
275 def __init__(self, input_dataset, key_func, reducer):
277 self._input_dataset = input_dataset
278 self._make_key_func(key_func, input_dataset)
280 self._make_reduce_func(reducer.reduce_func, input_dataset)
293 super(_GroupByReducerDataset, self).__init__(input_dataset, variant_tensor)
295 def _make_key_func(self, key_func, input_dataset):
298 key_func, self._transformation_name(), dataset=input_dataset)
313 def _make_reduce_func(self, reduce_func, input_dataset):
328 input_structure=(self._state_structure, input_dataset.element_spec),
398 def __init__(self, input_dataset, key_func, reduce_func, window_size_func):
[all …]
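_GroupByReducerDataset wires up the key_func/reduce_func pair shown above; the public entry point is group_by_reducer. A runnable sketch that sums elements grouped by parity (the grouping key is my choice of example):

import tensorflow as tf

dataset = tf.data.Dataset.range(10)

reducer = tf.data.experimental.Reducer(
    init_func=lambda _: tf.constant(0, dtype=tf.int64),
    reduce_func=lambda state, x: state + x,
    finalize_func=lambda state: state)

dataset = dataset.apply(
    tf.data.experimental.group_by_reducer(
        key_func=lambda x: x % 2, reducer=reducer))

print(list(dataset.as_numpy_iterator()))  # [20, 25]: sums of evens, odds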
snapshot.py
40 input_dataset,
81 self._input_dataset = input_dataset
103 super(_LegacySnapshotDataset, self).__init__(input_dataset, variant_tensor)
178 input_dataset=dataset,
201 input_dataset,
215 self._input_dataset = input_dataset
224 dataset_ops.DatasetSpec(input_dataset.element_spec)),
229 dataset=input_dataset,
240 input_dataset._variant_tensor, # pylint: disable=protected-access
248 super(_SnapshotDataset, self).__init__(input_dataset, variant_tensor)
[all …]
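_SnapshotDataset is reached through the public snapshot transformation. A hedged sketch; the path is illustrative and the first run must be able to write to it:

import tensorflow as tf

dataset = tf.data.Dataset.range(100)

# The first run materializes the input pipeline to disk; later runs
# read the snapshot back instead of recomputing the input.
dataset = dataset.apply(tf.data.experimental.snapshot("/tmp/snapshot_dir"))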
unique.py
54 def __init__(self, input_dataset):
56 self._input_dataset = input_dataset
57 if dataset_ops.get_legacy_output_types(input_dataset) not in (
65 super(_UniqueDataset, self).__init__(input_dataset, variant_tensor)
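The output-type check at line 57 above restricts unique() to int and string elements. A minimal runnable sketch of the public transformation:

import tensorflow as tf

dataset = tf.data.Dataset.from_tensor_slices([1, 2, 1, 3, 3, 2])
dataset = dataset.apply(tf.data.experimental.unique())
print(list(dataset.as_numpy_iterator()))  # [1, 2, 3], first occurrences kept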
sleep.py
27 def __init__(self, input_dataset, sleep_microseconds):
28 self._input_dataset = input_dataset
34 super(_SleepDataset, self).__init__(input_dataset, variant_tensor)
prefetching_ops.py
89 def __init__(self, input_dataset, target_device, source_device="/cpu:0"):
97 self._input_dataset = input_dataset
212 super(_CopyToDeviceDataset, self).__init__(input_dataset, variant_tensor)
231 def __init__(self, input_dataset, map_func, use_inter_op_parallelism=True):
233 self._input_dataset = input_dataset
239 dataset=input_dataset,
247 super(_MapOnGpuDataset, self).__init__(input_dataset, variant_tensor)
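_CopyToDeviceDataset is exposed as copy_to_device. A hedged sketch that moves elements to a GPU as part of the pipeline; it assumes a visible "/gpu:0" device:

import tensorflow as tf

dataset = tf.data.Dataset.range(10)

# Copies each element from the default "/cpu:0" source device to the GPU.
dataset = dataset.apply(tf.data.experimental.copy_to_device("/gpu:0"))
dataset = dataset.prefetch(1)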
error_ops.py
63 def __init__(self, input_dataset, log_warning):
65 self._input_dataset = input_dataset
77 super(_IgnoreErrorsDataset, self).__init__(input_dataset, variant_tensor)
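_IgnoreErrorsDataset backs ignore_errors(), which silently drops elements whose production raises an error. A runnable sketch; the division-by-zero input is illustrative:

import tensorflow as tf

dataset = tf.data.Dataset.from_tensor_slices([1., 2., 0., 4.])
dataset = dataset.map(lambda x: tf.debugging.check_numerics(1. / x, "inf"))

# The element where 1/0 produced inf raises InvalidArgumentError
# and is dropped; the rest pass through.
dataset = dataset.apply(tf.data.experimental.ignore_errors())
print(list(dataset.as_numpy_iterator()))  # [1.0, 0.5, 0.25]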
/external/tensorflow/tensorflow/python/data/ops/
dataset_ops.py
216 for input_dataset in self._inputs():
217 input_options = input_dataset.options()
341 …[input_dataset._has_captured_ref() for input_dataset in self._inputs()]) # pylint: disable=protec…
3165 def __init__(self, input_dataset, variant_tensor):
3166 self._input_dataset = input_dataset
3176 def __init__(self, input_dataset, variant_tensor):
3177 self._input_dataset = input_dataset
3179 input_dataset, variant_tensor)
3724 def __init__(self, input_dataset, dataset_to_concatenate):
3726 self._input_dataset = input_dataset
[all …]
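Lines 216-217 above walk self._inputs() to merge options across a pipeline, which is observable from the public API: options set on an input dataset propagate to datasets derived from it. A small runnable sketch (the specific option chosen is illustrative):

import tensorflow as tf

options = tf.data.Options()
options.experimental_deterministic = False

base = tf.data.Dataset.range(10).with_options(options)
derived = base.map(lambda x: x + 1)

# The derived dataset inherits the option via the _inputs() walk.
print(derived.options().experimental_deterministic)  # False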
/external/tensorflow/tensorflow/python/data/kernel_tests/
concatenate_test.py
45 input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
48 concatenated = input_dataset.concatenate(dataset_to_concatenate)
77 input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
80 concatenated = input_dataset.concatenate(dataset_to_concatenate)
109 input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
114 input_dataset.concatenate(dataset_to_concatenate)
127 input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
132 input_dataset.concatenate(dataset_to_concatenate)
143 input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
148 input_dataset.concatenate(dataset_to_concatenate)
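The failing cases in this test assert that concatenated datasets must have compatible element types and shapes. A runnable sketch of the happy path:

import tensorflow as tf

input_dataset = tf.data.Dataset.from_tensor_slices([1, 2, 3])
dataset_to_concatenate = tf.data.Dataset.from_tensor_slices([4, 5, 6])

concatenated = input_dataset.concatenate(dataset_to_concatenate)
print(list(concatenated.as_numpy_iterator()))  # [1, 2, 3, 4, 5, 6]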
/external/tensorflow/tensorflow/python/data/experimental/benchmarks/
choose_fastest_branch_benchmark.py
30 input_dataset,
35 ds_0 = branch_0(input_dataset)
36 ds_1 = branch_1(input_dataset)
38 input_dataset, [branch_0, branch_1],
87 input_dataset = make_dataset(1000 * 1000,
104 input_dataset, slow_branch, fast_branch, 1, num_elements_per_branch=2)
106 input_dataset, fast_branch, slow_branch, 1, num_elements_per_branch=2)
/external/tensorflow/tensorflow/core/api_def/base_api/
api_def_DatasetCardinality.pbtxt
5 name: "input_dataset"
13 The cardinality of `input_dataset`. Named constants are used to represent
17 summary: "Returns the cardinality of `input_dataset`."
19 Returns the cardinality of `input_dataset`.
api_def_ExperimentalDatasetCardinality.pbtxt
5 name: "input_dataset"
13 The cardinality of `input_dataset`. Named constants are used to represent
17 summary: "Returns the cardinality of `input_dataset`."
19 Returns the cardinality of `input_dataset`.
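Both ops surface through the public cardinality helper. A runnable sketch showing the named constants the api_def refers to:

import tensorflow as tf

dataset = tf.data.Dataset.range(42)
print(tf.data.experimental.cardinality(dataset).numpy())  # 42

# filter() makes the element count impossible to determine statically,
# so the op returns the UNKNOWN_CARDINALITY constant instead.
filtered = dataset.filter(lambda x: x > 0)
print(tf.data.experimental.cardinality(filtered) ==
      tf.data.experimental.UNKNOWN_CARDINALITY)  # True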
api_def_ExperimentalGroupByReducerDataset.pbtxt
5 name: "input_dataset"
20 A function mapping an element of `input_dataset`, concatenated
48 A function mapping the current reducer state and an element of `input_dataset`,
65 summary: "Creates a dataset that computes a group-by on `input_dataset`."
67 Creates a dataset that computes a group-by on `input_dataset`.
api_def_GroupByReducerDataset.pbtxt
5 name: "input_dataset"
20 A function mapping an element of `input_dataset`, concatenated
48 A function mapping the current reducer state and an element of `input_dataset`,
65 summary: "Creates a dataset that computes a group-by on `input_dataset`."
67 Creates a dataset that computes a group-by on `input_dataset`.
api_def_TakeDataset.pbtxt
7 A scalar representing the number of elements from the `input_dataset`
8 that should be taken. A value of `-1` indicates that all of `input_dataset`
12 summary: "Creates a dataset that contains `count` elements from the `input_dataset`."
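TakeDataset is the op behind Dataset.take. A runnable sketch, including the count=-1 case the api_def describes:

import tensorflow as tf

dataset = tf.data.Dataset.range(10)
print(list(dataset.take(3).as_numpy_iterator()))   # [0, 1, 2]

# count = -1 takes all of input_dataset, per the api_def above.
print(list(dataset.take(-1).as_numpy_iterator()))  # [0, 1, ..., 9]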
api_def_DatasetToGraph.pbtxt
5 name: "input_dataset"
16 summary: "Returns a serialized GraphDef representing `input_dataset`."
18 Returns a graph representation for `input_dataset`.
api_def_DatasetToGraphV2.pbtxt
5 name: "input_dataset"
16 summary: "Returns a serialized GraphDef representing `input_dataset`."
18 Returns a graph representation for `input_dataset`.
api_def_OptimizeDataset.pbtxt
5 name: "input_dataset"
16 summary: "Creates a dataset by applying optimizations to `input_dataset`."
18 Creates a dataset by applying optimizations to `input_dataset`.
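OptimizeDataset is inserted into pipelines automatically rather than called directly; which rewrites it applies is steered through the optimization options. A hedged sketch (the specific optimization flag is my choice of example):

import tensorflow as tf

dataset = tf.data.Dataset.range(1000).map(lambda x: x * 2).batch(32)

# Requests the fused map+batch rewrite when the optimizer runs.
options = tf.data.Options()
options.experimental_optimization.map_and_batch_fusion = True
dataset = dataset.with_options(options)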
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/
tf_record_writer_test.py
46 input_dataset = readers.TFRecordDataset([filename], compression_type)
48 compression_type).write(input_dataset)
98 input_dataset = dataset_ops.Dataset.from_tensors(10)
100 writers.TFRecordWriter(self._outputFilename(), "").write(input_dataset)
104 input_dataset = dataset_ops.Dataset.from_tensors([["hello"], ["world"]])
106 writers.TFRecordWriter(self._outputFilename(), "").write(input_dataset)
111 input_dataset = readers.TFRecordDataset(self._createFile())
112 return writers.TFRecordWriter(self._outputFilename()).write(input_dataset)
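The failing cases at lines 98-106 check that the writer only accepts datasets of scalar strings. A runnable sketch of the round trip; the output path is illustrative:

import tensorflow as tf

# The writer consumes a dataset of scalar strings.
input_dataset = tf.data.Dataset.from_tensor_slices([b"hello", b"world"])
tf.data.experimental.TFRecordWriter("/tmp/out.tfrecord").write(input_dataset)

# Read the records back to verify the round trip.
print(list(tf.data.TFRecordDataset("/tmp/out.tfrecord").as_numpy_iterator()))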
/external/tensorflow/tensorflow/core/ops/compat/ops_history_v1/
DatasetToGraph.pbtxt
4 name: "input_dataset"
15 name: "input_dataset"
35 name: "input_dataset"
62 name: "input_dataset"
/external/tensorflow/tensorflow/python/data/benchmarks/
from_tensor_slices_benchmark.py
33 def __init__(self, input_dataset, map_func):
35 self._input_dataset = input_dataset
39 dataset=input_dataset,
43 input_dataset._variant_tensor, # pylint: disable=protected-access
47 super(SingleThreadedFlatMapDataset, self).__init__(input_dataset,
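SingleThreadedFlatMapDataset is a benchmark-only variant; the public counterpart is flat_map, which maps each element to a dataset and flattens the results. A runnable sketch:

import tensorflow as tf

dataset = tf.data.Dataset.from_tensor_slices([[1, 2], [3, 4], [5, 6]])
dataset = dataset.flat_map(tf.data.Dataset.from_tensor_slices)
print(list(dataset.as_numpy_iterator()))  # [1, 2, 3, 4, 5, 6]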
/external/tensorflow/tensorflow/core/ops/compat/ops_history_v2/
PrefetchDataset.pbtxt
4 name: "input_dataset"
32 name: "input_dataset"
59 name: "input_dataset"
100 name: "input_dataset"
