Searched refs: from_tensor_slices (results 1 – 25 of 125, sorted by relevance)
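For context on the symbol being searched, here is a minimal sketch of what Dataset.from_tensor_slices does, assuming TensorFlow 2.x with eager execution; the tensor values are illustrative and not taken from any of the files listed below.

import tensorflow as tf

# from_tensor_slices slices a tensor (or a nested structure of tensors)
# along its first dimension, yielding one dataset element per slice.
features = tf.constant([[1, 2], [3, 4], [5, 6]])
labels = tf.constant([0, 1, 0])
dataset = tf.data.Dataset.from_tensor_slices((features, labels))

for x, y in dataset:
    print(x.numpy(), y.numpy())  # [1 2] 0, then [3 4] 1, then [5 6] 0

The test files below use the internal alias dataset_ops.Dataset, which is the same class exposed publicly as tf.data.Dataset.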
/external/tensorflow/tensorflow/python/data/kernel_tests/

concatenate_test.py
    44: input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    45: dataset_to_concatenate = dataset_ops.Dataset.from_tensor_slices(
    75: input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    76: dataset_to_concatenate = dataset_ops.Dataset.from_tensor_slices(
    106: input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    107: dataset_to_concatenate = dataset_ops.Dataset.from_tensor_slices(
    123: input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    124: dataset_to_concatenate = dataset_ops.Dataset.from_tensor_slices(
    138: input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    139: dataset_to_concatenate = dataset_ops.Dataset.from_tensor_slices(

flat_map_test.py
    42: dataset = dataset_ops.Dataset.from_tensor_slices(components).flat_map(
    52: dataset = dataset_ops.Dataset.from_tensor_slices(components).flat_map(
    53: lambda x: dataset_ops.Dataset.from_tensor_slices(x).flat_map(
    69: dataset_ops.Dataset.from_tensor_slices(components).flat_map(
    70: lambda x: dataset_ops.Dataset.from_tensor_slices(x).flat_map(
    114: return dataset_ops.Dataset.from_tensor_slices(

cache_test.py
    54: dataset_ops.Dataset.from_tensor_slices(components).repeat(count))
    105: dataset_ops.Dataset.from_tensor_slices(components).cache(
    108: dataset_ops.Dataset.from_tensor_slices(components).cache(
    126: dataset_ops.Dataset.from_tensor_slices(components).cache(
    129: dataset_ops.Dataset.from_tensor_slices(components).cache(
    210: dataset_ops.Dataset.from_tensor_slices(components).repeat(0))

iterator_test.py
    63: dataset = dataset_ops.Dataset.from_tensor_slices(component).map(add)
    73: dataset_ops.Dataset.from_tensor_slices([0.0, 1.0, 2.0])
    90: dataset_ops.Dataset.from_tensor_slices(components).map(_map_fn)
    117: dataset_ops.Dataset.from_tensor_slices(tensor_components)
    144: dataset_ops.Dataset.from_tensor_slices(components)
    404: dataset_3 = dataset_ops.Dataset.from_tensor_slices([1, 2, 3])
    405: dataset_4 = dataset_ops.Dataset.from_tensor_slices([10, 20, 30, 40])
    463: dataset_3 = dataset_ops.Dataset.from_tensor_slices([1, 2, 3])
    464: dataset_4 = dataset_ops.Dataset.from_tensor_slices([10, 20, 30, 40])
    528: dataset = dataset_ops.Dataset.from_tensor_slices([1, 2, 3])
    [all …]

iterator_cluster_test.py
    50: dataset_3 = dataset_ops.Dataset.from_tensor_slices([1, 2, 3])
    66: dataset_3 = dataset_ops.Dataset.from_tensor_slices([1, 2, 3])
    132: input_sentences = dataset_ops.Dataset.from_tensor_slices(
    168: dataset_ops.Dataset.from_tensor_slices(components).map(_map_fn)

from_tensor_slices_test.py
    43: dataset = dataset_ops.Dataset.from_tensor_slices(components)
    68: dataset = dataset_ops.Dataset.from_tensor_slices(components)
    116: dataset = dataset_ops.Dataset.from_tensor_slices(components)
    162: dataset = dataset_ops.Dataset.from_tensor_slices(components)

interleave_test.py
    145: dataset = dataset_ops.Dataset.from_tensor_slices(input_values).repeat(
    172: dataset = dataset_ops.Dataset.from_tensor_slices(input_values).map(
    194: return dataset_ops.Dataset.from_tensor_slices(
    225: dataset = dataset_ops.Dataset.from_tensor_slices(input_values).repeat(
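The kernel tests above chain from_tensor_slices with other transformations such as flat_map, cache, and repeat. A hedged sketch of that chaining pattern, using the public tf.data API and illustrative values rather than the tests' actual components:

import tensorflow as tf

components = tf.constant([[1, 2, 3], [4, 5, 6]])

# flat_map: each outer slice becomes its own dataset, then everything is flattened.
flat = (tf.data.Dataset.from_tensor_slices(components)
        .flat_map(lambda x: tf.data.Dataset.from_tensor_slices(x)))
print(list(flat.as_numpy_iterator()))  # [1, 2, 3, 4, 5, 6]

# cache + repeat: iterate the slices twice, reusing cached elements after the first pass.
cached = tf.data.Dataset.from_tensor_slices(components).cache().repeat(2)
print(len(list(cached)))  # 4 elements: 2 slices x 2 repeats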
/external/tensorflow/tensorflow/python/keras/engine/

training_dataset_test.py
    63: dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    124: dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    155: dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    177: dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    263: dataset_tuple = dataset_ops.Dataset.from_tensor_slices((
    271: predict_dataset_tuple = dataset_ops.Dataset.from_tensor_slices(
    286: dataset_dict = dataset_ops.Dataset.from_tensor_slices((
    294: predict_dataset_dict = dataset_ops.Dataset.from_tensor_slices(
    313: dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets,
    334: dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    [all …]

feature_columns_integration_test.py
    89: ds1 = dataset_ops.Dataset.from_tensor_slices(x)
    90: ds2 = dataset_ops.Dataset.from_tensor_slices(y)
    134: ds1 = dataset_ops.Dataset.from_tensor_slices(x)
    135: ds2 = dataset_ops.Dataset.from_tensor_slices(y)
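The Keras engine tests build (inputs, targets) datasets with from_tensor_slices and pass them straight to Model.fit. A rough sketch of that pattern; the model, shapes, and hyperparameters here are assumptions for illustration, not the tests' own settings:

import numpy as np
import tensorflow as tf

inputs = np.random.random((32, 4)).astype(np.float32)
targets = np.random.random((32, 1)).astype(np.float32)

# Batch the slices so each dataset element is a (batch_of_inputs, batch_of_targets) pair.
dataset = tf.data.Dataset.from_tensor_slices((inputs, targets)).shuffle(32).batch(8)

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
model.compile(optimizer='sgd', loss='mse')
model.fit(dataset, epochs=2)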
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/optimization/

choose_fastest_dataset_test.py
    36: dataset = dataset_ops.Dataset.from_tensor_slices([0, 1, 2, 3, 4])
    44: dataset = dataset_ops.Dataset.from_tensor_slices([0, 1, 2, 3, 4])
    71: dataset_a = dataset_ops.Dataset.from_tensor_slices(slices_a)
    72: dataset_b = dataset_ops.Dataset.from_tensor_slices(slices_b)

map_vectorization_test.py
    108: return dataset_ops.Dataset.from_tensor_slices(random_int)
    111: return dataset_ops.Dataset.from_tensor_slices(random_input > 0)
    114: return dataset_ops.Dataset.from_tensor_slices(random_input)
    117: return dataset_ops.Dataset.from_tensor_slices(
    213: return dataset_ops.Dataset.from_tensor_slices(["1.0:2:a",
    245: return dataset_ops.Dataset.from_tensor_slices(
    399: base_dataset = dataset_ops.Dataset.from_tensor_slices([[1, 2],
    415: base_dataset = dataset_ops.Dataset.from_tensor_slices([[1, 2],
    489: base_dataset = dataset_ops.Dataset.from_tensor_slices([[1, 2],
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/

group_by_window_test.py
    60: input_dataset = dataset_ops.Dataset.from_tensor_slices(
    89: input_dataset = dataset_ops.Dataset.from_tensor_slices(
    154: input_dataset = dataset_ops.Dataset.from_tensor_slices(math_ops.range(
    187: dataset = dataset_ops.Dataset.from_tensor_slices(components).apply(
    207: dataset = dataset_ops.Dataset.from_tensor_slices(
    228: dataset = dataset_ops.Dataset.from_tensor_slices(components).repeat(
    245: dataset = dataset_ops.Dataset.from_tensor_slices(components).apply(
    276: dataset = dataset_ops.Dataset.from_tensor_slices(
    296: dataset = dataset_ops.Dataset.from_tensor_slices(

take_while_test.py
    47: dataset_ops.Dataset.from_tensor_slices)
    71: dataset = dataset_ops.Dataset.from_tensor_slices(string).apply(
    90: dataset = dataset_ops.Dataset.from_tensor_slices(boolean_array).apply(

rejection_resample_test.py
    48: dataset = dataset_ops.Dataset.from_tensor_slices(classes).shuffle(
    85: dataset = dataset_ops.Dataset.from_tensor_slices(data_np)
    109: dataset = dataset_ops.Dataset.from_tensor_slices(data_np)

ignore_errors_test.py
    46: dataset_ops.Dataset.from_tensor_slices(components)
    60: dataset_ops.Dataset.from_tensor_slices(components).map(
    83: dataset_ops.Dataset.from_tensor_slices(filenames).map(

unbatch_test.py
    49: data = dataset_ops.Dataset.from_tensor_slices(data)
    60: data = dataset_ops.Dataset.from_tensor_slices(data)
    100: data = dataset_ops.Dataset.from_tensor_slices(data)
    112: data = dataset_ops.Dataset.from_tensor_slices(data)
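These experimental kernel tests pair from_tensor_slices with transformations applied via Dataset.apply, such as take_while, or with unbatch. A small sketch of those two under the public TF 2.x API; the values are illustrative:

import tensorflow as tf

# take_while stops the dataset as soon as the predicate is False (take_while_test.py).
nums = tf.data.Dataset.from_tensor_slices([1, 2, 3, 10, 4])
taken = nums.apply(tf.data.experimental.take_while(lambda x: x < 5))
print(list(taken.as_numpy_iterator()))  # [1, 2, 3]

# unbatch splits each sliced row back into individual elements (unbatch_test.py).
rows = tf.data.Dataset.from_tensor_slices([[1, 2], [3, 4]])
print(list(rows.unbatch().as_numpy_iterator()))  # [1, 2, 3, 4]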
/external/tensorflow/tensorflow/contrib/distribute/python/

keras_backward_compat_test.py
    100: dataset = dataset_ops.Dataset.from_tensor_slices((x_train, y_train))
    114: dataset = dataset_ops.Dataset.from_tensor_slices((x_test, y_test))
    183: dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    191: dataset = dataset_ops.Dataset.from_tensor_slices(inputs)
    250: train_dataset = dataset_ops.Dataset.from_tensor_slices(
    265: eval_dataset = dataset_ops.Dataset.from_tensor_slices(
    281: predict_dataset = dataset_ops.Dataset.from_tensor_slices(x_predict)
    555: dataset_tuple = dataset_ops.Dataset.from_tensor_slices((
    563: dataset_dict = dataset_ops.Dataset.from_tensor_slices((
    611: dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets,
    [all …]

keras_test.py
    134: dataset = dataset_ops.Dataset.from_tensor_slices((x_train, y_train))
    148: dataset = dataset_ops.Dataset.from_tensor_slices((x_test, y_test))
    217: dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    225: dataset = dataset_ops.Dataset.from_tensor_slices(inputs)
    240: original_dataset = (dataset_ops.Dataset.from_tensor_slices(
    417: return dataset_ops.Dataset.from_tensor_slices((input_dict,
    430: return dataset_ops.Dataset.from_tensor_slices((input_dict,
    871: dataset_tuple = dataset_ops.Dataset.from_tensor_slices((
    879: dataset_dict = dataset_ops.Dataset.from_tensor_slices((
    907: dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
    [all …]
/external/tensorflow/tensorflow/python/data/benchmarks/

from_tensor_slices_benchmark.py
    39: dataset_ops.Dataset.from_tensor_slices(input_data).repeat(
    57: dataset_ops.Dataset.from_tensor_slices(
    75: dataset_ops.Dataset.from_tensor_slices(input_data).batch(
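from_tensor_slices_benchmark.py times pipelines that slice a large in-memory array and then repeat or batch the result. A sketch of those pipeline shapes; the array size and parameters below are illustrative assumptions, not the benchmark's actual settings:

import numpy as np
import tensorflow as tf

input_data = np.arange(10000, dtype=np.int64)

# One benchmark-style pipeline repeats the sliced data for many passes...
repeated = tf.data.Dataset.from_tensor_slices(input_data).repeat(10)
# ...another re-groups the individual slices into fixed-size batches.
batched = tf.data.Dataset.from_tensor_slices(input_data).batch(100)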
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/serialization/

sequence_dataset_serialization_test.py
    32: return dataset_ops.Dataset.from_tensor_slices(components).skip(count)
    61: return dataset_ops.Dataset.from_tensor_slices(components).take(count)
    92: return dataset_ops.Dataset.from_tensor_slices(components).take(

flat_map_dataset_serialization_test.py
    73: return dataset_ops.Dataset.from_tensor_slices([defun_fn(x)])
    85: lambda _: dataset_ops.Dataset.from_tensor_slices([test_var]))
    112: return dataset_ops.Dataset.from_tensor_slices(

concatenate_dataset_serialization_test.py
    36: return dataset_ops.Dataset.from_tensor_slices(input_components).concatenate(
    37: dataset_ops.Dataset.from_tensor_slices(to_concatenate_components))

padded_batch_dataset_serialization_test.py
    35: return dataset_ops.Dataset.from_tensor_slices(seq_lens).map(
    53: return dataset_ops.Dataset.from_tensor_slices(seq_lens).map(

interleave_dataset_serialization_test.py
    37: return dataset_ops.Dataset.from_tensor_slices(input_values).repeat(
    72: return dataset_ops.Dataset.from_tensor_slices(
/external/tensorflow/tensorflow/contrib/eager/python/

datasets_test.py
    118: dataset = Dataset.from_tensor_slices(['brain', 'salad', 'surgery'])
    125: ds = Dataset.from_tensor_slices([1, 2, 3, 4, 5, 6]).map(math_ops.square)
    175: datasets.Iterator(Dataset.from_tensor_slices(components))):
    238: dataset = Dataset.from_tensor_slices([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
    253: dataset = Dataset.from_tensor_slices([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
    314: Dataset.from_tensor_slices(input_data).repeat(num_epochs)
    342: Dataset.from_tensor_slices(input_data).batch(batch_size).cache()
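datasets_test.py exercises from_tensor_slices under eager execution, where a dataset is directly iterable in a Python for-loop. A sketch of that usage with the public API, assuming TensorFlow 2.x (eager by default); tf.square stands in for the internal math_ops.square the test imports:

import tensorflow as tf

squares = tf.data.Dataset.from_tensor_slices([1, 2, 3, 4, 5, 6]).map(tf.square)
print([x.numpy() for x in squares])  # [1, 4, 9, 16, 25, 36]

strings = tf.data.Dataset.from_tensor_slices(['brain', 'salad', 'surgery'])
print([s.numpy() for s in strings])  # [b'brain', b'salad', b'surgery']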