/external/tensorflow/tensorflow/compiler/tests/ |
D | lstm_test.py |
    61    self._inputs = np.array([[-1.], [-.5], [0.], [.5], [1.]], np.float32)
    62    self._batch_size = len(self._inputs)
    83    x = constant_op.constant(self._inputs)
    115   self.assertAllClose(m, self._NextM(self._inputs, 1., m_prev, c_prev))
    116   self.assertAllClose(c, self._NextC(self._inputs, 1., m_prev, c_prev))
    128   self._NextM(self._inputs, weight, m_prev, c_prev))
    130   self._NextC(self._inputs, weight, m_prev, c_prev))
    147   x_seq = [constant_op.constant(self._inputs)] * seq_length
    167   x_seq = [constant_op.constant(self._inputs)] * seq_length
    200   m0 = self._NextM(self._inputs, weight1, m_init, c_init)
    [all …]
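The lstm_test.py matches show a common TensorFlow test-fixture pattern: the input data is built once as a NumPy array, the batch size is derived from it, and the same array is turned into a graph constant wherever a tensor is needed. A minimal sketch of that pattern, using the public tf.constant API in place of the internal constant_op module (variable names are illustrative):

    import numpy as np
    import tensorflow as tf

    # Fixture data: five examples, one scalar feature each.
    inputs = np.array([[-1.], [-.5], [0.], [.5], [1.]], np.float32)
    batch_size = len(inputs)  # 5

    # The same array becomes a graph constant wherever a tensor is needed ...
    x = tf.constant(inputs)

    # ... and is simply repeated per time step for the sequence tests.
    seq_length = 3
    x_seq = [tf.constant(inputs)] * seq_length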
|
/external/tensorflow/tensorflow/python/training/ |
D | evaluation_test.py |
    68    self._inputs = np.zeros((16, 4))
    73    self._inputs[i, j] = 1
    87    tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
    115   inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
    141   all_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
    180   inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
    208   inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
    237   inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
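evaluation_test.py builds its fixture the same way, but as a 16x4 one-hot matrix that is filled in element by element before being wrapped in a constant. A rough sketch, assuming a simple i % 4 column choice (the test's actual label logic is not visible in the matches):

    import numpy as np
    import tensorflow as tf

    # 16 examples, 4 classes, exactly one hot entry per row. The column
    # choice (i % 4) is an assumption; the test derives j from label logic
    # that does not appear in the matches above.
    inputs = np.zeros((16, 4))
    for i in range(16):
      inputs[i, i % 4] = 1

    tf_inputs = tf.constant(inputs, dtype=tf.float32)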
|
/external/tensorflow/tensorflow/python/ops/ |
D | clustering_ops.py |
    141   self._inputs = inputs if isinstance(inputs, list) else [inputs]
    362   inputs = self._inputs
    367   self._inputs, num_clusters, initial_clusters, self._distance_metric,
    584   self._inputs = inputs
    598   [array_ops.shape(i)[0] for i in self._inputs])
    607   return embedding_lookup(self._inputs, indices, partition_strategy='div')
    612   inp = self._inputs[0]
    634   first_shard = self._inputs[0]
    709   lambda: array_ops.concat(self._inputs, 0),
    727   return self._initial_clusters(self._inputs, self._num_remaining)
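clustering_ops.py keeps _inputs as a list of tensor shards even when the caller passes a single tensor, so per-shard shapes, concatenation of all shards, and "first shard" accesses work uniformly. A sketch of that normalization with made-up tensors:

    import tensorflow as tf

    def normalize_inputs(inputs):
      # Accept one tensor or a list of tensor shards; always store a list.
      return inputs if isinstance(inputs, list) else [inputs]

    shards = normalize_inputs(tf.random.normal([10, 3]))
    rows_per_shard = [tf.shape(s)[0] for s in shards]  # row count of each shard
    all_points = tf.concat(shards, 0)                  # every shard, concatenated
    first_shard = shards[0]                            # representative shard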
|
/external/pigweed/pw_docgen/ |
D | docs.gni |
    37    _inputs = invoker.inputs
    39    _inputs = []
    58    pw_doc_inputs = rebase_path(_inputs, root_build_dir)
    61    inputs = invoker.sources + _inputs
|
/external/tensorflow/tensorflow/lite/python/ |
D | interpreter.py |
    195   self._inputs = self._signature_def['inputs']
    210   if len(kwargs) != len(self._inputs):
    213   'expected %s vs provided %s' % (len(kwargs), len(self._inputs)))
    216   if input_name not in self._inputs:
    219   self._interpreter.resize_tensor_input(self._inputs[input_name],
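The interpreter.py matches come from TensorFlow Lite's signature-runner code: _inputs maps signature input names to tensor indices, and the keyword arguments supplied by the caller are checked against that map before any tensor is resized. A minimal sketch of the same validation pattern on a plain dict (the class, method, and message text below are illustrative, not the TF Lite API):

    class SignatureCaller:
      """Sketch: validate keyword inputs against a name -> tensor-index map."""

      def __init__(self, signature_inputs):
        # Mirrors signature_def['inputs'] above, e.g. {'x': 0, 'y': 1}.
        self._inputs = dict(signature_inputs)

      def __call__(self, **kwargs):
        if len(kwargs) != len(self._inputs):
          raise ValueError('expected %d inputs, got %d'
                           % (len(self._inputs), len(kwargs)))
        for name in kwargs:
          if name not in self._inputs:
            raise ValueError('unexpected input %r' % name)
        # A real runner would resize and fill the tensor at
        # self._inputs[name] here, as resize_tensor_input() does above.
        return {name: self._inputs[name] for name in kwargs}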
|
D | op_hint.py |
    351   self._inputs = OpHint.OpHintArgumentTracker(
    415   return self._inputs.add(*args, **kwargs)
    452   self._inputs.add(arg, name=name)
    456   return [self._inputs.add(arg) for arg in args]
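op_hint.py delegates all input bookkeeping to an OpHint.OpHintArgumentTracker stored in _inputs; the public add-input methods are thin wrappers around its add(). A toy version of that delegation (names invented):

    class ArgumentTracker:
      """Toy stand-in for OpHint.OpHintArgumentTracker."""

      def __init__(self):
        self._args = []

      def add(self, arg, name=None):
        self._args.append((name, arg))
        return arg


    class Hint:
      def __init__(self):
        self._inputs = ArgumentTracker()

      def add_input(self, *args, **kwargs):
        # Every public entry point forwards to the tracker.
        return self._inputs.add(*args, **kwargs)

      def add_inputs(self, *args):
        return [self._inputs.add(arg) for arg in args]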
|
/external/tensorflow/tensorflow/lite/delegates/coreml/ |
D | coreml_executor.mm |
    38    const std::vector<TensorData>* _inputs;   field
    56    _inputs = inputs;
    58    for (auto& input : *_inputs) {
    69    for (auto& input : *_inputs) {
    79    for (auto& input : *_inputs) {
|
/external/tensorflow/tensorflow/python/data/kernel_tests/ |
D | dataset_test.py |
    95    self.assertLen(dataset._inputs(), num_inputs)
    138   self.assertEmpty(dataset_fn._inputs())
    142   self.assertEqual([input_dataset], dataset_fn(input_dataset)._inputs())
    198   self.assertEqual([input_dataset], dataset._inputs())
    206   self.assertEqual([input_dataset], dataset._inputs())
    226   self.assertEqual([input1, input2], dataset_fn(input1, input2)._inputs())
    235   dataset_fn(input_datasets)._inputs())
    271   queue.extend(ds._inputs())
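dataset_test.py exercises the protected Dataset._inputs() hook: sources report no inputs, unary transformations report exactly their upstream dataset, zips report all constituents, and line 271 uses the hook to walk an entire pipeline breadth-first. A sketch of that traversal, relying only on the behavior shown here (note that _inputs() is a protected API):

    import tensorflow as tf

    def walk_pipeline(dataset):
      """Yield every dataset reachable through the protected _inputs() hook."""
      queue = [dataset]
      while queue:
        ds = queue.pop(0)
        yield ds
        queue.extend(ds._inputs())  # pylint: disable=protected-access

    pipeline = tf.data.Dataset.range(10).map(lambda x: x * 2).batch(2)
    print([type(ds).__name__ for ds in walk_pipeline(pipeline)])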
|
/external/pigweed/pw_build/ |
D | python_action.gni |
    84    _inputs = invoker.inputs
    86    _inputs = []
    92    _inputs += [ invoker.script ]
    172   inputs = _inputs
|
/external/tensorflow/tensorflow/compiler/mlir/tfr/python/ |
D | composite.py | 47 self._inputs = inputs
|
/external/tensorflow/tensorflow/python/debug/lib/ |
D | debug_graphs.py |
    173   self._inputs = []
    210   self._inputs.append(inp)
    217   return self._inputs
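debug_graphs.py uses the simplest variant: _inputs starts empty, input names are appended while the graph is parsed, and a property hands the list back. A minimal sketch of that shape (class and method names invented):

    class NodeInfo:
      """Collects the input names of one graph node."""

      def __init__(self):
        self._inputs = []

      def add_input(self, inp):
        self._inputs.append(inp)

      @property
      def inputs(self):
        return self._inputs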
|
/external/tensorflow/tensorflow/python/data/experimental/ops/ |
D | optimization.py | 108 def _inputs(self): member in _ChooseFastestDataset
|
D | interleave_ops.py | 143 def _inputs(self): member in _DirectedInterleaveDataset
|
/external/tensorflow/tensorflow/python/data/ops/ |
D | readers.py |
    360   def _inputs(self):   member in TFRecordDatasetV2
    361   return self._impl._inputs()  # pylint: disable=protected-access
|
D | dataset_ops.py |
    216   for input_dataset in self._inputs():
    304   def _inputs(self):   member in DatasetV2
    341   …[input_dataset._has_captured_ref() for input_dataset in self._inputs()]) # pylint: disable=protec…
    2789  def _inputs(self):   member in DatasetV1Adapter
    2790  return self._dataset._inputs()  # pylint: disable=protected-access
    2826  for input_ds in ds._inputs():
    3158  def _inputs(self):   member in DatasetSource
    3169  def _inputs(self):   member in UnaryDataset
    3268  def _inputs(self):   member in _VariantDataset
    3713  def _inputs(self):   member in ZipDataset
    [all …]
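dataset_ops.py is where the convention is defined: DatasetV2 declares _inputs(), each concrete dataset overrides it to report its upstream datasets (none for a DatasetSource, one for a UnaryDataset, all constituents for a ZipDataset), and generic graph walks such as lines 216 and 2826 go only through that hook. A rough sketch of the contract those overrides follow (simplified classes, not the real TensorFlow hierarchy):

    class Dataset:
      def _inputs(self):
        raise NotImplementedError('Dataset._inputs')


    class SourceDataset(Dataset):
      # A source (e.g. a file reader) has no upstream datasets.
      def _inputs(self):
        return []


    class UnaryDataset(Dataset):
      # A transformation such as map/batch wraps exactly one input dataset.
      def __init__(self, input_dataset):
        self._input_dataset = input_dataset

      def _inputs(self):
        return [self._input_dataset]


    class ZipDataset(Dataset):
      # Zip reports every constituent dataset.
      def __init__(self, datasets):
        self._datasets = list(datasets)

      def _inputs(self):
        return list(self._datasets)


    def iter_upstream(ds):
      # Generic walks only ever go through the hook.
      for input_dataset in ds._inputs():
        yield input_dataset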
|
D | multi_device_iterator_ops.py |
    142   def _inputs(self):   member in _PerDeviceGenerator
    184   def _inputs(self):   member in _ReincarnatedPerDeviceGenerator
|
/external/tensorflow/tensorflow/python/keras/engine/ |
D | training_utils_v1.py |
    1722  self._inputs = inputs
    1723  self._is_dict = isinstance(self._inputs, dict)
    1724  self._is_single_input = not isinstance(self._inputs, (list, tuple, dict))
    1730  for k in sorted(self._inputs.keys()):
    1731  self._flattened_inputs.append(self._inputs[k])
    1734  self._flattened_inputs = nest.flatten(self._inputs)
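training_utils_v1.py records what shape the caller's inputs take (dict, single array, or nested list/tuple) and flattens them to a list: dict values in sorted-key order, everything else via nest.flatten. A condensed sketch of that normalization using the public tf.nest API, with the surrounding Keras class omitted:

    import tensorflow as tf

    def flatten_inputs(inputs):
      is_dict = isinstance(inputs, dict)
      is_single_input = not isinstance(inputs, (list, tuple, dict))
      if is_dict:
        # Deterministic order: dict values sorted by key name.
        flattened = [inputs[k] for k in sorted(inputs.keys())]
      else:
        # Single arrays and nested lists/tuples flatten the same way.
        flattened = tf.nest.flatten(inputs)
      return flattened, is_dict, is_single_input

    flattened, _, _ = flatten_inputs({'b': 2, 'a': 1})  # -> [1, 2]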
|
/external/tensorflow/tensorflow/python/ops/parallel_for/ |
D | pfor.py |
    792   self._inputs = inputs
    806   stack_indices = range(len(self._inputs))
    809   inp = self._inputs[i]
    812   self._inputs[i] = _stack(inp.t, length)
    814   self._inputs[i] = wrap(
    815   _tile_variant_with_length(self._inputs[i].t, length), True)
    817   self._inputs[i] = wrap(_untile_variant(self._inputs[i].t), True)
    827   if not self._inputs:
    837   ranks = [_get_rank(x) for x in self._inputs]
    842   for i, inp in enumerate(self._inputs):
    [all …]
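pfor.py rewrites each converted input in place: values that are not yet stacked are stacked out to the loop length (lines 812-817 also handle variant tensors by tiling), and the ranks collected at line 837 are used to reconcile shapes. The helper below is a simplified stand-in for that stacking step, assuming a plain dense tensor with a statically known rank:

    import tensorflow as tf

    def stack_to_length(t, length):
      # Give a loop-invariant value a leading dimension of size `length` so
      # it lines up with the per-iteration (already stacked) values.
      return tf.tile(tf.expand_dims(t, 0), [length] + [1] * t.shape.rank)

    x = tf.constant([1., 2., 3.])           # loop-invariant, shape [3]
    stacked = stack_to_length(x, length=4)  # shape [4, 3]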
|
/external/tensorflow/tensorflow/lite/schema/ |
D | schema_generated.h |
    14641  auto _inputs = _o->inputs.size() ? _fbb.CreateVector(_o->inputs) : 0;
    14652  _inputs,
    14687  auto _inputs = _o->inputs.size() ? _fbb.CreateVector(_o->inputs) : 0;
    14694  _inputs,
    14808  …auto _inputs = _o->inputs.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::TensorMap>> (_o->…
    14814  _inputs,
|