| /external/tensorflow/tensorflow/python/ops/ |
| D | confusion_matrix.py |
      29: labels, predictions, expected_rank_diff=0, name=None): argument
      36: But, for example, if `labels` contains class IDs and `predictions` contains 1
      38: `labels`, so `expected_rank_diff` would be 1. In this case, we'd squeeze
      39: `labels` if `rank(predictions) - rank(labels) == 0`, and
      40: `predictions` if `rank(predictions) - rank(labels) == 2`.
      46: labels: Label values, a `Tensor` whose dimensions match `predictions`.
      48: expected_rank_diff: Expected result of `rank(predictions) - rank(labels)`.
      52: Tuple of `labels` and `predictions`, possibly with last dim squeezed.
      55: [labels, predictions]):
      57: labels = ops.convert_to_tensor(labels)
      [all …]
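The matches above quote the docstring of `remove_squeezable_dimensions`, which squeezes whichever tensor is one rank larger than `expected_rank_diff` allows. A minimal NumPy-only sketch of that rule; the function name and guards here are illustrative, not TensorFlow's implementation:

```python
# Hedged sketch of the squeezing rule described in the matches above.
import numpy as np

def squeeze_to_expected_rank_diff(labels, predictions, expected_rank_diff=0):
    """Squeeze the last dim of whichever array is one rank too large."""
    rank_diff = predictions.ndim - labels.ndim
    if rank_diff == expected_rank_diff + 1 and predictions.shape[-1] == 1:
        predictions = np.squeeze(predictions, axis=-1)
    elif rank_diff == expected_rank_diff - 1 and labels.shape[-1] == 1:
        labels = np.squeeze(labels, axis=-1)
    return labels, predictions

labels = np.zeros((4,))          # class IDs, rank 1
predictions = np.zeros((4, 3))   # one probability per class, rank 2
# With expected_rank_diff=1 nothing is squeezed; with the default 0,
# predictions would only be squeezed if its trailing dimension were 1.
squeeze_to_expected_rank_diff(labels, predictions, expected_rank_diff=1)
```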
|
| D | metrics_impl.py |
      84: def _remove_squeezable_dimensions(predictions, labels, weights): argument
      87: Squeezes last dim of `predictions` or `labels` if their rank differs by 1
      99: labels: Optional label `Tensor` whose dimensions match `predictions`.
      104: Tuple of `predictions`, `labels` and `weights`. Each of them possibly has
      108: if labels is not None:
      109: labels, predictions = confusion_matrix.remove_squeezable_dimensions(
      110: labels, predictions)
      111: predictions.get_shape().assert_is_compatible_with(labels.get_shape())
      114: return predictions, labels, None
      120: return predictions, labels, weights
      [all …]
|
| D | ctc_ops.py |
      71: def ctc_loss(labels, argument
      88: max(labels.indices(labels.indices[:, 1] == b, 2))
      98: `num_labels + 1` classes, where num_labels is the number of true labels, and
      101: For example, for a vocabulary containing 3 labels `[a, b, c]`,
      102: `num_classes = 4` and the labels indexing is `{a: 0, b: 1, c: 2, blank: 3}`.
      108: before loss calculation, wherein repeated labels passed to the loss
      109: are merged into single labels. This is useful if the training labels come
      113: repeated non-blank labels will not be merged and are interpreted
      114: as individual labels. This is a simplified (non-standard) version of CTC.
      127: in the input labels before training.
      [all …]
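The quoted docstring spells out the blank-label convention: with a vocabulary `[a, b, c]`, class IDs are `{a: 0, b: 1, c: 2}` and the blank takes the last index, so the logits need `num_classes = 4`. A hedged usage sketch of the v1 `tf.nn.ctc_loss` API with made-up shapes and values:

```python
# Sketch only: time-major logits, per-example sequence lengths, sparse labels.
import tensorflow.compat.v1 as tf

num_classes = 4                     # 3 true labels + 1 blank (last index)
max_time, batch_size = 5, 2
logits = tf.random.normal([max_time, batch_size, num_classes])  # time-major
seq_len = tf.constant([5, 4], dtype=tf.int32)

# Sparse labels: batch item 0 -> [a, b], batch item 1 -> [c]
labels = tf.SparseTensor(
    indices=[[0, 0], [0, 1], [1, 0]],
    values=tf.constant([0, 1, 2], dtype=tf.int32),
    dense_shape=[batch_size, 2])

loss = tf.nn.ctc_loss(labels=labels, inputs=logits, sequence_length=seq_len)
```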
|
| /external/autotest/server/hosts/ |
| D | base_label.py |
      18: Decorator for labels that should exist forever once applied.
      36: return (self._NAME in info.labels) or exists(self, host)
      42: This class contains the scaffolding for the host-specific labels.
      52: Return the list of labels generated for the host.
      57: @return a list of labels applicable to the host.
      66: This method is geared for the type of labels that indicate if the host
      78: Return the list of labels.
      90: Return all possible labels generated by this label class.
      92: @returns a tuple of sets, the first set is for labels that are prefixes
      93: like 'os:android'. The second set is for labels that are full
      [all …]
|
| D | afe_store.py |
      54: return host_info.HostInfo(host.labels, host.attributes)
      64: # copy of HostInfo from the AFE and then add/remove labels / attribtes
      66: # parallel, we'll end up with corrupted labels / attributes.
      69: list(set(old_info.labels) - set(new_info.labels)))
      71: list(set(new_info.labels) - set(old_info.labels)))
      75: def _remove_labels_on_afe(self, labels): argument
      76: """Requests the AFE to remove the given labels.
      78: @param labels: Remove these.
      80: if not labels:
      83: logging.debug('removing labels: %s', labels)
      [all …]
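The matches show labels being synced by set difference: anything only in the old info is removed, anything only in the new info is added. A tiny self-contained sketch of that diff (names here are illustrative, not the afe_store API):

```python
# Minimal sketch of the set-difference sync visible in the matches above.
def diff_labels(old_labels, new_labels):
    to_remove = sorted(set(old_labels) - set(new_labels))
    to_add = sorted(set(new_labels) - set(old_labels))
    return to_remove, to_add

to_remove, to_add = diff_labels(
    old_labels=['pool:suites', 'webcam', 'cros-version:old'],
    new_labels=['pool:suites', 'cros-version:new'])
# to_remove == ['cros-version:old', 'webcam'], to_add == ['cros-version:new']
```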
|
| D | host_info_unittest.py |
      40: labels=['label1', 'label2', 'label1'],
      44: labels=['label1', 'label2', 'label1'],
      54: info1 = host_info.HostInfo(labels=['label'])
      63: self.info.labels = ['cros-version', 'fwrw-version', 'fwro-version']
      69: self.info.labels = ['not-at-start-cros-version:cros1']
      75: self.info.labels = ['fwrw-version:fwrw1', 'fwro-version:fwro1']
      80: """When multiple labels match, first one should be used as build."""
      81: self.info.labels = ['cros-version:cros1', 'cros-version:cros2']
      87: self.info.labels = ['cheets-version:ab1', 'cros-version:cros1']
      89: self.info.labels = ['cros-version:cros1', 'cheets-version:ab1']
      [all …]
|
| /external/tensorflow/tensorflow/python/ops/losses/ |
| D | losses_impl.py |
      218: labels, predictions, weights=1.0, scope=None, argument
      232: labels: The ground truth output tensor, same dimensions as 'predictions'.
      235: `labels`, and must be broadcastable to `labels` (i.e., all dimensions must
      243: shape as `labels`; otherwise, it is scalar.
      247: `labels` or if the shape of `weights` is invalid or if `labels`
      255: if labels is None:
      256: raise ValueError("Argument `labels` must not be None.")
      260: (predictions, labels, weights)) as scope:
      262: labels = math_ops.cast(labels, dtype=dtypes.float32)
      263: predictions.get_shape().assert_is_compatible_with(labels.get_shape())
      [all …]
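The quoted docstring fragments describe the common v1 losses contract: `labels` and `predictions` share a shape, and `weights` must be broadcastable to `labels`. A hedged usage sketch via the public `tf.compat.v1.losses` API; the specific loss chosen (`mean_squared_error`) is just an example, not necessarily the function at the matched lines:

```python
# Sketch of the labels/predictions/weights contract quoted above.
import tensorflow.compat.v1 as tf

labels = tf.constant([[1.0, 2.0], [3.0, 4.0]])
predictions = tf.constant([[1.5, 2.0], [2.0, 4.5]])
weights = tf.constant([[1.0], [0.5]])   # broadcastable to `labels` (2, 2)

loss = tf.losses.mean_squared_error(labels, predictions, weights=weights)
# With the default SUM_BY_NONZERO_WEIGHTS reduction the result is a scalar.
```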
|
| /external/autotest/utils/ |
| D | labellib_unittest.py |
      44: labels = ['webcam', 'pool:suites']
      45: mapping = labellib.LabelsMapping(labels)
      46: self.assertEqual(mapping.getlabels(), labels)
      49: labels = ['webcam', 'pool:suites', 'pool:party']
      50: mapping = labellib.LabelsMapping(labels)
      54: labels = ['ohse:tsubame', 'webcam']
      55: mapping = labellib.LabelsMapping(labels)
      59: labels = ['webcam', 'exec', 'method']
      60: mapping = labellib.LabelsMapping(labels)
      64: labels = ['class:protecta', 'method:metafalica', 'exec:chronicle_key']
      [all …]
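These tests exercise the `key:value` label convention: a label either carries a value (`'pool:suites'`) or is a bare flag (`'webcam'`). An illustrative parser, not autotest's `labellib`; how `labellib` resolves duplicate keys such as `'pool:suites'`/`'pool:party'` is its own business, this sketch simply keeps the first value seen:

```python
# Illustrative sketch of splitting key:value labels from bare flag labels.
def parse_labels(labels):
    keyvals, flags = {}, []
    for label in labels:
        key, sep, value = label.partition(':')
        if sep:
            keyvals.setdefault(key, value)   # first occurrence wins here
        else:
            flags.append(label)
    return keyvals, flags

keyvals, flags = parse_labels(['webcam', 'pool:suites', 'pool:party'])
# keyvals == {'pool': 'suites'}, flags == ['webcam']
```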
|
| /external/tensorflow/tensorflow/python/distribute/ |
| D | metrics_v1_test.py |
      29: # First four batches of x: labels, predictions -> (labels == predictions)
      35: lambda x: {"labels": x % 5, "predictions": x % 3}).batch(
      40: # First four batches of labels, predictions: {TP, FP, TN, FN}
      47: "labels": [True, False, True, False],
      53: # First four batches of labels, predictions: {TP, FP, TN, FN}
      60: "labels": [True, False, True, False],
      67: "labels": [1., .5, 1., 0.],
      151: labels = x["labels"]
      153: return metrics.accuracy(labels, predictions)
      166: labels = x["labels"]
      [all …]
|
| /external/tensorflow/tensorflow/python/kernel_tests/ |
| D | metrics_test.py |
      50: def _binary_2d_label_to_2d_sparse_value(labels): argument
      53: Only 1 values in `labels` are included in result.
      56: labels: Dense 2D binary indicator, shape [batch_size, num_classes].
      60: is the number of `1` values in each row of `labels`. Values are indices
      61: of `1` values along the last dimension of `labels`.
      66: for row in labels:
      78: shape = [len(labels), len(labels[0])]
      84: def _binary_2d_label_to_1d_sparse_value(labels): argument
      87: Only 1 values in `labels` are included in result.
      90: labels: Dense 2D binary indicator, shape [batch_size, num_classes]. Each
      [all …]
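The docstring describes turning a dense 2D binary indicator into sparse form: only the `1` entries survive, the values are their column indices, and the dense shape stays `[len(labels), len(labels[0])]`. A NumPy sketch of that conversion (names illustrative, and it returns plain arrays rather than the test's `SparseTensorValue`):

```python
# Sketch of the dense-binary-to-sparse conversion described above.
import numpy as np

def binary_2d_label_to_sparse(labels):
    """Return (indices, values, dense_shape) for the 1 entries of `labels`."""
    labels = np.asarray(labels)
    indices, values = [], []
    for row_idx, row in enumerate(labels):
        for rank, col_idx in enumerate(np.flatnonzero(row)):
            indices.append([row_idx, rank])
            values.append(col_idx)
    return np.array(indices), np.array(values), np.array(labels.shape)

indices, values, shape = binary_2d_label_to_sparse([[0, 1, 1], [1, 0, 0]])
# indices == [[0, 0], [0, 1], [1, 0]], values == [1, 2, 0], shape == [2, 3]
```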
|
| D | xent_op_test_base.py |
      41: def _opFwdBwd(self, labels, logits, axis=-1): argument
      47: labels=labels, logits=logits, dim=axis)
      50: def _npXent(self, labels, logits, dim=-1): argument
      57: bp = (probs - labels)
      58: l = -np.sum(labels * np.log(probs + 1.0e-20), axis=dim)
      68: np_loss, np_gradient = self._npXent(labels=np_labels, logits=np_logits)
      90: labels=np_labels, logits=np_logits, dim=dim)
      97: labels=np.array([[-1.], [0.], [1.], [1.]]).astype(dtype),
      111: labels = [[0., 0., 0., 1.], [0., .5, .5, 0.]]
      132: np_loss, np_gradient = self._npXent(np.array(labels), np.array(logits))
      [all …]
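The `_npXent` lines are the NumPy reference the tests compare against: softmax over the class axis, `loss = -sum(labels * log(probs))`, and gradient with respect to the logits `probs - labels`. A self-contained sketch of that reference (the max-shift is added here for numerical stability, the `1.0e-20` guard mirrors the snippet):

```python
# Self-contained version of the NumPy cross-entropy reference shown above.
import numpy as np

def np_xent(labels, logits, dim=-1):
    shifted = logits - np.max(logits, axis=dim, keepdims=True)  # stability
    e = np.exp(shifted)
    probs = e / np.sum(e, axis=dim, keepdims=True)
    loss = -np.sum(labels * np.log(probs + 1.0e-20), axis=dim)
    backprop = probs - labels        # gradient w.r.t. the logits
    return loss, backprop

labels = np.array([[0., 0., 0., 1.], [0., .5, .5, 0.]])
logits = np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]])
loss, grad = np_xent(labels, logits)   # loss has shape (2,)
```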
|
| /external/tensorflow/tensorflow/python/kernel_tests/sparse_ops/ |
| D | sparse_xent_op_test_base.py |
      36: def _opFwdBwd(self, labels, logits): argument
      42: labels=labels, logits=logits)
      45: def _npXent(self, labels, logits): argument
      47: labels = np.reshape(labels, [-1])
      55: labels_mat[np.arange(batch_size), labels] = 1.0
      61: np_loss, np_gradient = self._npXent(labels=np_labels, logits=np_logits)
      62: tf_loss, tf_gradient = self._opFwdBwd(labels=np_labels, logits=np_logits)
      69: labels=np.array([0, 0, 0]).astype(label_dtype),
      76: labels = [4, 3, 0, -1]
      79: loss, gradient = self._opFwdBwd(labels=labels, logits=logits)
      [all …]
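The sparse variant takes integer class IDs instead of one-hot rows; line 55 above expands them into a one-hot matrix before reusing the dense math. A short sketch of that expansion (variable names illustrative):

```python
# Sketch of the sparse-label path shown above: expand class IDs to one-hot.
import numpy as np

labels = np.array([0, 2, 1])                     # one class ID per example
batch_size, num_classes = labels.shape[0], 4
labels_mat = np.zeros((batch_size, num_classes))
labels_mat[np.arange(batch_size), labels] = 1.0  # one-hot rows
# labels_mat can now be fed to the dense np_xent reference sketched earlier.
```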
|
| /external/grpc-grpc/tools/run_tests/sanity/ |
| D | check_test_filtering.py |
      40: def test_filtering(self, changed_files=[], labels=_LIST_OF_LANGUAGE_LABELS): argument
      43: default labels should be able to match all jobs
      45: :param labels: list of job labels that should be skipped
      61: if "sanity" in job.labels:
      63: all_jobs = [job for job in all_jobs if "sanity" not in job.labels]
      65: if "sanity" in job.labels:
      68: job for job in filtered_jobs if "sanity" not in job.labels
      73: for label in labels:
      75: self.assertNotIn(label, job.labels)
      78: for label in labels:
      [all …]
|
| /external/rust/crates/grpcio-sys/grpc/tools/run_tests/sanity/ |
| D | check_test_filtering.py |
      40: def test_filtering(self, changed_files=[], labels=_LIST_OF_LANGUAGE_LABELS): argument
      43: default labels should be able to match all jobs
      45: :param labels: list of job labels that should be skipped
      61: if "sanity" in job.labels:
      63: all_jobs = [job for job in all_jobs if "sanity" not in job.labels]
      65: if "sanity" in job.labels:
      68: job for job in filtered_jobs if "sanity" not in job.labels
      73: for label in labels:
      75: self.assertNotIn(label, job.labels)
      78: for label in labels:
      [all …]
|
| /external/tensorflow/tensorflow/python/kernel_tests/nn_ops/ |
| D | ctc_loss_op_test.py |
      63: def _ctc_loss_v2(labels, inputs, sequence_length, argument
      73: labels=labels,
      86: labels, argument
      96: inputs=inputs_t, labels=labels, sequence_length=seq_lens)
      227: labels = SimpleSparseTensorFrom([targets_0, targets_1])
      244: self._testCTCLoss(inputs, seq_lens, labels, loss_truth, grad_truth)
      255: labels = SimpleSparseTensorFrom([[0, 1], [1, 0]])
      265: inputs=inputs_t, labels=labels, sequence_length=seq_lens)
      268: labels=labels,
      279: labels = SimpleSparseTensorFrom([[0, 1], [1, 0]])
      [all …]
|
| D | xent_op_test_base.py |
      37: def _opFwdBwd(self, labels, logits, axis=-1): argument
      43: labels=labels, logits=logits, dim=axis)
      46: def _npXent(self, labels, logits, dim=-1): argument
      53: bp = (probs - labels)
      54: l = -np.sum(labels * np.log(probs + 1.0e-20), axis=dim)
      64: np_loss, np_gradient = self._npXent(labels=np_labels, logits=np_logits)
      86: labels=np_labels, logits=np_logits, dim=dim)
      93: labels=np.array([[-1.], [0.], [1.], [1.]]).astype(dtype),
      107: labels = [[0., 0., 0., 1.], [0., .5, .5, 0.]]
      128: np_loss, np_gradient = self._npXent(np.array(labels), np.array(logits))
      [all …]
|
| D | losses_test.py |
      109: labels = constant_op.constant([1, 9, 2, -5, -2, 6], shape=(2, 3))
      110: losses.absolute_difference(labels, predictions)
      118: labels = constant_op.constant([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
      121: losses.softmax_cross_entropy(labels, logits, weights=None)
      128: labels = constant_op.constant([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
      129: loss = losses.softmax_cross_entropy(labels, logits)
      137: labels = constant_op.constant([[0, 0, 1], [1, 0, 0], [0, 1, 0]])
      140: loss = losses.softmax_cross_entropy(labels, logits)
      148: labels = constant_op.constant([[0, 0, 1], [1, 0, 0], [0, 1, 0]])
      151: loss = losses.softmax_cross_entropy(labels, logits, weights)
      [all …]
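The tests call `losses.softmax_cross_entropy` with one-hot labels, same-shaped logits, and an optional per-example weights vector. A hedged usage sketch via the public `tf.compat.v1.losses` namespace, with made-up values:

```python
# Sketch matching the test calls above: one-hot labels + logits + weights.
import tensorflow.compat.v1 as tf

labels = tf.constant([[0., 0., 1.], [1., 0., 0.], [0., 1., 0.]])
logits = tf.constant([[10., 0., 0.], [0., 10., 0.], [0., 0., 10.]])
weights = tf.constant([1.0, 0.0, 1.0])   # drops the second example

loss = tf.losses.softmax_cross_entropy(labels, logits, weights=weights)
```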
|
| /external/tensorflow/tensorflow/tools/api/golden/v1/ |
| D | tensorflow.metrics.pbtxt |
      5: …argspec: "args=[\'labels\', \'predictions\', \'weights\', \'metrics_collections\', \'updates_colle…
      9: …argspec: "args=[\'labels\', \'predictions\', \'weights\', \'num_thresholds\', \'metrics_collection…
      13: …argspec: "args=[\'labels\', \'predictions\', \'k\', \'weights\', \'metrics_collections\', \'update…
      17: …argspec: "args=[\'labels\', \'predictions\', \'weights\', \'metrics_collections\', \'updates_colle…
      21: …argspec: "args=[\'labels\', \'predictions\', \'thresholds\', \'weights\', \'metrics_collections\',…
      25: …argspec: "args=[\'labels\', \'predictions\', \'weights\', \'metrics_collections\', \'updates_colle…
      29: …argspec: "args=[\'labels\', \'predictions\', \'thresholds\', \'weights\', \'metrics_collections\',…
      37: …argspec: "args=[\'labels\', \'predictions\', \'weights\', \'metrics_collections\', \'updates_colle…
      41: …argspec: "args=[\'labels\', \'predictions\', \'dim\', \'weights\', \'metrics_collections\', \'upda…
      45: …argspec: "args=[\'labels\', \'predictions\', \'num_classes\', \'weights\', \'metrics_collections\'…
      [all …]
|
| /external/openscreen/discovery/dnssd/impl/ |
| D | conversion_layer_unittest.cc |
      72: ASSERT_EQ(query.name.labels().size(), size_t{7}); in TEST()
      73: EXPECT_EQ(query.name.labels()[0], "instance.Id"); in TEST()
      74: EXPECT_EQ(query.name.labels()[1], "_service-id"); in TEST()
      75: EXPECT_EQ(query.name.labels()[2], "_udp"); in TEST()
      76: EXPECT_EQ(query.name.labels()[3], "192"); in TEST()
      77: EXPECT_EQ(query.name.labels()[4], "168"); in TEST()
      78: EXPECT_EQ(query.name.labels()[5], "0"); in TEST()
      79: EXPECT_EQ(query.name.labels()[6], "0"); in TEST()
      87: ASSERT_EQ(query.name.labels().size(), size_t{6}); in TEST()
      88: EXPECT_EQ(query.name.labels()[0], "_service-id"); in TEST()
      [all …]
|
| /external/tensorflow/tensorflow/python/eager/ |
| D | monitoring.py |
      143: def get_cell(self, *labels): argument
      145: if len(labels) != self._label_length:
      146: raise ValueError('The {} expects taking {} labels'.format(
      149: self._metric, *labels)
      182: metric). Each value is identified by a tuple of labels. The class allows the
      188: def __init__(self, name, description, *labels): argument
      194: *labels: The label list of the new metric.
      196: super(Counter, self).__init__('Counter', _counter_methods, len(labels),
      197: name, description, *labels)
      199: def get_cell(self, *labels): argument
      [all …]
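The matches show the `Counter(name, description, *labels)` constructor and the `get_cell(*labels)` lookup that validates the label count. A hedged usage sketch; the module is TensorFlow-internal (`tensorflow.python.eager.monitoring`), so this is written against the behaviour the snippet documents rather than a public API guarantee, and the metric name below is made up:

```python
# Sketch of the Counter/get_cell pattern shown above.
from tensorflow.python.eager import monitoring

counter = monitoring.Counter(
    '/example/my_counter', 'Counts calls, split by one label.', 'method')

counter.get_cell('train').increase_by(1)   # one cell per label tuple
counter.get_cell('eval').increase_by(2)
# Calling get_cell() with the wrong number of labels raises ValueError,
# per the length check at line 145 above.
```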
|
| /external/python/cpython3/Lib/encodings/ |
| D | idna.py |
      162: labels = result.split(b'.')
      163: for label in labels[:-1]:
      166: if len(labels[-1]) >= 64:
      171: labels = dots.split(input)
      172: if labels and not labels[-1]:
      174: del labels[-1]
      177: for label in labels:
      204: labels = input.split(b".")
      206: if labels and len(labels[-1]) == 0:
      208: del labels[-1]
      [all …]
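These lines implement what the IDNA codec enforces: a domain name is split on dots into labels, each encoded label must stay under 64 bytes, and an empty final label (a trailing dot, i.e. the DNS root) is allowed. A small sketch of the user-visible behaviour via the standard `'idna'` codec:

```python
# Sketch of the label handling the matches above enforce.
name = 'bücher.example.'
encoded = name.encode('idna')          # routed through Lib/encodings/idna.py
print(encoded)                         # b'xn--bcher-kva.example.'

labels = encoded.split(b'.')
assert all(len(label) < 64 for label in labels[:-1])
assert labels[-1] == b''               # trailing empty label from the root dot
```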
|
| /external/tensorflow/tensorflow/core/kernels/linalg/ |
| D | einsum_op_impl.h |
      56: using Labels = gtl::InlinedVector<int, 8>; variable
      57: using OperandLabels = gtl::InlinedVector<Labels, 2>;
      63: // Insert new (unnamed) broadcasting labels at the location of ellipsis.
      65: int ellipsis_axis, Labels* labels, in InsertBroadcastLabels()
      67: labels->erase(labels->begin() + ellipsis_axis); in InsertBroadcastLabels()
      68: labels->insert(labels->begin() + ellipsis_axis, num_bcast_dims, 0); in InsertBroadcastLabels()
      69: std::iota(labels->begin() + ellipsis_axis, in InsertBroadcastLabels()
      70: labels->begin() + ellipsis_axis + num_bcast_dims, in InsertBroadcastLabels()
      72: // Increment label counts. Since these are new labels, the count is set in InsertBroadcastLabels()
      78: // (non-broadcasting) label as broadcasting labels don't have a fixed in InsertBroadcastLabels()
      [all …]
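`InsertBroadcastLabels` expands the `...` in an einsum equation into fresh, unnamed broadcasting labels. The user-visible effect is easiest to see from einsum itself, where the ellipsis stands in for any leading batch dimensions; a NumPy sketch:

```python
# What the broadcasting labels above buy you: '...' covers batch dimensions.
import numpy as np

x = np.random.rand(2, 5, 3, 4)   # two leading "batch" dims covered by '...'
y = np.random.rand(2, 5, 4, 6)

out = np.einsum('...ij,...jk->...ik', x, y)
print(out.shape)                 # (2, 5, 3, 6): '...' became two broadcast labels
```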
|
| /external/autotest/server/cros/ |
| D | provision.py |
      46: This is used for determine actions to perform based on labels, for
      75: @returns: A string that is the prefix of version labels for the type
      128: Base class to give a template for mapping labels to tests.
      131: # A dictionary mapping labels to test names.
      160: def run_task_actions(cls, job, host, labels): argument
      162: Run task actions on host that correspond to the labels.
      169: @param labels: The list of job labels to work on.
      172: unactionable = cls._filter_unactionable_labels(labels)
      177: for action_item, value in cls._actions_and_values_iter(labels):
      184: def _actions_and_values_iter(cls, labels): argument
      [all …]
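The matches describe a class that maps version-label prefixes to tasks, filters out labels it cannot act on, and then iterates (action, value) pairs. An illustrative sketch of that pattern, not autotest's provision module; the action names and prefixes below are made up:

```python
# Illustrative sketch of filtering actionable labels and yielding actions.
_ACTIONS = {'cros-version': 'provision_cros', 'fwrw-version': 'provision_fw'}

def actions_and_values(labels):
    for label in labels:
        prefix, sep, value = label.partition(':')
        if sep and prefix in _ACTIONS:
            yield _ACTIONS[prefix], value

labels = ['cros-version:release-R100', 'pool:suites', 'webcam']
print(list(actions_and_values(labels)))
# [('provision_cros', 'release-R100')] -- 'pool:suites' and 'webcam' are
# unactionable here and are simply skipped.
```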
|
| /external/tensorflow/tensorflow/core/lib/monitoring/ |
| D | collection_registry_test.cc |
      49: "/tensorflow/metric0", "An example metric with no labels."); in TEST()
      73: "/tensorflow/metric", "An example metric with no labels."); in TEST()
      85: "Counter with labels.", "MyLabel0", "MyLabel1")); in TEST()
      87: "/tensorflow/test/counter_without_labels", "Counter without labels.")); in TEST()
      109: EXPECT_EQ("Counter with labels.", ld.description); in TEST()
      119: EXPECT_EQ("Counter without labels.", ud.description); in TEST()
      133: ASSERT_EQ(2, lps.points[0]->labels.size()); in TEST()
      134: EXPECT_EQ("MyLabel0", lps.points[0]->labels[0].name); in TEST()
      135: EXPECT_EQ("Label00", lps.points[0]->labels[0].value); in TEST()
      136: EXPECT_EQ("MyLabel1", lps.points[0]->labels[1].name); in TEST()
      [all …]
|
| /external/python/cpython2/Lib/encodings/ |
| D | idna.py |
      157: labels = dots.split(input)
      158: if labels and len(labels[-1])==0:
      160: del labels[-1]
      163: for label in labels:
      178: labels = dots.split(input)
      183: labels = input.split(".")
      185: if labels and len(labels[-1]) == 0:
      187: del labels[-1]
      192: for label in labels:
      206: labels = dots.split(input)
      [all …]
|