
Searched refs:num_buckets (Results 1 – 25 of 85) sorted by relevance


/external/ltp/utils/ffsb-6.0-rc2/
ffsb_stats.c
41 fsc->num_buckets = 0; in ffsb_statsc_init()
49 fsc->num_buckets++; in ffsb_statsc_addbucket()
53 fsc->num_buckets); in ffsb_statsc_addbucket()
58 fsc->buckets[fsc->num_buckets - 1].min = min; in ffsb_statsc_addbucket()
59 fsc->buckets[fsc->num_buckets - 1].max = max; in ffsb_statsc_addbucket()
91 fsc->num_buckets); in ffsb_statsd_init()
94 memset(fsd->buckets[i], 0, sizeof(uint32_t) * fsc->num_buckets); in ffsb_statsd_init()
108 unsigned num_buckets, i; in ffsb_add_data() local
122 if (fsd->config->num_buckets == 0) in ffsb_add_data()
125 num_buckets = fsd->config->num_buckets; in ffsb_add_data()
[all …]
/external/grpc-grpc/test/core/util/
histogram.cc
53 size_t num_buckets; member
66 GPR_ASSERT(bucket < h->num_buckets); in bucket_for()
89 h->num_buckets = bucket_for_unchecked(h, max_bucket_start) + 1; in grpc_histogram_create()
90 GPR_ASSERT(h->num_buckets > 1); in grpc_histogram_create()
91 GPR_ASSERT(h->num_buckets < 100000000); in grpc_histogram_create()
93 static_cast<uint32_t*>(gpr_zalloc(sizeof(uint32_t) * h->num_buckets)); in grpc_histogram_create()
116 if ((dst->num_buckets != src->num_buckets) || in grpc_histogram_merge()
121 grpc_histogram_merge_contents(dst, src->buckets, src->num_buckets, in grpc_histogram_merge()
132 GPR_ASSERT(dst->num_buckets == data_count); in grpc_histogram_merge_contents()
142 for (i = 0; i < dst->num_buckets; i++) { in grpc_histogram_merge_contents()
[all …]
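The asserts above show that two of these test histograms can only be merged when their num_buckets match; a minimal Python sketch of that invariant (an illustrative helper, not the gRPC API) is:

    def merge_histograms(dst_buckets, src_buckets):
        # Mirrors the num_buckets check asserted in grpc_histogram_merge():
        # merging is only defined for histograms with identical bucketing,
        # and then reduces to summing per-bucket counts.
        if len(dst_buckets) != len(src_buckets):
            raise ValueError("histograms must have the same num_buckets")
        return [d + s for d, s in zip(dst_buckets, src_buckets)]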
/external/boringssl/src/crypto/lhash/
lhash.c
85 size_t num_buckets; member
102 ret->num_buckets = kMinNumBuckets; in lh_new()
103 ret->buckets = OPENSSL_malloc(sizeof(LHASH_ITEM *) * ret->num_buckets); in lh_new()
108 OPENSSL_memset(ret->buckets, 0, sizeof(LHASH_ITEM *) * ret->num_buckets); in lh_new()
120 for (size_t i = 0; i < lh->num_buckets; i++) { in lh_free()
150 LHASH_ITEM **ret = &lh->buckets[hash % lh->num_buckets]; in get_next_ptr_and_hash()
167 LHASH_ITEM **ret = &lh->buckets[key_hash % lh->num_buckets]; in get_next_ptr_by_key()
210 for (i = 0; i < lh->num_buckets; i++) { in lh_rebucket()
221 lh->num_buckets = new_num_buckets; in lh_rebucket()
234 assert(lh->num_buckets >= kMinNumBuckets); in lh_maybe_resize()
[all …]
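The lhash snippets place an item at buckets[hash % num_buckets] and redistribute everything when the table grows; a rough Python sketch of that pattern (illustrative only, not the BoringSSL code) is:

    def rebucket(items, new_num_buckets):
        # items is a list of (hash, value) pairs; each lands in
        # hash % new_num_buckets, so resizing means re-bucketing every item,
        # which is what lh_rebucket() does over the old bucket array.
        buckets = [[] for _ in range(new_num_buckets)]
        for h, value in items:
            buckets[h % new_num_buckets].append((h, value))
        return buckets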
/external/python/cpython3/Modules/
hashtable.c
162 ht->num_buckets = round_size(init_size); in _Py_hashtable_new_full()
167 buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); in _Py_hashtable_new_full()
202 size += ht->num_buckets * sizeof(_Py_hashtable_entry_t *); in _Py_hashtable_size()
223 load = (double)ht->entries / ht->num_buckets; in _Py_hashtable_print_stats()
228 for (hv = 0; hv < ht->num_buckets; hv++) { in _Py_hashtable_print_stats()
243 ht, ht->entries, ht->num_buckets, load * 100.0); in _Py_hashtable_print_stats()
263 index = key_hash & (ht->num_buckets - 1); in _Py_hashtable_get_entry()
285 index = key_hash & (ht->num_buckets - 1); in _Py_hashtable_pop_entry()
305 if ((float)ht->entries / (float)ht->num_buckets < HASHTABLE_LOW) in _Py_hashtable_pop_entry()
331 index = key_hash & (ht->num_buckets - 1); in _Py_hashtable_set()
[all …]
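Unlike the modulo scheme above, _Py_hashtable indexes buckets with key_hash & (num_buckets - 1), which only works because round_size() keeps num_buckets a power of two. A small Python sketch of the same trick, under that assumption:

    def bucket_index(key_hash, num_buckets):
        # Requires num_buckets to be a power of two; the bitmask is then
        # equivalent to key_hash % num_buckets but avoids a division.
        assert num_buckets > 0 and (num_buckets & (num_buckets - 1)) == 0
        return key_hash & (num_buckets - 1)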
/external/libtextclassifier/annotator/
quantization_test.cc
87 const int num_buckets = 3; in TEST() local
92 std::vector<float> scales(num_buckets); in TEST()
93 std::vector<uint8> embeddings(bytes_per_embedding * num_buckets); in TEST()
110 const int num_buckets = 3; in TEST() local
115 std::vector<float> scales(num_buckets, 1.0); in TEST()
116 std::vector<uint8> embeddings(bytes_per_embedding * num_buckets, 0xFF); in TEST()
130 const int num_buckets = 3; in TEST() local
135 std::vector<float> scales(num_buckets, 1.0); in TEST()
137 std::vector<uint8> embeddings(bytes_per_embedding * num_buckets, 0); in TEST()
model-executor.cc
70 int num_buckets = embeddings->dims->data[0]; in FromBuffer() local
72 if (scales->dims->size != 2 || scales->dims->data[0] != num_buckets || in FromBuffer()
84 std::move(executor), quantization_bits, num_buckets, bytes_per_embedding, in FromBuffer()
91 int num_buckets, int bytes_per_embedding, int output_embedding_size, in TFLiteEmbeddingExecutor() argument
97 num_buckets_(num_buckets), in TFLiteEmbeddingExecutor()
112 full_num_buckets_ = num_buckets; in TFLiteEmbeddingExecutor()
/external/libtextclassifier/utils/
token-feature-extractor_test.cc
39 options.num_buckets = 1000; in TEST_F()
109 options.num_buckets = 1000; in TEST_F()
138 options.num_buckets = 1000; in TEST_F()
208 options.num_buckets = 1000; in TEST_F()
239 options.num_buckets = 1000; in TEST_F()
274 options.num_buckets = 1000; in TEST_F()
298 options.num_buckets = 1000; in TEST_F()
322 options.num_buckets = 1000; in TEST_F()
346 options.num_buckets = 1000; in TEST_F()
366 options.num_buckets = 1000; in TEST_F()
[all …]
/external/tensorflow/tensorflow/contrib/feature_column/python/feature_column/
sequence_feature_column_test.py
112 key='aaa', num_buckets=vocabulary_size)
118 key='bbb', num_buckets=vocabulary_size)
155 key='aaa', num_buckets=vocabulary_size)
207 key='aaa', num_buckets=vocabulary_size)
209 key='bbb', num_buckets=vocabulary_size)
250 key='aaa', num_buckets=vocabulary_size)
252 key='bbb', num_buckets=vocabulary_size)
319 key='aaa', num_buckets=vocabulary_size_a)
322 key='bbb', num_buckets=vocabulary_size_b)
348 key='aaa', num_buckets=vocabulary_size)
[all …]
/external/tensorflow/tensorflow/python/feature_column/
sequence_feature_column_test.py
115 key='aaa', num_buckets=vocabulary_size)
121 key='bbb', num_buckets=vocabulary_size)
157 key='aaa', num_buckets=vocabulary_size)
211 key='aaa', num_buckets=vocabulary_size)
213 key='bbb', num_buckets=vocabulary_size)
254 key='aaa', num_buckets=vocabulary_size)
256 key='bbb', num_buckets=vocabulary_size)
321 key='aaa', num_buckets=vocabulary_size_a)
324 key='bbb', num_buckets=vocabulary_size_b)
348 key='aaa', num_buckets=vocabulary_size)
[all …]
feature_column_v2_test.py
530 self.assertEqual(6, b.num_buckets)
872 self.assertEqual(10, column.num_buckets)
1025 self.assertEqual(4, wire_column.num_buckets)
1050 self.assertEqual(4, wire_column.num_buckets)
1173 self.assertEqual(15, crossed.num_buckets)
1372 def num_buckets(self): member in CrossedColumnTest.test_linear_model_with_weights._TestColumnWithWeights
1471 def num_buckets(self): member in CrossedColumnTest.test_old_linear_model_with_weights._TestColumnWithWeights
1476 return self.num_buckets
1727 def num_buckets(self): member in LinearModelTest.test_dense_and_sparse_column._DenseAndSparseColumn
2448 def num_buckets(self): member in OldLinearModelTest.test_dense_and_sparse_column._DenseAndSparseColumn
[all …]
feature_column_v2.py
521 first_dim = column.num_buckets
1049 num_buckets = c0._num_buckets # pylint: disable=protected-access
1067 if num_buckets != c._num_buckets: # pylint: disable=protected-access
1072 c0, num_buckets, c, c._num_buckets)) # pylint: disable=protected-access
1221 num_buckets = c0.num_buckets
1237 if num_buckets != c.num_buckets:
1242 c0, num_buckets, c, c.num_buckets))
1250 num_buckets, trainable, shared_embedding_collection_name)
1811 def categorical_column_with_identity(key, num_buckets, default_value=None): argument
1865 if num_buckets < 1:
[all …]
feature_column_test.py
2652 key='a', num_buckets=3)
2696 key='a', num_buckets=3)
2867 key='aaa', num_buckets=3)
2869 key='bbb', num_buckets=3)
2957 animal = fc._categorical_column_with_identity('animal', num_buckets=4)
3053 key='aaa', num_buckets=3)
3055 key='bbb', num_buckets=3)
3087 key='aaa', num_buckets=3)
3089 key='bbb', num_buckets=3)
4249 column = fc._categorical_column_with_identity(key='aaa', num_buckets=3)
[all …]
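The tests above exercise categorical_column_with_identity(key, num_buckets, default_value=None), where num_buckets is the size of the integer id space. A hedged usage sketch of the public wrapper (assuming the tf.feature_column namespace) is:

    import tensorflow as tf

    # Integer inputs in [0, num_buckets) are used directly as category ids;
    # out-of-range values map to default_value instead of raising.
    animal = tf.feature_column.categorical_column_with_identity(
        key='animal', num_buckets=4, default_value=0)
    animal_one_hot = tf.feature_column.indicator_column(animal)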
/external/tensorflow/tensorflow/python/tpu/
feature_column_test.py
48 key='aaa', num_buckets=3)
64 key='aaa', num_buckets=3)
122 key='aaa', num_buckets=vocabulary_size)
148 key='aaa', num_buckets=3)
150 key='bbb', num_buckets=3)
185 key='aaa', num_buckets=3)
187 key='bbb', num_buckets=3)
265 key='aaa', num_buckets=vocabulary_size)
267 key='bbb', num_buckets=vocabulary_size)
/external/tensorflow/tensorflow/contrib/training/python/training/
bucket_ops.py
66 num_buckets, argument
163 if len(batch_size) != num_buckets:
167 batch_size = [batch_size] * num_buckets
170 bucket_capacities = [capacity] * num_buckets
171 if len(bucket_capacities) != num_buckets:
174 "elements." % (str(bucket_capacities), num_buckets))
199 for i in range(num_buckets):
238 for i in range(num_buckets)
412 num_buckets=len(bucket_boundaries) + 1,
bucket_ops_test.py
107 num_buckets=2,
157 num_buckets=2,
199 num_buckets=2,
280 num_buckets=2,
317 which_bucket=constant_op.constant(0), num_buckets=2,
403 num_buckets = len(bucket_boundaries) + 2
405 num_pairs_to_enqueue - (batch_size - 1) * num_buckets,
/external/libtextclassifier/actions/
actions-suggestions_test.cc
1069 options_->num_buckets = 1000; in EmbeddingTest()
1104 options_->num_buckets)); in TEST_F()
1107 options_->num_buckets)); in TEST_F()
1110 options_->num_buckets)); in TEST_F()
1128 options_->num_buckets)); in TEST_F()
1131 options_->num_buckets)); in TEST_F()
1134 options_->num_buckets)); in TEST_F()
1154 options_->num_buckets)); in TEST_F()
1157 options_->num_buckets)); in TEST_F()
1174 options_->num_buckets)); in TEST_F()
[all …]
/external/tensorflow/tensorflow/python/kernel_tests/boosted_trees/
stats_ops_test.py
341 num_buckets=2).eval())
347 num_buckets = 4
356 num_buckets) # shape=[max_splits, num_buckets, num_features, 2]
369 num_buckets = 4
379 num_buckets) # shape=[max_splits, num_buckets, num_features, 2]
398 num_buckets = 1
410 num_buckets) # shape=[max_splits, num_buckets, num_features, 2]
/external/libdrm/intel/
uthash.h
124 HASH_TO_BKT(hashval, (head)->hh.tbl->num_buckets, _hf_bkt); \
179 (head)->hh.tbl->num_buckets = HASH_INITIAL_NUM_BUCKETS; \
264 HASH_TO_BKT(hashval, (head)->hh.tbl->num_buckets, _ha_bkt); \
300 HASH_TO_BKT(hashval, (head)->hh.tbl->num_buckets, _ha_bkt); \
342 (head)->hh.tbl->num_buckets*sizeof(struct UT_hash_bucket) ); \
365 HASH_TO_BKT( _hd_hh_del->hashv, (head)->hh.tbl->num_buckets, _hd_bkt); \
408 for( _bkt_i = 0; _bkt_i < (head)->hh.tbl->num_buckets; _bkt_i++) { \
798 2UL * tbl->num_buckets * sizeof(struct UT_hash_bucket)); \
801 2UL * tbl->num_buckets * sizeof(struct UT_hash_bucket)); \
804 (((tbl->num_items & ((tbl->num_buckets*2U)-1U)) != 0U) ? 1U : 0U); \
[all …]
/external/tensorflow/tensorflow/contrib/learn/python/learn/estimators/
test_data.py
32 def get_quantile_based_buckets(feature_values, num_buckets): argument
35 ([100 * (i + 1.) / (num_buckets + 1.) for i in range(num_buckets)]))
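The helper above spaces its bucket boundaries at evenly divided percentiles of the feature values; a sketch of the same computation using NumPy (the np.percentile call is an assumption about how the surrounding code evaluates those quantiles) is:

    import numpy as np

    def get_quantile_based_buckets(feature_values, num_buckets):
        # Boundaries at the 100*(i+1)/(num_buckets+1) percentiles, i.e.
        # num_buckets evenly spaced quantiles of the feature distribution.
        quantiles = [100.0 * (i + 1.0) / (num_buckets + 1.0)
                     for i in range(num_buckets)]
        return list(np.percentile(np.asarray(feature_values), quantiles))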
/external/libdrm/etnaviv/
etnaviv_bo_cache.c
35 unsigned i = cache->num_buckets; in add_bucket()
41 cache->num_buckets++; in add_bucket()
77 for (i = 0; i < cache->num_buckets; i++) { in etna_bo_cache_cleanup()
103 for (i = 0; i < cache->num_buckets; i++) { in get_bucket()
/external/libdrm/freedreno/
freedreno_bo_cache.c
38 unsigned int i = cache->num_buckets; in add_bucket()
44 cache->num_buckets++; in add_bucket()
89 for (i = 0; i < cache->num_buckets; i++) { in fd_bo_cache_cleanup()
116 for (i = 0; i < cache->num_buckets; i++) { in get_bucket()
/external/tensorflow/tensorflow/examples/get_started/regression/
imports85.py
122 num_buckets = 1000000
123 bucket_id = tf.string_to_hash_bucket_fast(line, num_buckets)
125 return bucket_id < int(train_fraction * num_buckets)
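imports85.py uses the hash bucket as a deterministic train/eval split: a line belongs to the training set iff its bucket id falls in the first train_fraction of the num_buckets range. A sketch of that predicate (TF 2.x spelling assumed; the snippet above uses the TF 1.x name tf.string_to_hash_bucket_fast):

    import tensorflow as tf

    def in_training_set(line, train_fraction, num_buckets=1000000):
        # Hash the raw CSV line into one of num_buckets buckets; the split is
        # stable because the same line always hashes to the same bucket.
        bucket_id = tf.strings.to_hash_bucket_fast(line, num_buckets)
        return bucket_id < int(train_fraction * num_buckets)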
/external/tensorflow/tensorflow/contrib/layers/python/ops/
sparse_feature_cross_op.py
45 def sparse_feature_cross(inputs, hashed_output=False, num_buckets=0, argument
104 num_buckets,
117 num_buckets,
/external/tensorflow/tensorflow/core/kernels/boosted_trees/
stats_ops.cc
44 const int64 num_buckets = stats_summary_list[0].dim_size(1); in Compute() local
88 cum_grad.reserve(num_buckets); in Compute()
89 cum_hess.reserve(num_buckets); in Compute()
102 for (int bucket = 0; bucket < num_buckets; ++bucket) { in Compute()
124 for (int bucket = 0; bucket < num_buckets; ++bucket) { in Compute()
/external/tensorflow/tensorflow/core/api_def/base_api/
api_def_SparseCross.pbtxt
54 name: "num_buckets"
57 output = hashed_value%num_buckets if num_buckets > 0 else hashed_value.
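Written out as plain Python, the rule quoted from the api_def is simply:

    def bucketize(hashed_value, num_buckets):
        # num_buckets == 0 disables bucketing and returns the raw hash.
        return hashed_value % num_buckets if num_buckets > 0 else hashed_value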
