
Searched refs: table (Results 1 – 15 of 15) sorted by relevance

/lib/
crc8.c:29 void crc8_populate_msb(u8 table[CRC8_TABLE_SIZE], u8 polynomial) in crc8_populate_msb()
35 table[0] = 0; in crc8_populate_msb()
40 table[i+j] = table[j] ^ t; in crc8_populate_msb()
51 void crc8_populate_lsb(u8 table[CRC8_TABLE_SIZE], u8 polynomial) in crc8_populate_lsb()
56 table[0] = 0; in crc8_populate_lsb()
61 table[i+j] = table[j] ^ t; in crc8_populate_lsb()
74 u8 crc8(const u8 table[CRC8_TABLE_SIZE], u8 *pdata, size_t nbytes, u8 crc) in crc8()
78 crc = table[(crc ^ *pdata++) & 0xff]; in crc8()
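
The crc8 hits above pair a one-time table fill (crc8_populate_msb()/crc8_populate_lsb()) with a per-buffer pass through crc8(). A minimal usage sketch, assuming the linux/crc8.h API shown; the polynomial value and the my_* names are illustrative, not taken from the search results:

#include <linux/crc8.h>

#define MY_CRC8_POLY	0x07	/* assumed example polynomial */

DECLARE_CRC8_TABLE(my_crc8_table);	/* static u8 my_crc8_table[CRC8_TABLE_SIZE] */

static void my_crc8_setup(void)
{
	/* Fill all 256 entries once, MSB-first variant of the generator. */
	crc8_populate_msb(my_crc8_table, MY_CRC8_POLY);
}

static u8 my_crc8(const u8 *buf, size_t len)
{
	/* CRC8_INIT_VALUE is the seed defined alongside the API in crc8.h. */
	return crc8(my_crc8_table, (u8 *)buf, len, CRC8_INIT_VALUE);
}
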
sg_pool.c:84 void sg_free_table_chained(struct sg_table *table, in sg_free_table_chained() argument
87 if (table->orig_nents <= nents_first_chunk) in sg_free_table_chained()
93 __sg_free_table(table, SG_CHUNK_SIZE, nents_first_chunk, sg_pool_free); in sg_free_table_chained()
111 int sg_alloc_table_chained(struct sg_table *table, int nents, in sg_alloc_table_chained() argument
120 table->nents = table->orig_nents = nents; in sg_alloc_table_chained()
121 sg_init_table(table->sgl, nents); in sg_alloc_table_chained()
132 ret = __sg_alloc_table(table, nents, SG_CHUNK_SIZE, in sg_alloc_table_chained()
136 sg_free_table_chained(table, nents_first_chunk); in sg_alloc_table_chained()
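
sg_alloc_table_chained()/sg_free_table_chained() above let a caller keep a small preallocated first chunk and fall back to the pooled allocator only for larger requests. A hedged sketch of that pattern, assuming the four-argument form shown; struct my_request, MY_INLINE_SGS and the callers are assumptions, not taken from sg_pool.c:

#include <linux/scatterlist.h>

#define MY_INLINE_SGS	2	/* assumed size of the preallocated first chunk */

struct my_request {
	struct sg_table		sgt;
	struct scatterlist	inline_sgl[MY_INLINE_SGS];
};

static int my_request_map(struct my_request *req, int nents)
{
	/* Small requests stay in inline_sgl; larger ones chain extra chunks. */
	return sg_alloc_table_chained(&req->sgt, nents,
				      req->inline_sgl, MY_INLINE_SGS);
}

static void my_request_unmap(struct my_request *req)
{
	sg_free_table_chained(&req->sgt, MY_INLINE_SGS);
}
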
scatterlist.c:192 void __sg_free_table(struct sg_table *table, unsigned int max_ents, in __sg_free_table() argument
198 if (unlikely(!table->sgl)) in __sg_free_table()
201 sgl = table->sgl; in __sg_free_table()
202 while (table->orig_nents) { in __sg_free_table()
203 unsigned int alloc_size = table->orig_nents; in __sg_free_table()
221 table->orig_nents -= sg_size; in __sg_free_table()
230 table->sgl = NULL; in __sg_free_table()
239 void sg_free_table(struct sg_table *table) in sg_free_table() argument
241 __sg_free_table(table, SG_MAX_SINGLE_ALLOC, false, sg_kfree); in sg_free_table()
266 int __sg_alloc_table(struct sg_table *table, unsigned int nents, in __sg_alloc_table() argument
[all …]
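
sg_free_table() here is a thin wrapper around __sg_free_table(), just as sg_alloc_table() wraps __sg_alloc_table() with SG_MAX_SINGLE_ALLOC. A usage sketch of that non-chained pair, assuming a caller that maps an array of pages; the my_* helpers are illustrative only:

#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/err.h>

static struct sg_table *my_build_sgt(struct page **pages, unsigned int nr_pages)
{
	struct sg_table *sgt;
	struct scatterlist *sg;
	unsigned int i;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	/* Allocates (and chains, if needed) the scatterlist entries. */
	if (sg_alloc_table(sgt, nr_pages, GFP_KERNEL)) {
		kfree(sgt);
		return ERR_PTR(-ENOMEM);
	}

	for_each_sg(sgt->sgl, sg, nr_pages, i)
		sg_set_page(sg, pages[i], PAGE_SIZE, 0);

	return sgt;
}

static void my_free_sgt(struct sg_table *sgt)
{
	sg_free_table(sgt);	/* tears down what sg_alloc_table() built */
	kfree(sgt);
}
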
gen_crc32table.c:93 static void output_table(uint32_t (*table)[256], int rows, int len, char *trans) in output_table()
102 printf("%s(0x%8.8xL), ", trans, table[j][i]); in output_table()
104 printf("%s(0x%8.8xL)},\n", trans, table[j][len - 1]); in output_table()
dynamic_debug.c:61 struct ddebug_table *table; member
730 iter->table = NULL; in ddebug_iter_first()
734 iter->table = list_entry(ddebug_tables.next, in ddebug_iter_first()
737 return &iter->table->ddebugs[iter->idx]; in ddebug_iter_first()
748 if (iter->table == NULL) in ddebug_iter_next()
750 if (++iter->idx == iter->table->num_ddebugs) { in ddebug_iter_next()
753 if (list_is_last(&iter->table->link, &ddebug_tables)) { in ddebug_iter_next()
754 iter->table = NULL; in ddebug_iter_next()
757 iter->table = list_entry(iter->table->link.next, in ddebug_iter_next()
760 return &iter->table->ddebugs[iter->idx]; in ddebug_iter_next()
[all …]
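
The ddebug_table iterator above walks the per-module tables that back the dynamic_debug control file; the entries it returns are generated from pr_debug()/dev_dbg() call sites. A minimal hedged example of such a call site (my_do_work is illustrative):

#include <linux/printk.h>

static void my_do_work(int id)
{
	/*
	 * With CONFIG_DYNAMIC_DEBUG=y this expands to a descriptor that is
	 * registered in a ddebug_table and toggled via the control file.
	 */
	pr_debug("starting work item %d\n", id);
}
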
devres.c:268 void __iomem *table[PCIM_IOMAP_MAX]; member
278 if (this->table[i]) in pcim_iomap_release()
279 pci_iounmap(dev, this->table[i]); in pcim_iomap_release()
301 return dr->table; in pcim_iomap_table()
307 return dr->table; in pcim_iomap_table()
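
The devres hit is the managed PCI BAR mapping table: pcim_iomap()/pcim_iomap_regions() populate it, pcim_iomap_table() hands the array back to the driver, and pcim_iomap_release() unmaps everything on teardown. A hedged probe-path sketch; the BAR index, driver name and my_pci_probe are assumptions:

#include <linux/pci.h>

#define MY_BAR	0	/* assumed BAR index */

static int my_pci_probe(struct pci_dev *pdev, const struct pci_device_id *id)
{
	void __iomem *regs;
	int ret;

	ret = pcim_enable_device(pdev);
	if (ret)
		return ret;

	/* Managed ioremap of the BAR; undone automatically via devres. */
	ret = pcim_iomap_regions(pdev, 1 << MY_BAR, "my_driver");
	if (ret)
		return ret;

	regs = pcim_iomap_table(pdev)[MY_BAR];
	if (!regs)
		return -ENOMEM;

	/* ... program the device through regs ... */
	return 0;
}
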
parser.c:105 int match_token(char *s, const match_table_t table, substring_t args[]) in match_token() argument
109 for (p = table; !match_one(s, p->pattern, args) ; p++) in match_token()
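
match_token() walks the match_table_t until match_one() accepts a pattern and returns that entry's token; it is the usual building block for mount-option parsing. A hedged sketch with made-up option names (Opt_ro, Opt_size, "size=%u" and my_parse_option are illustrative):

#include <linux/parser.h>
#include <linux/errno.h>

enum { Opt_ro, Opt_size, Opt_err };

static const match_table_t my_tokens = {
	{ Opt_ro,	"ro" },
	{ Opt_size,	"size=%u" },
	{ Opt_err,	NULL }		/* sentinel: catches anything left over */
};

static int my_parse_option(char *opt)
{
	substring_t args[MAX_OPT_ARGS];
	int size;

	switch (match_token(opt, my_tokens, args)) {
	case Opt_ro:
		return 0;
	case Opt_size:
		/* match_int() converts the %u substring captured in args[0]. */
		return match_int(&args[0], &size);
	default:
		return -EINVAL;
	}
}
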
rhashtable.c:33 union nested_table __rcu *table; member
72 ntbl = rcu_dereference_raw(ntbl->table); in nested_table_free()
1177 ntbl = rht_dereference_bucket_rcu(ntbl[index].table, tbl, hash); in __rht_bucket_nested()
1182 ntbl = rht_dereference_bucket_rcu(ntbl[index].table, in __rht_bucket_nested()
1218 ntbl = nested_table_alloc(ht, &ntbl[index].table, in rht_bucket_nested_insert()
1225 ntbl = nested_table_alloc(ht, &ntbl[index].table, in rht_bucket_nested_insert()
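
The nested_table union is the fallback bucket layout rhashtable drops to when a flat bucket array cannot be allocated; callers never touch it and stay on the regular API. A hedged sketch of that public API (struct my_obj and my_params are illustrative; rhashtable_init() with &my_params is assumed to have run):

#include <linux/rhashtable.h>

struct my_obj {
	u32			key;
	struct rhash_head	node;	/* linkage owned by the hash table */
};

static const struct rhashtable_params my_params = {
	.key_len	= sizeof(u32),
	.key_offset	= offsetof(struct my_obj, key),
	.head_offset	= offsetof(struct my_obj, node),
	.automatic_shrinking = true,
};

static int my_add(struct rhashtable *ht, struct my_obj *obj)
{
	return rhashtable_insert_fast(ht, &obj->node, my_params);
}

static struct my_obj *my_find(struct rhashtable *ht, u32 key)
{
	return rhashtable_lookup_fast(ht, &key, my_params);
}
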
Kconfig:164 This is the fastest algorithm, but comes with an 8KiB lookup table.
165 Most modern processors have enough cache to hold this table without
176 table.
184 is not particularly fast, but has a small 256 byte lookup table.
192 no lookup table. This is provided as a debugging option.
Kconfig.debug:1372 bool "Debug SG table operations"
1825 tristate "Perform selftest on resizable hash table"
/lib/zlib_inflate/
inftrees.c:24 code **table, unsigned *bits, unsigned short *work) in zlib_inflate_table() argument
107 *(*table)++ = this; /* make a table to force an error */ in zlib_inflate_table()
108 *(*table)++ = this; in zlib_inflate_table()
190 next = *table; /* current table to fill in */ in zlib_inflate_table()
271 (*table)[low].op = (unsigned char)curr; in zlib_inflate_table()
272 (*table)[low].bits = (unsigned char)root; in zlib_inflate_table()
273 (*table)[low].val = (unsigned short)(next - *table); in zlib_inflate_table()
292 next = *table; in zlib_inflate_table()
312 *table += used; in zlib_inflate_table()
inftrees.h:57 unsigned codes, code **table,
/lib/zstd/
fse.h:355 const void *table; /* precise table may vary, depending on U16 */ member
487 DStatePtr->table = dt + 1; in FSE_initDState()
492 FSE_decode_t const DInfo = ((const FSE_decode_t *)(DStatePtr->table))[DStatePtr->state]; in FSE_peekSymbol()
498 FSE_decode_t const DInfo = ((const FSE_decode_t *)(DStatePtr->table))[DStatePtr->state]; in FSE_updateState()
506 FSE_decode_t const DInfo = ((const FSE_decode_t *)(DStatePtr->table))[DStatePtr->state]; in FSE_decodeSymbol()
519 FSE_decode_t const DInfo = ((const FSE_decode_t *)(DStatePtr->table))[DStatePtr->state]; in FSE_decodeSymbolFast()
huf_decompress.c:74 static DTableDesc HUF_getDTableDesc(const HUF_DTable *table) in HUF_getDTableDesc() argument
77 memcpy(&dtd, table, sizeof(dtd)); in HUF_getDTableDesc()
compress.c:397 static void ZSTD_reduceTable(U32 *const table, U32 const size, U32 const reducerValue) in ZSTD_reduceTable()
401 if (table[u] < reducerValue) in ZSTD_reduceTable()
402 table[u] = 0; in ZSTD_reduceTable()
404 table[u] -= reducerValue; in ZSTD_reduceTable()