| /lib/ |
| D | fw_table.c |
    31  acpi_get_entry_type(struct acpi_subtable_entry *entry)  in acpi_get_entry_type() argument
    33          switch (entry->type) {  in acpi_get_entry_type()
    35                  return entry->hdr->common.type;  in acpi_get_entry_type()
    37                  return entry->hdr->hmat.type;  in acpi_get_entry_type()
    41                  return entry->hdr->cedt.type;  in acpi_get_entry_type()
    43                  return entry->hdr->cdat.type;  in acpi_get_entry_type()
    49  acpi_get_entry_length(struct acpi_subtable_entry *entry)  in acpi_get_entry_length() argument
    51          switch (entry->type) {  in acpi_get_entry_length()
    53                  return entry->hdr->common.length;  in acpi_get_entry_length()
    55                  return entry->hdr->hmat.length;  in acpi_get_entry_length()
    [all …]
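
acpi_get_entry_type() is a tagged-union accessor: it switches on entry->type to pick which ACPI header layout behind entry->hdr is live. A minimal standalone sketch of that pattern, assuming illustrative struct, enum, and field names (the kernel's entry->hdr is really a pointer to a union of ACPI table headers):

    /* Hypothetical tagged-union dispatch, modeled on acpi_get_entry_type(). */
    #include <stdint.h>

    enum subtable_kind { SUB_COMMON, SUB_HMAT };

    struct common_hdr { uint8_t type; uint8_t length; };
    struct hmat_hdr   { uint16_t type; uint32_t length; };

    struct subtable_entry {
            enum subtable_kind kind;        /* selects the live union member */
            union {
                    struct common_hdr common;
                    struct hmat_hdr hmat;
            } hdr;
    };

    static unsigned int get_entry_type(const struct subtable_entry *entry)
    {
            switch (entry->kind) {
            case SUB_COMMON:
                    return entry->hdr.common.type;
            case SUB_HMAT:
                    return entry->hdr.hmat.type;
            }
            return 0;
    }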
|
| D | xarray.c |
   183          void *entry;  in xas_start() local
   190          entry = xa_head(xas->xa);  in xas_start()
   191          if (!xa_is_node(entry)) {  in xas_start()
   195          if ((xas->xa_index >> xa_to_node(entry)->shift) > XA_CHUNK_MASK)  in xas_start()
   200          return entry;  in xas_start()
   207          void *entry = xa_entry(xas->xa, node, offset);  in xas_descend() local
   210          while (xa_is_sibling(entry)) {  in xas_descend()
   211                  offset = xa_to_sibling(entry);  in xas_descend()
   212                  entry = xa_entry(xas->xa, node, offset);  in xas_descend()
   213                  if (node->shift && xa_is_node(entry))  in xas_descend()
   [all …]
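
xas_start() and xas_descend() are the internals of an XArray walk; callers normally stay on the high-level API. A hedged sketch of routine xa_store()/xa_load()/xa_for_each() use (my_array and xarray_demo() are illustrative names, not from the source above):

    #include <linux/xarray.h>
    #include <linux/printk.h>

    static DEFINE_XARRAY(my_array);

    static int xarray_demo(void *object)
    {
            unsigned long index;
            void *entry;

            /* Store object at index 5; xa_store() returns the displaced
             * entry, or an error entry on allocation failure. */
            entry = xa_store(&my_array, 5, object, GFP_KERNEL);
            if (xa_is_err(entry))
                    return xa_err(entry);

            /* Lookup never sleeps and is safe under RCU. */
            entry = xa_load(&my_array, 5);

            /* Visit every present entry in index order. */
            xa_for_each(&my_array, index, entry)
                    pr_info("index %lu -> %p\n", index, entry);

            return 0;
    }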
|
| D | list_debug.c |
    45  bool __list_del_entry_valid_or_report(struct list_head *entry)  in __list_del_entry_valid_or_report() argument
    49          prev = entry->prev;  in __list_del_entry_valid_or_report()
    50          next = entry->next;  in __list_del_entry_valid_or_report()
    53                  "list_del corruption, %px->next is NULL\n", entry) ||  in __list_del_entry_valid_or_report()
    55                  "list_del corruption, %px->prev is NULL\n", entry) ||  in __list_del_entry_valid_or_report()
    58                  entry, LIST_POISON1) ||  in __list_del_entry_valid_or_report()
    61                  entry, LIST_POISON2) ||  in __list_del_entry_valid_or_report()
    62              CHECK_DATA_CORRUPTION(prev->next != entry,  in __list_del_entry_valid_or_report()
    64                  entry, prev->next, prev) ||  in __list_del_entry_valid_or_report()
    65              CHECK_DATA_CORRUPTION(next->prev != entry,  in __list_del_entry_valid_or_report()
    [all …]
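
Each CHECK_DATA_CORRUPTION() above verifies one precondition of a safe unlink: the neighbour pointers are non-NULL, are not the LIST_POISON values left by an earlier list_del(), and still point back at entry. A condensed sketch of that invariant, assuming the hypothetical name list_del_is_safe() (with CONFIG_DEBUG_LIST the kernel runs the real check inside list_del() itself):

    #include <linux/list.h>

    static bool list_del_is_safe(struct list_head *entry)
    {
            struct list_head *prev = entry->prev;
            struct list_head *next = entry->next;

            return prev && next &&            /* no NULL neighbours        */
                   next != LIST_POISON1 &&    /* not deleted once already  */
                   prev != LIST_POISON2 &&
                   prev->next == entry &&     /* neighbours link back here */
                   next->prev == entry;
    }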
|
| D | llist.c |
    55          struct llist_node *entry, *next;  in llist_del_first() local
    57                  entry = smp_load_acquire(&head->first);  in llist_del_first()
    59                  if (entry == NULL)  in llist_del_first()
    61                  next = READ_ONCE(entry->next);  in llist_del_first()
    62          } while (!try_cmpxchg(&head->first, &entry, next));  in llist_del_first()
    64          return entry;  in llist_del_first()
    82          struct llist_node *entry, *next;  in llist_del_first_this() local
    85                  entry = smp_load_acquire(&head->first);  in llist_del_first_this()
    87                  if (entry != this)  in llist_del_first_this()
    89                  next = READ_ONCE(entry->next);  in llist_del_first_this()
    [all …]
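
llist_del_first() pops the head without locks: load head->first with acquire semantics, read its next pointer, and let try_cmpxchg() retry if another CPU moved the head in between. A hedged usage sketch (struct work_item and both functions are illustrative; note that llist_del_all() hands back the batch in reverse insertion order):

    #include <linux/llist.h>
    #include <linux/slab.h>
    #include <linux/printk.h>

    struct work_item {
            struct llist_node node;
            int payload;
    };

    static LLIST_HEAD(pending);

    /* Safe from any context, including NMI: push is a lock-less cmpxchg. */
    static void producer(int value)
    {
            struct work_item *item = kmalloc(sizeof(*item), GFP_ATOMIC);

            if (!item)
                    return;
            item->payload = value;
            llist_add(&item->node, &pending);
    }

    /* Detach the whole list at once, then walk it at leisure. */
    static void consumer(void)
    {
            struct llist_node *batch = llist_del_all(&pending);
            struct work_item *item, *tmp;

            llist_for_each_entry_safe(item, tmp, batch, node) {
                    pr_info("payload %d\n", item->payload);
                    kfree(item);
            }
    }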
|
| D | logic_pio.c |
   240          struct logic_pio_hwaddr *entry = find_io_range(addr);  \
   242          if (entry)  \
   243                  ret = entry->ops->in(entry->hostdata,  \
   256          struct logic_pio_hwaddr *entry = find_io_range(addr);  \
   258          if (entry)  \
   259                  entry->ops->out(entry->hostdata,  \
   272          struct logic_pio_hwaddr *entry = find_io_range(addr);  \
   274          if (entry)  \
   275                  entry->ops->ins(entry->hostdata,  \
   289          struct logic_pio_hwaddr *entry = find_io_range(addr);  \
   [all …]
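
The trailing backslashes show these lines sit inside macros that stamp out one indirect I/O accessor per access width, each routed through the ops of whichever registered range covers the address. A hedged standalone sketch of that stamping pattern, with all names invented for illustration:

    #include <stdint.h>
    #include <stddef.h>

    struct io_range {
            void *hostdata;
            uint32_t (*in)(void *hostdata, unsigned long addr, size_t dwidth);
    };

    /* Assumed helper: map an address to its registered range, if any. */
    struct io_range *find_io_range(unsigned long addr);

    #define BUILD_IO_IN(bwl, type)                                  \
    static type my_in##bwl(unsigned long addr)                      \
    {                                                               \
            struct io_range *entry = find_io_range(addr);           \
            type ret = (type)~0; /* "floating bus" when unmapped */ \
                                                                    \
            if (entry)                                              \
                    ret = (type)entry->in(entry->hostdata, addr,    \
                                          sizeof(type));            \
            return ret;                                             \
    }

    BUILD_IO_IN(b, uint8_t)         /* defines my_inb() */
    BUILD_IO_IN(w, uint16_t)        /* defines my_inw() */
    BUILD_IO_IN(l, uint32_t)        /* defines my_inl() */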
|
| D | test_maple_tree.c |
   394          void *entry, *entry2;  in check_find() local
   426          while ((entry = mas_find(&mas, 268435456)) != NULL) {  in check_find()
   428                  MT_BUG_ON(mt, xa_mk_value(val) != entry);  in check_find()
   430                          MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);  in check_find()
   442          mas_for_each(&mas, entry, ULONG_MAX) {  in check_find()
   444                  MT_BUG_ON(mt, xa_mk_value(val) != entry);  in check_find()
   446                          MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);  in check_find()
   458          mas_for_each(&mas, entry, ULONG_MAX) {  in check_find()
   460                  MT_BUG_ON(mt, xa_mk_value(val) != entry);  in check_find()
   462                          MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);  in check_find()
   [all …]
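
check_find() drives the tree through mas_find() and the mas_for_each() iterator, both of which advance a ma_state cursor. A hedged sketch of the same cursor pattern outside the test harness (maple_demo() is an illustrative name):

    #include <linux/maple_tree.h>
    #include <linux/xarray.h>
    #include <linux/printk.h>

    static DEFINE_MTREE(mt);

    static int maple_demo(void)
    {
            MA_STATE(mas, &mt, 0, 0);       /* cursor starting at index 0 */
            void *entry;
            int ret;

            /* Bind a value entry to the whole index range [10, 20]. */
            ret = mtree_store_range(&mt, 10, 20, xa_mk_value(42), GFP_KERNEL);
            if (ret)
                    return ret;

            /* Walk every entry up to ULONG_MAX under the tree lock. */
            mas_lock(&mas);
            mas_for_each(&mas, entry, ULONG_MAX)
                    pr_info("[%lu, %lu] -> %lu\n", mas.index, mas.last,
                            xa_to_value(entry));
            mas_unlock(&mas);

            return 0;
    }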
|
| D | maple_tree.c |
   221                  const struct maple_enode *entry)  in mte_node_type() argument
   223          return ((unsigned long)entry >> MAPLE_NODE_TYPE_SHIFT) &  in mte_node_type()
   237  static __always_inline bool mte_is_leaf(const struct maple_enode *entry)  in mte_is_leaf() argument
   239          return ma_is_leaf(mte_node_type(entry));  in mte_is_leaf()
   246  static __always_inline bool mt_is_reserved(const void *entry)  in mt_is_reserved() argument
   248          return ((unsigned long)entry < MAPLE_RESERVED_RANGE) &&  in mt_is_reserved()
   249                  xa_is_internal(entry);  in mt_is_reserved()
   289                  const struct maple_enode *entry)  in mte_to_node() argument
   291          return (struct maple_node *)((unsigned long)entry & ~MAPLE_NODE_MASK);  in mte_to_node()
   300  static inline struct maple_topiary *mte_to_mat(const struct maple_enode *entry)  in mte_to_mat() argument
   [all …]
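
mte_node_type() and mte_to_node() expose the encoding trick: a maple_enode is an ordinary node pointer whose alignment-guaranteed low bits carry metadata such as the node type. A standalone sketch of the encode/decode pair, with illustrative shift and mask values:

    #include <stdint.h>

    #define NODE_TYPE_SHIFT 3
    #define NODE_TYPE_MASK  0x3UL
    #define NODE_MASK       0xFFUL  /* low bits free on 256-byte-aligned nodes */

    static inline void *encode_node(void *node, unsigned long type)
    {
            return (void *)((unsigned long)node | (type << NODE_TYPE_SHIFT));
    }

    static inline unsigned long node_type(const void *enode)
    {
            return ((unsigned long)enode >> NODE_TYPE_SHIFT) & NODE_TYPE_MASK;
    }

    static inline void *to_node(const void *enode)
    {
            return (void *)((unsigned long)enode & ~NODE_MASK);
    }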
|
| D | idr.c |
   231          void *entry = NULL;  in idr_get_next_ul() local
   237                  entry = rcu_dereference_raw(*slot);  in idr_get_next_ul()
   238                  if (!entry)  in idr_get_next_ul()
   240                  if (!xa_is_internal(entry))  in idr_get_next_ul()
   242                  if (slot != &idr->idr_rt.xa_head && !xa_is_retry(entry))  in idr_get_next_ul()
   250          return entry;  in idr_get_next_ul()
   267          void *entry = idr_get_next_ul(idr, &id);  in idr_get_next() local
   272          return entry;  in idr_get_next()
   294          void *entry;  in idr_replace() local
   298          entry = __radix_tree_lookup(&idr->idr_rt, id, &node, &slot);  in idr_replace()
   [all …]
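
idr_get_next_ul() scans forward from *id to the next populated slot and is the engine behind the idr_for_each_entry() iterator. A hedged sketch of routine IDR use (my_idr and idr_demo() are illustrative names):

    #include <linux/idr.h>
    #include <linux/printk.h>

    static DEFINE_IDR(my_idr);

    static int idr_demo(void *object)
    {
            void *entry;
            int id, n;

            /* Allocate the lowest free ID >= 0 and bind it to object. */
            id = idr_alloc(&my_idr, object, 0, 0, GFP_KERNEL);
            if (id < 0)
                    return id;

            /* Visit every live (id, entry) pair in ascending order. */
            idr_for_each_entry(&my_idr, entry, n)
                    pr_info("id %d -> %p\n", n, entry);

            idr_remove(&my_idr, id);
            return 0;
    }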
|
| D | test_xarray.c |
    72                  unsigned order, void *entry, gfp_t gfp)  in xa_store_order() argument
    79                  curr = xas_store(&xas, entry);  in xa_store_order()
   105          void *entry;  in check_xas_retry() local
   138          xas_for_each(&xas, entry, ULONG_MAX) {  in check_xas_retry()
   153                  void *entry = xa_load(xa, j);  in check_xa_load() local
   155                          XA_BUG_ON(xa, xa_to_value(entry) != j);  in check_xa_load()
   157                          XA_BUG_ON(xa, entry);  in check_xa_load()
   164                  void *entry = xa_load(xa, j);  in check_xa_load() local
   166                          XA_BUG_ON(xa, xa_to_value(entry) != j);  in check_xa_load()
   168                          XA_BUG_ON(xa, entry);  in check_xa_load()
   [all …]
|
| D | test_hmm.c |
   216          void *entry;  in dmirror_do_fault() local
   228                  entry = page;  in dmirror_do_fault()
   230                          entry = xa_tag_pointer(entry, DPT_XA_TAG_WRITE);  in dmirror_do_fault()
   233                  entry = xa_store(&dmirror->pt, pfn, entry, GFP_ATOMIC);  in dmirror_do_fault()
   234                  if (xa_is_err(entry))  in dmirror_do_fault()
   235                          return xa_err(entry);  in dmirror_do_fault()
   245          void *entry;  in dmirror_do_update() local
   252          xa_for_each_range(&dmirror->pt, pfn, entry, start >> PAGE_SHIFT,  in dmirror_do_update()
   369          void *entry;  in dmirror_do_read() local
   372                  entry = xa_load(&dmirror->pt, pfn);  in dmirror_do_read()
   [all …]
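
dmirror_do_fault() keeps page pointers in an XArray and folds a "writable" flag into the pointer's spare low bits via xa_tag_pointer(). A hedged sketch of that tagging pattern (MY_TAG_WRITE and both helpers are illustrative; XArray pointer tags must fit in two bits):

    #include <linux/xarray.h>

    #define MY_TAG_WRITE 1UL

    static DEFINE_XARRAY(pt);

    static int remember_page(unsigned long pfn, struct page *page, bool writable)
    {
            void *entry = page;

            if (writable)
                    entry = xa_tag_pointer(entry, MY_TAG_WRITE);

            /* xa_store() returns the displaced entry or an error entry. */
            entry = xa_store(&pt, pfn, entry, GFP_KERNEL);
            return xa_is_err(entry) ? xa_err(entry) : 0;
    }

    static struct page *recall_page(unsigned long pfn, bool *writable)
    {
            void *entry = xa_load(&pt, pfn);

            if (!entry)
                    return NULL;
            *writable = xa_pointer_tag(entry) == MY_TAG_WRITE;
            return xa_untag_pointer(entry);
    }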
|
| D | error-inject.c |
    70          unsigned long entry, offset = 0, size = 0;  in populate_error_injection_list() local
    74                  entry = (unsigned long)dereference_symbol_descriptor((void *)iter->addr);  in populate_error_injection_list()
    76                  if (!kernel_text_address(entry) ||  in populate_error_injection_list()
    77                      !kallsyms_lookup_size_offset(entry, &size, &offset)) {  in populate_error_injection_list()
    79                             (void *)entry);  in populate_error_injection_list()
    86                  ent->start_addr = entry;  in populate_error_injection_list()
    87                  ent->end_addr = entry + size;  in populate_error_injection_list()
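
populate_error_injection_list() turns each whitelisted address into a [start, end) text range by asking kallsyms for the containing symbol's size and offset. A hedged sketch of that resolution step (addr_to_range() is a hypothetical helper; the excerpt above starts from an already-dereferenced function entry, so its offset is zero):

    #include <linux/kallsyms.h>

    static bool addr_to_range(unsigned long addr, unsigned long *start,
                              unsigned long *end)
    {
            unsigned long size = 0, offset = 0;

            if (!kernel_text_address(addr) ||
                !kallsyms_lookup_size_offset(addr, &size, &offset))
                    return false;

            *start = addr - offset;         /* symbol entry point     */
            *end = *start + size;           /* one past its last byte */
            return true;
    }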
|
| D | radix-tree.c |
    89          void __rcu **entry = rcu_dereference_raw(parent->slots[offset]);  in radix_tree_descend() local
    91          *nodep = (void *)entry;  in radix_tree_descend()
   411          void *entry;  in radix_tree_extend() local
   420          entry = rcu_dereference_raw(root->xa_head);  in radix_tree_extend()
   421          if (!entry && (!is_idr(root) || root_tag_get(root, IDR_FREE)))  in radix_tree_extend()
   445                  if (radix_tree_is_internal_node(entry)) {  in radix_tree_extend()
   446                          entry_to_node(entry)->parent = node;  in radix_tree_extend()
   447                  } else if (xa_is_value(entry)) {  in radix_tree_extend()
   455                  node->slots[0] = (void __rcu *)entry;  in radix_tree_extend()
   456                  entry = node_to_entry(node);  in radix_tree_extend()
   [all …]
|
| D | nlattr.c |
    94          const struct nlattr *entry;  in nla_validate_array() local
    97          nla_for_each_attr(entry, head, len, rem) {  in nla_validate_array()
   100                  if (nla_len(entry) == 0)  in nla_validate_array()
   103                  if (nla_len(entry) < NLA_HDRLEN) {  in nla_validate_array()
   104                          NL_SET_ERR_MSG_ATTR_POL(extack, entry, policy,  in nla_validate_array()
   109                  ret = __nla_validate_parse(nla_data(entry), nla_len(entry),  in nla_validate_array()
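
nla_validate_array() walks a nested attribute stream with nla_for_each_attr() and rejects entries whose payload cannot even hold a nested header. A hedged consumer-side sketch of the same walk (count_attrs() is an illustrative helper):

    #include <net/netlink.h>
    #include <linux/errno.h>

    static int count_attrs(const struct nlattr *head, int len)
    {
            const struct nlattr *entry;
            int rem, count = 0;

            nla_for_each_attr(entry, head, len, rem) {
                    /* nla_len() is the payload size, header excluded. */
                    if (nla_len(entry) == 0)
                            continue;
                    count++;
            }
            /* Leftover bytes mean a truncated trailing attribute. */
            return rem ? -EINVAL : count;
    }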
|
| D | rhashtable.c |
   235          struct rhash_head *head, *next, *entry;  in rhashtable_rehash_one() local
   245          rht_for_each_from(entry, rht_ptr(bkt, old_tbl, old_hash),  in rhashtable_rehash_one()
   248                  next = rht_dereference_bucket(entry->next, old_tbl, old_hash);  in rhashtable_rehash_one()
   253                  pprev = &entry->next;  in rhashtable_rehash_one()
   259          new_hash = head_hashfn(ht, new_tbl, entry);  in rhashtable_rehash_one()
   266          RCU_INIT_POINTER(entry->next, head);  in rhashtable_rehash_one()
   268          rht_assign_unlock(new_tbl, &new_tbl->buckets[new_hash], entry, flags);  in rhashtable_rehash_one()
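
rhashtable_rehash_one() migrates a single entry from the old bucket table to the new one while both remain visible to readers. Users normally stay on the insert/lookup API; a hedged sketch with an illustrative struct obj and parameter block:

    #include <linux/rhashtable.h>
    #include <linux/slab.h>
    #include <linux/printk.h>

    struct obj {
            u32 key;
            struct rhash_head node;
    };

    static const struct rhashtable_params obj_params = {
            .key_len     = sizeof(u32),
            .key_offset  = offsetof(struct obj, key),
            .head_offset = offsetof(struct obj, node),
            .automatic_shrinking = true,
    };

    static int rht_demo(struct rhashtable *ht)
    {
            struct obj *obj, *found;
            u32 key = 42;
            int err;

            err = rhashtable_init(ht, &obj_params);
            if (err)
                    return err;

            obj = kzalloc(sizeof(*obj), GFP_KERNEL);
            if (!obj) {
                    err = -ENOMEM;
                    goto out;
            }
            obj->key = key;

            err = rhashtable_insert_fast(ht, &obj->node, obj_params);
            if (err) {
                    kfree(obj);
                    goto out;
            }

            /* Lookups run under RCU, concurrently with any rehash. */
            rcu_read_lock();
            found = rhashtable_lookup_fast(ht, &key, obj_params);
            rcu_read_unlock();
            pr_info("lookup %u -> %p\n", key, found);
    out:
            /* Frees the table only; real users reclaim objects too,
             * e.g. via rhashtable_free_and_destroy(). */
            rhashtable_destroy(ht);
            return err;
    }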
|
| D | kobject.c |
   181          list_add_tail(&kobj->entry, &kobj->kset->list);  in kobj_kset_join()
   192          list_del_init(&kobj->entry);  in kobj_kset_leave()
   202          INIT_LIST_HEAD(&kobj->entry);  in kobject_init_internal()
   911          list_for_each_entry(k, &kset->list, entry) {  in kset_find_obj()
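
kobjects joining a kset are threaded through kobj->entry, and kset_find_obj() (line 911) walks that list under the kset's spinlock comparing names. A hedged sketch of such a lookup (find_in_kset() is a hypothetical name for roughly what kset_find_obj() does):

    #include <linux/kobject.h>
    #include <linux/string.h>

    static struct kobject *find_in_kset(struct kset *kset, const char *name)
    {
            struct kobject *k, *ret = NULL;

            spin_lock(&kset->list_lock);
            list_for_each_entry(k, &kset->list, entry) {
                    if (kobject_name(k) && !strcmp(kobject_name(k), name)) {
                            /* Take a reference unless it is already dying. */
                            ret = kobject_get_unless_zero(k);
                            break;
                    }
            }
            spin_unlock(&kset->list_lock);
            return ret;
    }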
|
| D | sbitmap.c |
   759          list_del_init(&sbq_wait->wait.entry);  in sbitmap_del_wait_queue()
|
| D | Kconfig.kcsan |
   188  # We can either let objtool nop __tsan_func_{entry,exit}() and builtin
|
| /lib/pldmfw/ |
| D | pldmfw.c |
   298          list_add_tail(&desc->entry, &record->descs);  in pldm_parse_desc_tlvs()
   336          list_add_tail(&record->entry, &data->records);  in pldm_parse_one_record()
   484          list_add_tail(&component->entry, &data->components);  in pldm_parse_components()
   549          list_for_each_entry_safe(component, c_safe, &data->components, entry) {  in pldmfw_free_priv()
   550                  list_del(&component->entry);  in pldmfw_free_priv()
   554          list_for_each_entry_safe(record, r_safe, &data->records, entry) {  in pldmfw_free_priv()
   555                  list_for_each_entry_safe(desc, d_safe, &record->descs, entry) {  in pldmfw_free_priv()
   556                          list_del(&desc->entry);  in pldmfw_free_priv()
   565                  list_del(&record->entry);  in pldmfw_free_priv()
   639          list_for_each_entry(desc, &record->descs, entry) {  in pldmfw_op_pci_match_record()
   [all …]
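
pldmfw_free_priv() tears the nested lists down with list_for_each_entry_safe(), which caches the next node so the current one can be unlinked and freed mid-walk. A hedged sketch of that teardown idiom (struct record here is illustrative):

    #include <linux/list.h>
    #include <linux/slab.h>

    struct record {
            struct list_head entry;
            /* ... payload ... */
    };

    static void free_all_records(struct list_head *records)
    {
            struct record *record, *r_safe;

            /* The _safe variant pre-fetches the next node, so deleting
             * and freeing the current one cannot break the walk. */
            list_for_each_entry_safe(record, r_safe, records, entry) {
                    list_del(&record->entry);
                    kfree(record);
            }
    }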
|
| /lib/zstd/compress/ |
| D | zstd_ldm.c |
   181                               size_t const hash, const ldmEntry_t entry,  in ZSTD_ldm_insertEntry() argument
   187          *(ZSTD_ldm_getBucket(ldmState, hash, ldmParams) + offset) = entry;  in ZSTD_ldm_insertEntry()
   294                  ldmEntry_t entry;  in ZSTD_ldm_fillHashTable() local
   296                  entry.offset = (U32)(split - base);  in ZSTD_ldm_fillHashTable()
   297                  entry.checksum = (U32)(xxhash >> 32);  in ZSTD_ldm_fillHashTable()
   298                  ZSTD_ldm_insertEntry(ldmState, hash, entry, *params);  in ZSTD_ldm_fillHashTable()
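
ZSTD_ldm_insertEntry() writes an (offset, checksum) pair into a rotating slot of the bucket chosen by the hash, so new long-distance-match candidates overwrite the stalest ones. A hedged standalone sketch of that bucketed ring insert, with all names invented for illustration:

    #include <stdint.h>
    #include <stddef.h>

    #define BUCKET_SIZE 4   /* must stay a power of two for the mask below */

    typedef struct {
            uint32_t offset;
            uint32_t checksum;
    } ldm_entry;

    struct ldm_state {
            ldm_entry *table;       /* num_buckets * BUCKET_SIZE entries  */
            uint8_t *bucket_pos;    /* next slot to overwrite, per bucket */
    };

    static void insert_entry(struct ldm_state *st, size_t hash, ldm_entry e)
    {
            ldm_entry *bucket = st->table + hash * BUCKET_SIZE;
            uint8_t pos = st->bucket_pos[hash];

            bucket[pos] = e;
            st->bucket_pos[hash] = (uint8_t)((pos + 1) & (BUCKET_SIZE - 1));
    }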
|
| D | zstd_lazy.c |
   682          const U32* entry = &dms->hashTable[ddsIdx];  in ZSTD_HcFindBestMatch() local
   683          PREFETCH_L1(entry);  in ZSTD_HcFindBestMatch()
|