
Searched refs:cleanup_list (Results 1 – 8 of 8) sorted by relevance

/kernel/linux/linux-5.10/net/batman-adv/
send.c
536 INIT_HLIST_NODE(&forw_packet->cleanup_list); in batadv_forw_packet_alloc()
564 return !hlist_unhashed(&forw_packet->cleanup_list); in batadv_forw_packet_was_stolen()
592 hlist_add_fake(&forw_packet->cleanup_list); in batadv_forw_packet_steal()
613 struct hlist_head *cleanup_list, in batadv_forw_packet_list_steal() argument
630 hlist_add_head(&forw_packet->cleanup_list, cleanup_list); in batadv_forw_packet_list_steal()
650 cleanup_list) { in batadv_forw_packet_list_free()
653 hlist_del(&forw_packet->cleanup_list); in batadv_forw_packet_list_free()
686 WARN_ONCE(hlist_fake(&forw_packet->cleanup_list), in batadv_forw_packet_queue()
types.h
2140 struct hlist_node cleanup_list; member
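
The batman-adv hits above show an hlist_node doing double duty as a list link and an ownership marker: hlist_add_fake() marks a forwarding packet as stolen so a later hlist_unhashed() check can report it, while batadv_forw_packet_list_steal() collects packets onto a caller-supplied cleanup_list that is freed in one pass. Below is a minimal kernel-style sketch of that pattern only; the struct and function names are invented stand-ins, not the batman-adv ones.

#include <linux/list.h>
#include <linux/slab.h>

/* Illustrative stand-in for batadv_forw_packet (hypothetical name). */
struct fw_pkt {
	struct hlist_node cleanup_list;
	/* ... payload ... */
};

static struct fw_pkt *fw_pkt_alloc(void)
{
	struct fw_pkt *pkt = kzalloc(sizeof(*pkt), GFP_ATOMIC);

	if (pkt)
		INIT_HLIST_NODE(&pkt->cleanup_list); /* unhashed == not queued/stolen */
	return pkt;
}

/* Mark the packet as taken over by the caller (mirrors the hlist_add_fake() use). */
static void fw_pkt_steal(struct fw_pkt *pkt)
{
	hlist_add_fake(&pkt->cleanup_list);
}

/* True once the packet was stolen or queued on a cleanup list. */
static bool fw_pkt_was_stolen(const struct fw_pkt *pkt)
{
	return !hlist_unhashed(&pkt->cleanup_list);
}

/* Collect a packet onto a caller-owned cleanup list; free the whole list later. */
static void fw_pkt_list_steal(struct fw_pkt *pkt, struct hlist_head *cleanup_list)
{
	hlist_add_head(&pkt->cleanup_list, cleanup_list);
}

static void fw_pkt_list_free(struct hlist_head *cleanup_list)
{
	struct fw_pkt *pkt;
	struct hlist_node *tmp;

	hlist_for_each_entry_safe(pkt, tmp, cleanup_list, cleanup_list) {
		hlist_del(&pkt->cleanup_list);
		kfree(pkt);
	}
}
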
/kernel/linux/linux-5.10/net/core/
flow_offload.c
407 struct list_head *cleanup_list) in __flow_block_indr_cleanup() argument
414 list_move(&this->indr.list, cleanup_list); in __flow_block_indr_cleanup()
418 static void flow_block_indr_notify(struct list_head *cleanup_list) in flow_block_indr_notify() argument
422 list_for_each_entry_safe(this, next, cleanup_list, indr.list) { in flow_block_indr_notify()
432 LIST_HEAD(cleanup_list); in flow_indr_dev_unregister()
450 __flow_block_indr_cleanup(release, cb_priv, &cleanup_list); in flow_indr_dev_unregister()
453 flow_block_indr_notify(&cleanup_list); in flow_indr_dev_unregister()
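
flow_indr_dev_unregister() shows the usual two-phase teardown: matching blocks are first moved onto a function-local cleanup_list with list_move() while the registration list is being walked, and only afterwards is each collected entry notified and released. A hedged, generic sketch of that collect-then-act idiom follows; the names and the mutex are assumptions, not the flow_offload ones.

#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/slab.h>

struct blk {
	struct list_head list;
	void *cb_priv;
};

static LIST_HEAD(active_blocks);
static DEFINE_MUTEX(block_lock);

/* Phase 1: under the lock, only move matching entries onto a private list. */
static void collect_blocks(void *cb_priv, struct list_head *cleanup_list)
{
	struct blk *this, *next;

	mutex_lock(&block_lock);
	list_for_each_entry_safe(this, next, &active_blocks, list) {
		if (this->cb_priv == cb_priv)
			list_move(&this->list, cleanup_list);
	}
	mutex_unlock(&block_lock);
}

/* Phase 2: outside the lock, notify/release everything that was collected. */
static void release_blocks(struct list_head *cleanup_list)
{
	struct blk *this, *next;

	list_for_each_entry_safe(this, next, cleanup_list, list) {
		list_del(&this->list);
		kfree(this);
	}
}

static void unregister_blocks(void *cb_priv)
{
	LIST_HEAD(cleanup_list);

	collect_blocks(cb_priv, &cleanup_list);
	release_blocks(&cleanup_list);
}
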
net_namespace.c
565 static LLIST_HEAD(cleanup_list);
575 net_kill_list = llist_del_all(&cleanup_list); in cleanup_net()
581 llist_for_each_entry(net, net_kill_list, cleanup_list) in cleanup_net()
596 llist_for_each_entry(net, net_kill_list, cleanup_list) { in cleanup_net()
661 if (llist_add(&net->cleanup_list, &cleanup_list)) in __put_net()
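
net_namespace.c uses a lock-free llist as the death row: __put_net() pushes the namespace with llist_add(), which returns true only when the list was previously empty, so the cleanup work is queued exactly once per batch; cleanup_net() then drains the whole list with llist_del_all(). A simplified sketch of that producer/worker handoff, assuming schedule_work() in place of the dedicated netns workqueue and with invented struct names:

#include <linux/llist.h>
#include <linux/workqueue.h>
#include <linux/slab.h>

struct myns {
	struct llist_node cleanup_list;
	/* ... */
};

static LLIST_HEAD(cleanup_list);
static void cleanup_work_fn(struct work_struct *work);
static DECLARE_WORK(cleanup_work, cleanup_work_fn);

/* Worker: take the whole pending batch at once, then tear each entry down. */
static void cleanup_work_fn(struct work_struct *work)
{
	struct llist_node *kill_list = llist_del_all(&cleanup_list);
	struct myns *ns, *tmp;

	llist_for_each_entry_safe(ns, tmp, kill_list, cleanup_list)
		kfree(ns);
}

/* Producer: queue the work only when this entry made the list non-empty. */
static void put_myns(struct myns *ns)
{
	if (llist_add(&ns->cleanup_list, &cleanup_list))
		schedule_work(&cleanup_work);
}
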
/kernel/linux/linux-5.10/drivers/gpu/drm/vmwgfx/
vmwgfx_fence.c
44 struct list_head cleanup_list; member
273 list_splice_init(&fman->cleanup_list, &list); in vmw_fence_work_func()
310 INIT_LIST_HEAD(&fman->cleanup_list); in vmw_fence_manager_init()
332 list_empty(&fman->cleanup_list); in vmw_fence_manager_takedown()
380 list_add_tail(&action->head, &fman->cleanup_list); in vmw_fences_perform_actions()
502 if (!list_empty(&fman->cleanup_list)) in __vmw_fences_update()
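
The vmwgfx fence manager defers actions onto fman->cleanup_list and, in its work function, splices the whole list onto a local one with list_splice_init() before processing it, so the actions run without the manager lock held. Roughly as below; the structure, lock discipline, and names are illustrative assumptions, not the vmwgfx code.

#include <linux/list.h>
#include <linux/spinlock.h>

struct action {
	struct list_head head;
	void (*cleanup)(struct action *action);
};

struct fence_manager {
	spinlock_t lock;
	struct list_head cleanup_list;
};

/* Defer an action; callers already hold fman->lock. */
static void queue_cleanup(struct fence_manager *fman, struct action *action)
{
	list_add_tail(&action->head, &fman->cleanup_list);
}

/* Work function: grab the whole pending list, then run it without the lock. */
static void fence_work(struct fence_manager *fman)
{
	struct action *act, *next;
	LIST_HEAD(list);

	spin_lock_irq(&fman->lock);
	list_splice_init(&fman->cleanup_list, &list);
	spin_unlock_irq(&fman->lock);

	list_for_each_entry_safe(act, next, &list, head) {
		list_del_init(&act->head);
		act->cleanup(act);
	}
}
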
/kernel/linux/linux-5.10/arch/x86/kernel/apic/
vector.c
47 static DEFINE_PER_CPU(struct hlist_head, cleanup_list);
881 struct hlist_head *clhead = this_cpu_ptr(&cleanup_list); in DEFINE_IDTENTRY_SYSVEC()
920 hlist_add_head(&apicd->clist, per_cpu_ptr(&cleanup_list, cpu)); in __send_cleanup_vector()
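
vector.c keeps one cleanup_list per CPU: __send_cleanup_vector() hangs the descriptor on the target CPU's list via per_cpu_ptr() and kicks that CPU, whose cleanup handler then drains its own list through this_cpu_ptr(). A stripped-down sketch of the per-CPU list handling only; the IPI and vector bookkeeping are omitted and all names besides the per-CPU API are invented.

#include <linux/percpu.h>
#include <linux/list.h>

struct vec_data {
	struct hlist_node clist;
	unsigned int prev_vector;
};

static DEFINE_PER_CPU(struct hlist_head, cleanup_list);

/* Producer side: queue the descriptor on the previous CPU's cleanup list. */
static void queue_for_cleanup(struct vec_data *vd, unsigned int cpu)
{
	hlist_add_head(&vd->clist, per_cpu_ptr(&cleanup_list, cpu));
	/* ...send the cleanup IPI to @cpu here... */
}

/* Consumer side: runs on @cpu and drains that CPU's own list. */
static void handle_cleanup(void)
{
	struct hlist_head *clhead = this_cpu_ptr(&cleanup_list);
	struct vec_data *vd;
	struct hlist_node *tmp;

	hlist_for_each_entry_safe(vd, tmp, clhead, clist) {
		hlist_del_init(&vd->clist);
		vd->prev_vector = 0; /* release the old vector */
	}
}
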
/kernel/linux/linux-5.10/include/net/
net_namespace.h
86 struct llist_node cleanup_list; /* namespaces on death row */ member
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx4/
mcg.c
128 struct list_head cleanup_list; member