Searched refs:cb_list (Results 1 – 25 of 26) sorted by relevance

/kernel/linux/linux-5.10/drivers/dma-buf/
dma-fence.c  333 struct list_head cb_list; in dma_fence_signal_locked() local
342 list_replace(&fence->cb_list, &cb_list); in dma_fence_signal_locked()
348 list_for_each_entry_safe(cur, tmp, &cb_list, node) { in dma_fence_signal_locked()
445 if (WARN(!list_empty(&fence->cb_list) && in dma_fence_release()
574 list_add_tail(&cb->node, &fence->cb_list); in dma_fence_add_callback()
700 list_add(&cb.base.node, &fence->cb_list); in dma_fence_default_wait()
856 INIT_LIST_HEAD(&fence->cb_list); in dma_fence_init()
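
The dma-fence.c hits above trace the usual life cycle of fence->cb_list: dma_fence_init() runs INIT_LIST_HEAD(), dma_fence_add_callback() queues entries with list_add_tail(), and dma_fence_signal_locked() detaches the whole chain with list_replace() before walking it with list_for_each_entry_safe() and calling each callback. A minimal userspace sketch of that detach-and-dispatch idea follows; cb_node, list_init(), list_replace() and hello() here are simplified stand-ins written for this example, not the kernel's <linux/list.h> or struct dma_fence_cb API.

#include <stdio.h>

struct cb_node {
    struct cb_node *prev, *next;
    void (*func)(struct cb_node *cb);   /* plays the role of dma_fence_func_t */
};

static void list_init(struct cb_node *head)
{
    head->prev = head->next = head;
}

static void list_add_tail(struct cb_node *node, struct cb_node *head)
{
    node->prev = head->prev;
    node->next = head;
    head->prev->next = node;
    head->prev = node;
}

/* counterpart of the kernel's list_replace(): move the chain onto new_head */
static void list_replace(struct cb_node *old, struct cb_node *new_head)
{
    if (old->next == old) {             /* nothing queued */
        list_init(new_head);
        return;
    }
    new_head->next = old->next;
    new_head->prev = old->prev;
    new_head->next->prev = new_head;
    new_head->prev->next = new_head;
    list_init(old);                     /* the fence's list is now empty */
}

static void hello(struct cb_node *cb)
{
    printf("callback %p fired\n", (void *)cb);
}

int main(void)
{
    struct cb_node fence_cb_list, signaled, *cur, *tmp;
    struct cb_node a = { .func = hello }, b = { .func = hello };

    list_init(&fence_cb_list);          /* as in dma_fence_init() */
    list_add_tail(&a, &fence_cb_list);  /* as in dma_fence_add_callback() */
    list_add_tail(&b, &fence_cb_list);

    /* as in dma_fence_signal_locked(): steal the list, then walk it safely */
    list_replace(&fence_cb_list, &signaled);
    for (cur = signaled.next; cur != &signaled; cur = tmp) {
        tmp = cur->next;                /* the "_safe" part: read next first */
        cur->func(cur);
    }
    return 0;
}
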
/kernel/linux/linux-5.10/net/netfilter/
nf_tables_offload.c  198 struct list_head *cb_list) in nft_setup_cb_call() argument
203 list_for_each_entry(block_cb, cb_list, list) { in nft_setup_cb_call()
280 &basechain->flow_block.cb_list); in nft_flow_offload_rule()
286 list_splice(&bo->cb_list, &basechain->flow_block.cb_list); in nft_flow_offload_bind()
304 nft_setup_cb_call(TC_SETUP_CLSFLOWER, &cls_flow, &bo->cb_list); in nft_flow_offload_unbind()
307 list_for_each_entry_safe(block_cb, next, &bo->cb_list, list) { in nft_flow_offload_unbind()
348 bo->cb_list_head = &basechain->flow_block.cb_list; in nft_flow_block_offload_init()
349 INIT_LIST_HEAD(&bo->cb_list); in nft_flow_block_offload_init()
381 list_move(&block_cb->list, &bo.cb_list); in nft_indr_block_cleanup()
401 if (list_empty(&bo.cb_list)) in nft_indr_block_offload_cmd()
nf_flow_table_offload.c  719 &offload->flowtable->flow_block.cb_list); in flow_offload_tuple_add()
727 &offload->flowtable->flow_block.cb_list); in flow_offload_tuple_del()
778 &offload->flowtable->flow_block.cb_list); in flow_offload_tuple_stats()
916 list_splice(&bo->cb_list, &flowtable->flow_block.cb_list); in nf_flow_table_block_setup()
919 list_for_each_entry_safe(block_cb, next, &bo->cb_list, list) { in nf_flow_table_block_setup()
945 bo->cb_list_head = &flowtable->flow_block.cb_list; in nf_flow_table_block_offload_init()
946 INIT_LIST_HEAD(&bo->cb_list); in nf_flow_table_block_offload_init()
/kernel/linux/linux-5.10/drivers/dma/
bcm2835-dma.c  95 struct bcm2835_cb_entry cb_list[]; member
209 dma_pool_free(desc->c->cb_pool, desc->cb_list[i].cb, in bcm2835_dma_free_cb_chain()
210 desc->cb_list[i].paddr); in bcm2835_dma_free_cb_chain()
317 d = kzalloc(struct_size(d, cb_list, frames), gfp); in bcm2835_dma_create_cb_chain()
330 cb_entry = &d->cb_list[frame]; in bcm2835_dma_create_cb_chain()
357 d->cb_list[frame - 1].cb->next = cb_entry->paddr; in bcm2835_dma_create_cb_chain()
370 d->cb_list[d->frames - 1].cb->info |= finalextrainfo; in bcm2835_dma_create_cb_chain()
452 writel(d->cb_list[0].paddr, c->chan_base + BCM2835_DMA_ADDR); in bcm2835_dma_start_desc()
544 struct bcm2835_dma_cb *control_block = d->cb_list[i].cb; in bcm2835_dma_desc_size_pos()
686 bcm2835_dma_fill_cb_chain_with_sg(chan, direction, d->cb_list, in bcm2835_dma_prep_slave_sg()
[all …]
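
In bcm2835-dma.c, cb_list is a different thing: a flexible array member of per-frame control-block entries, sized at allocation time with struct_size(d, cb_list, frames) and walked by index (cb_list[frame], cb_list[d->frames - 1]). A rough userspace sketch of that allocation pattern, with made-up cb_entry/desc types standing in for bcm2835_cb_entry and the driver's descriptor, could look like this:

#include <stdio.h>
#include <stdlib.h>

struct cb_entry {
    void *cb;                   /* stand-in for struct bcm2835_dma_cb * */
    unsigned long paddr;        /* DMA address of that control block */
};

struct desc {
    size_t frames;
    struct cb_entry cb_list[];  /* flexible array member, as in the driver */
};

int main(void)
{
    size_t frames = 4;
    /* userspace stand-in for kzalloc(struct_size(d, cb_list, frames), gfp) */
    struct desc *d = calloc(1, sizeof(*d) + frames * sizeof(d->cb_list[0]));

    if (!d)
        return 1;
    d->frames = frames;
    for (size_t i = 0; i < frames; i++)
        d->cb_list[i].paddr = 0x1000UL * (i + 1);   /* fake addresses */
    printf("last frame paddr = %#lx\n", d->cb_list[d->frames - 1].paddr);
    free(d);
    return 0;
}
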
/kernel/linux/linux-5.10/include/net/
flow_offload.h  431 struct list_head cb_list; member
443 struct list_head cb_list; member
500 list_add_tail(&block_cb->list, &offload->cb_list); in flow_block_cb_add()
506 list_move(&block_cb->list, &offload->cb_list); in flow_block_cb_remove()
513 list_move(&block_cb->list, &offload->cb_list); in flow_indr_block_cb_remove()
556 INIT_LIST_HEAD(&flow_block->cb_list); in flow_block_init()
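
The flow_offload.h inline helpers listed above (flow_block_cb_add(), flow_block_cb_remove(), flow_block_init()) are what drivers call from their block-setup handlers; the core then splices bo->cb_list into flow_block.cb_list (see the tcf_block_bind and nf_flow_table_block_setup hits). Purely as an illustration, a hypothetical driver handler might look roughly like this; my_setup_cb(), my_block_setup() and the dev cookie are invented for the example, while the flow_block_cb_*() calls are the real helpers:

/* Illustrative sketch only, not buildable on its own outside a driver. */
#include <linux/err.h>
#include <linux/list.h>
#include <net/flow_offload.h>

static LIST_HEAD(my_driver_block_cb_list);      /* driver-private bookkeeping */

static int my_setup_cb(enum tc_setup_type type, void *type_data, void *cb_priv)
{
    return -EOPNOTSUPP;         /* a real driver would program its hardware here */
}

static int my_block_setup(void *dev, struct flow_block_offload *f)
{
    struct flow_block_cb *block_cb;

    switch (f->command) {
    case FLOW_BLOCK_BIND:
        if (flow_block_cb_is_busy(my_setup_cb, dev, &my_driver_block_cb_list))
            return -EBUSY;
        block_cb = flow_block_cb_alloc(my_setup_cb, dev, dev, NULL);
        if (IS_ERR(block_cb))
            return PTR_ERR(block_cb);
        flow_block_cb_add(block_cb, f);         /* list_add_tail() onto f->cb_list */
        list_add_tail(&block_cb->driver_list, &my_driver_block_cb_list);
        return 0;
    case FLOW_BLOCK_UNBIND:
        block_cb = flow_block_cb_lookup(f->block, my_setup_cb, dev);
        if (!block_cb)
            return -ENOENT;
        flow_block_cb_remove(block_cb, f);      /* list_move() back onto f->cb_list */
        list_del(&block_cb->driver_list);
        return 0;
    default:
        return -EOPNOTSUPP;
    }
}
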
/kernel/linux/linux-5.10/net/core/
flow_offload.c  238 list_for_each_entry(block_cb, &block->cb_list, list) { in flow_block_cb_lookup()
359 struct list_head *cb_list; member
371 INIT_LIST_HEAD(&bo.cb_list); in existing_qdiscs_register()
373 list_splice(&bo.cb_list, cur->cb_list); in existing_qdiscs_register()
527 info->cb_list = bo->cb_list_head; in indir_dev_add()
566 return list_empty(&bo->cb_list) ? -EOPNOTSUPP : 0; in flow_indr_dev_setup_offload()
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gt/
intel_breadcrumbs.c  287 struct list_head cb_list; in signal_irq_work() local
290 list_replace(&rq->fence.cb_list, &cb_list); in signal_irq_work()
292 __dma_fence_signal__notify(&rq->fence, &cb_list); in signal_irq_work()
selftest_hangcheck.c  1270 if (wait_for(!list_empty(&rq->fence.cb_list), 10)) { in __igt_reset_evict_vma()
/kernel/linux/linux-5.10/arch/s390/kernel/
vtime.c  279 LIST_HEAD(cb_list); in virt_timer_expire()
287 list_move_tail(&timer->entry, &cb_list); in virt_timer_expire()
300 list_for_each_entry_safe(timer, tmp, &cb_list, entry) { in virt_timer_expire()
/kernel/linux/linux-5.10/include/drm/
drm_syncobj.h  55 struct list_head cb_list; member
/kernel/linux/linux-5.10/block/
blk-core.c  1694 INIT_LIST_HEAD(&plug->cb_list); in blk_start_plug()
1711 while (!list_empty(&plug->cb_list)) { in flush_plug_callbacks()
1712 list_splice_init(&plug->cb_list, &callbacks); in flush_plug_callbacks()
1733 list_for_each_entry(cb, &plug->cb_list, list) in blk_check_plugged()
1743 list_add(&cb->list, &plug->cb_list); in blk_check_plugged()
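
blk-core.c is the other half of the plug->cb_list protocol declared in blkdev.h: blk_check_plugged() either finds an already-queued callback or allocates one and list_add()s it, and flush_plug_callbacks() splices the list off and invokes every entry at unplug time. A rough sketch of the caller side (the pattern md uses) follows; my_plug_cb, my_unplug(), submit_pending() and my_queue_bio() are placeholder names for this example, while blk_check_plugged() and struct blk_plug_cb are the real interface:

/* Illustrative sketch only, not buildable on its own outside a driver. */
#include <linux/bio.h>
#include <linux/blkdev.h>
#include <linux/slab.h>

struct my_plug_cb {
    struct blk_plug_cb cb;      /* embedded entry linked on plug->cb_list */
    struct bio_list pending;    /* work batched while the plug is held */
};

static void submit_pending(struct bio_list *bl)
{
    struct bio *bio;

    while ((bio = bio_list_pop(bl)))
        submit_bio_noacct(bio);
}

/* Invoked from flush_plug_callbacks() when the task unplugs. */
static void my_unplug(struct blk_plug_cb *cb, bool from_schedule)
{
    struct my_plug_cb *mp = container_of(cb, struct my_plug_cb, cb);

    submit_pending(&mp->pending);
    kfree(mp);                  /* the callback owns the allocation */
}

static void my_queue_bio(void *owner, struct bio *bio)
{
    struct blk_plug_cb *cb;

    cb = blk_check_plugged(my_unplug, owner, sizeof(struct my_plug_cb));
    if (cb) {
        struct my_plug_cb *mp = container_of(cb, struct my_plug_cb, cb);

        bio_list_add(&mp->pending, bio);    /* defer until unplug */
    } else {
        submit_bio_noacct(bio);             /* no plug active: submit now */
    }
}
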
/kernel/linux/linux-5.10/drivers/gpu/drm/
drm_syncobj.c  256 list_add_tail(&wait->node, &syncobj->cb_list); in drm_syncobj_fence_add_wait()
304 list_for_each_entry_safe(cur, tmp, &syncobj->cb_list, node) in drm_syncobj_add_point()
337 list_for_each_entry_safe(cur, tmp, &syncobj->cb_list, node) in drm_syncobj_replace_fence()
488 INIT_LIST_HEAD(&syncobj->cb_list); in drm_syncobj_create()
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx4/
alias_GUID.c  500 &dev->sriov.alias_guid.ports_guid[port - 1].cb_list; in set_guid_rec()
810 while (!list_empty(&det->cb_list)) { in mlx4_ib_destroy_alias_guid_service()
811 cb_ctx = list_entry(det->cb_list.next, in mlx4_ib_destroy_alias_guid_service()
870 INIT_LIST_HEAD(&dev->sriov.alias_guid.ports_guid[i].cb_list); in mlx4_ib_init_alias_guid_service()
mlx4_ib.h  435 struct list_head cb_list; member
/kernel/linux/linux-5.10/include/linux/
dma-fence.h  85 struct list_head cb_list; member
blkdev.h  1257 struct list_head cb_list; /* md requires an unplug callback */ member
1298 !list_empty(&plug->cb_list)); in blk_needs_flush_plug()
/kernel/linux/linux-5.10/include/net/netfilter/
nf_flow_table.h  184 list_add_tail(&block_cb->list, &block->cb_list); in nf_flow_table_offload_add_cb()
/kernel/linux/linux-5.10/net/sched/
cls_api.c  635 bo->cb_list_head = &flow_block->cb_list; in tcf_block_offload_init()
636 INIT_LIST_HEAD(&bo->cb_list); in tcf_block_offload_init()
657 list_move(&block_cb->list, &bo.cb_list); in tc_block_indr_cleanup()
1449 list_for_each_entry(block_cb, &bo->cb_list, list) { in tcf_block_bind()
1461 list_splice(&bo->cb_list, &block->flow_block.cb_list); in tcf_block_bind()
1466 list_for_each_entry_safe(block_cb, next, &bo->cb_list, list) { in tcf_block_bind()
1490 list_for_each_entry_safe(block_cb, next, &bo->cb_list, list) { in tcf_block_unbind()
3259 list_for_each_entry(block_cb, &block->flow_block.cb_list, list) { in __tc_setup_cb_call()
act_ct.c  334 list_for_each_entry_safe(block_cb, tmp_cb, &block->cb_list, list) { in tcf_ct_flow_table_cleanup_work()
/kernel/linux/linux-5.10/drivers/misc/habanalabs/common/
debugfs.c  105 list_for_each_entry(cb, &dev_entry->cb_list, debugfs_list) { in command_buffers_show()
1221 INIT_LIST_HEAD(&dev_entry->cb_list); in hl_debugfs_add_device()
1362 list_add(&cb->debugfs_list, &dev_entry->cb_list); in hl_debugfs_add_cb()
habanalabs.h  1265 struct list_head cb_list; member
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/
i915_active.c  1041 list_add_tail((struct list_head *)node, &rq->fence.cb_list); in i915_request_add_active_barriers()
1098 list_add_tail(&active->cb.node, &fence->cb_list); in __i915_active_fence_set()
/kernel/linux/linux-5.10/drivers/gpu/drm/vmwgfx/
vmwgfx_fence.c  198 list_add(&cb.base.node, &f->cb_list); in vmw_fence_wait()
/kernel/linux/linux-5.10/drivers/net/wireless/intel/ipw2x00/
ipw2200.h  752 struct command_block cb_list[CB_NUMBER_OF_ELEMENTS_SMALL]; member
ipw2200.c  2804 &priv->sram_desc.cb_list[index]); in ipw_fw_dma_kick()
2893 cb = &priv->sram_desc.cb_list[last_cb_element]; in ipw_fw_dma_add_command_block()
