
Searched refs:queues (Results 1 – 25 of 99) sorted by relevance


/drivers/scsi/aacraid/
comminit.c
276 struct aac_entry * queues; in aac_comm_init() local
278 struct aac_queue_block * comm = dev->queues; in aac_comm_init()
297 queues = (struct aac_entry *)(((ulong)headers) + hdrsize); in aac_comm_init()
300 comm->queue[HostNormCmdQueue].base = queues; in aac_comm_init()
302 queues += HOST_NORM_CMD_ENTRIES; in aac_comm_init()
306 comm->queue[HostHighCmdQueue].base = queues; in aac_comm_init()
309 queues += HOST_HIGH_CMD_ENTRIES; in aac_comm_init()
313 comm->queue[AdapNormCmdQueue].base = queues; in aac_comm_init()
316 queues += ADAP_NORM_CMD_ENTRIES; in aac_comm_init()
320 comm->queue[AdapHighCmdQueue].base = queues; in aac_comm_init()
[all …]
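A note on the comminit.c hits: aac_comm_init() carves all of its queue entry arrays out of one contiguous allocation by advancing a single cursor (queues += HOST_NORM_CMD_ENTRIES, and so on). A minimal userspace sketch of that carving pattern, with invented entry layout and queue sizes standing in for the driver's real ones:

    /* One allocation split into per-queue entry arrays by advancing a
     * cursor. Entry layout and queue counts are illustrative only. */
    #include <stdio.h>
    #include <stdlib.h>

    struct entry { unsigned int data[2]; };   /* stand-in for struct aac_entry */

    enum { HOST_NORM = 8, HOST_HIGH = 4, ADAP_NORM = 8, ADAP_HIGH = 4 };

    int main(void)
    {
        size_t total = HOST_NORM + HOST_HIGH + ADAP_NORM + ADAP_HIGH;
        struct entry *cursor = calloc(total, sizeof(*cursor));
        struct entry *base[4];
        if (!cursor)
            return 1;

        base[0] = cursor; cursor += HOST_NORM;   /* HostNormCmdQueue */
        base[1] = cursor; cursor += HOST_HIGH;   /* HostHighCmdQueue */
        base[2] = cursor; cursor += ADAP_NORM;   /* AdapNormCmdQueue */
        base[3] = cursor;                        /* AdapHighCmdQueue */

        for (int i = 0; i < 4; i++)
            printf("queue %d base offset %td\n", i, base[i] - base[0]);
        free(base[0]);                           /* frees the whole block */
        return 0;
    }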
commsup.c
325 q = &dev->queues->queue[qid]; in aac_get_entry()
524 if (!dev->queues) in aac_fib_send()
596 struct aac_queue * q = &dev->queues->queue[AdapNormCmdQueue]; in aac_fib_send()
777 q = &dev->queues->queue[AdapNormRespQueue]; in aac_fib_adapter_complete()
1372 kfree(aac->queues); in _aac_reset_adapter()
1373 aac->queues = NULL; in _aac_reset_adapter()
1705 add_wait_queue(&dev->queues->queue[HostNormCmdQueue].cmdready, &wait); in aac_command_thread()
1709 spin_lock_irqsave(dev->queues->queue[HostNormCmdQueue].lock, flags); in aac_command_thread()
1710 while(!list_empty(&(dev->queues->queue[HostNormCmdQueue].cmdq))) { in aac_command_thread()
1716 entry = dev->queues->queue[HostNormCmdQueue].cmdq.next; in aac_command_thread()
[all …]
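The aac_command_thread() hits show the matching consumer side: sleep on the queue's cmdready wait queue, then pop entries off cmdq under the queue lock, dropping the lock while each entry is handled. A hedged pthread sketch of just that locked drain loop, with a hypothetical struct cmd standing in for the driver's FIB entries:

    #include <pthread.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct cmd { struct cmd *next; int id; };     /* hypothetical entry type */

    static struct cmd *cmdq;                      /* stand-in for the cmdq list */
    static pthread_mutex_t qlock = PTHREAD_MUTEX_INITIALIZER;

    /* Drain in the style of aac_command_thread(): pop under the lock,
     * drop it to handle the entry, retake it to continue. */
    static void drain(void)
    {
        pthread_mutex_lock(&qlock);
        while (cmdq) {
            struct cmd *entry = cmdq;
            cmdq = entry->next;
            pthread_mutex_unlock(&qlock);
            printf("handling cmd %d\n", entry->id);   /* handler runs unlocked */
            free(entry);
            pthread_mutex_lock(&qlock);
        }
        pthread_mutex_unlock(&qlock);
    }

    int main(void)
    {
        for (int i = 0; i < 3; i++) {
            struct cmd *c = malloc(sizeof(*c));
            c->id = i; c->next = cmdq; cmdq = c;
        }
        drain();
        return 0;
    }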
dpcsup.c
87 atomic_dec(&dev->queues->queue[AdapNormCmdQueue].numpending); in aac_response_normal()
293 struct aac_queue *q = &dev->queues->queue[HostNormCmdQueue]; in aac_intr_normal()
357 atomic_dec(&dev->queues->queue[AdapNormCmdQueue].numpending); in aac_intr_normal()
rx.c
68 aac_command_normal(&dev->queues->queue[HostNormCmdQueue]); in aac_rx_intr_producer()
72 aac_response_normal(&dev->queues->queue[HostNormRespQueue]); in aac_rx_intr_producer()
402 struct aac_queue *q = &dev->queues->queue[AdapNormCmdQueue]; in aac_rx_deliver_producer()
425 struct aac_queue *q = &dev->queues->queue[AdapNormCmdQueue]; in aac_rx_deliver_message()
sa.c
68 aac_command_normal(&dev->queues->queue[HostNormCmdQueue]); in aac_sa_intr()
71 aac_response_normal(&dev->queues->queue[HostNormRespQueue]); in aac_sa_intr()
/drivers/gpu/drm/amd/amdkfd/
kfd_process_queue_manager.c
37 list_for_each_entry(pqn, &pqm->queues, process_queue_list) { in get_queue_by_qid()
77 INIT_LIST_HEAD(&pqm->queues); in pqm_init()
97 list_for_each_entry_safe(pqn, next, &pqm->queues, process_queue_list) { in pqm_uninit()
195 if (list_empty(&pqm->queues)) { in pqm_create_queue()
270 list_add(&pqn->process_queue_list, &pqm->queues); in pqm_create_queue()
285 if (list_empty(&pqm->queues)) in pqm_create_queue()
344 if (list_empty(&pqm->queues)) in pqm_destroy_queue()
kfd_process.c
195 kfree(p->queues); in kfd_process_wq_release()
287 process->queues = kmalloc_array(INITIAL_QUEUE_ARRAY_SIZE, in create_process()
288 sizeof(process->queues[0]), GFP_KERNEL); in create_process()
289 if (!process->queues) in create_process()
337 kfree(process->queues); in create_process()
kfd_device_queue_manager.c
415 list_add(&n->list, &dqm->queues); in register_process_nocpsch()
442 list_for_each_entry_safe(cur, next, &dqm->queues, list) { in unregister_process_nocpsch()
560 INIT_LIST_HEAD(&dqm->queues); in initialize_nocpsch()
711 INIT_LIST_HEAD(&dqm->queues); in initialize_cpsch()
757 list_for_each_entry(node, &dqm->queues, list) in start_cpsch()
781 list_for_each_entry(node, &dqm->queues, list) { in stop_cpsch()
1023 retval = pm_send_runlist(&dqm->packets, &dqm->queues); in execute_queues_cpsch()
kfd_packet_manager.c
313 struct list_head *queues, in pm_create_runlist_ib() argument
326 BUG_ON(!pm || !queues || !rl_size_bytes || !rl_gpu_addr); in pm_create_runlist_ib()
342 list_for_each_entry(cur, queues, list) { in pm_create_runlist_ib()
kfd_device_queue_manager.h
164 struct list_head queues; member
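Both amdkfd managers track queues on an intrusive kernel list: pqm->queues and dqm->queues are list_head members, set up with INIT_LIST_HEAD(), extended with list_add(), and walked with list_for_each_entry() or, when entries are deleted mid-walk as in pqm_uninit(), the _safe variant. A self-contained sketch of that idiom with minimal stand-ins for the kernel helpers (field names and sizes invented):

    #include <stddef.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct list_head { struct list_head *next, *prev; };

    static void INIT_LIST_HEAD(struct list_head *h) { h->next = h->prev = h; }

    static void list_add(struct list_head *n, struct list_head *h)
    {
        n->next = h->next; n->prev = h;
        h->next->prev = n; h->next = n;
    }

    #define list_entry(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct queue_node {                /* stand-in for process_queue_node */
        int qid;
        struct list_head list;
    };

    int main(void)
    {
        struct list_head queues;       /* stand-in for pqm->queues */
        struct list_head *pos;

        INIT_LIST_HEAD(&queues);
        for (int i = 0; i < 3; i++) {
            struct queue_node *n = malloc(sizeof(*n));
            n->qid = i;
            list_add(&n->list, &queues);
        }
        /* open-coded list_for_each_entry() */
        for (pos = queues.next; pos != &queues; pos = pos->next)
            printf("qid %d\n", list_entry(pos, struct queue_node, list)->qid);

        /* teardown needs the _safe pattern: grab next before freeing */
        for (pos = queues.next; pos != &queues; ) {
            struct list_head *next = pos->next;
            free(list_entry(pos, struct queue_node, list));
            pos = next;
        }
        return 0;
    }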
/drivers/net/xen-netback/
interface.c
215 queue = &vif->queues[index]; in xenvif_start_xmit()
255 if (vif->queues == NULL) in xenvif_get_stats()
260 queue = &vif->queues[index]; in xenvif_get_stats()
283 queue = &vif->queues[queue_index]; in xenvif_up()
299 queue = &vif->queues[queue_index]; in xenvif_down()
409 void *vif_stats = &vif->queues[queue_index].stats; in xenvif_get_ethtool_stats()
482 vif->queues = NULL; in xenvif_alloc()
694 queue = &vif->queues[queue_index]; in xenvif_disconnect()
743 queue = &vif->queues[queue_index]; in xenvif_free()
747 vfree(vif->queues); in xenvif_free()
[all …]
xenbus.c
206 &vif->queues[i], in xenvif_debugfs_addif()
675 struct xenvif_queue *queue = &vif->queues[queue_index]; in xen_net_rate_changed()
791 be->vif->queues = vzalloc(requested_num_queues * in connect()
793 if (!be->vif->queues) { in connect()
803 queue = &be->vif->queues[queue_index]; in connect()
869 vfree(be->vif->queues); in connect()
870 be->vif->queues = NULL; in connect()
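The netback hits trace the common "flat array of per-queue structs" lifecycle: connect() vzalloc()s vif->queues sized by requested_num_queues, hot paths index it as &vif->queues[i], and teardown vfree()s the array and resets the pointer to NULL so paths like xenvif_get_stats() can bail out. xen-netfront and null_blk below follow the same shape. A minimal userspace analogue, with calloc/free standing in for vzalloc/vfree:

    #include <stdio.h>
    #include <stdlib.h>

    struct queue { unsigned long tx_packets; };   /* stand-in for xenvif_queue */

    struct vif {
        unsigned int num_queues;
        struct queue *queues;
    };

    int main(void)
    {
        struct vif vif = { .num_queues = 4 };

        vif.queues = calloc(vif.num_queues, sizeof(*vif.queues));
        if (!vif.queues)                          /* connect() error path */
            return 1;

        for (unsigned int i = 0; i < vif.num_queues; i++)
            vif.queues[i].tx_packets += i;        /* &vif->queues[i] access */

        free(vif.queues);                         /* vfree() analogue */
        vif.queues = NULL;                        /* lets later code detect teardown */
        if (vif.queues == NULL)                   /* the xenvif_get_stats() guard */
            printf("queues torn down\n");
        return 0;
    }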
/drivers/net/
xen-netfront.c
156 struct netfront_queue *queues; member
349 if (!np->queues || np->broken) in xennet_open()
353 queue = &np->queues[i]; in xennet_open()
631 queue = &np->queues[queue_index]; in xennet_start_xmit()
753 queue = &np->queues[i]; in xennet_close()
1388 xennet_interrupt(0, &info->queues[i]); in xennet_poll_controller()
1430 np->queues = NULL; in xennet_create_dev()
1521 for (i = 0; i < num_queues && info->queues; ++i) { in xennet_disconnect_backend()
1522 struct netfront_queue *queue = &info->queues[i]; in xennet_disconnect_backend()
1884 struct netfront_queue *queue = &info->queues[i]; in xennet_destroy_queues()
[all …]
/drivers/block/
null_blk.c
42 struct nullb_queue *queues; member
308 return &nullb->queues[index]; in nullb_to_queue()
385 struct nullb_queue *nq = &nullb->queues[index]; in null_init_hctx()
412 cleanup_queue(&nullb->queues[i]); in cleanup_queues()
414 kfree(nullb->queues); in cleanup_queues()
615 nullb->queues = kzalloc(submit_queues * sizeof(struct nullb_queue), in setup_queues()
617 if (!nullb->queues) in setup_queues()
632 nq = &nullb->queues[i]; in init_driver_queues()
/drivers/scsi/arm/
fas216.c
207 info->stats.queues, info->stats.removes, info->stats.fins, in fas216_dumpinfo()
995 queue_add_cmd_tail(&info->queues.disconnected, info->SCpnt); in fas216_reselected_intr()
1001 info->SCpnt = queue_remove_tgtluntag(&info->queues.disconnected, in fas216_reselected_intr()
1926 SCpnt = queue_remove_exclude(&info->queues.issue, in fas216_kick()
1950 queue_add_cmd_tail(&info->queues.disconnected, info->SCpnt); in fas216_kick()
2216 info->stats.queues += 1; in fas216_queue_command_lck()
2225 result = !queue_add_cmd_ordered(&info->queues.issue, SCpnt); in fas216_queue_command_lck()
2353 if (queue_remove_cmd(&info->queues.issue, SCpnt)) { in fas216_find_command()
2363 } else if (queue_remove_cmd(&info->queues.disconnected, SCpnt)) { in fas216_find_command()
2497 queue_remove_all_target(&info->queues.issue, target); in fas216_eh_device_reset()
[all …]
acornscsi.c
708 SCpnt = queue_remove_exclude(&host->queues.issue, host->busyluns); in acornscsi_kick()
716 queue_add_cmd_tail(&host->queues.disconnected, host->SCpnt); in acornscsi_kick()
1797 if (!ok && queue_probetgtlun(&host->queues.disconnected, target, lun)) in acornscsi_reconnect()
1812 queue_add_cmd_tail(&host->queues.disconnected, host->SCpnt); in acornscsi_reconnect()
1839 queue_add_cmd_tail(&host->queues.disconnected, host->SCpnt); in acornscsi_reconnect_finish()
1849 host->SCpnt = queue_remove_tgtluntag(&host->queues.disconnected, in acornscsi_reconnect_finish()
2501 host->stats.queues += 1; in acornscsi_queuecmd_lck()
2506 if (!queue_add_cmd_ordered(&host->queues.issue, SCpnt)) { in acornscsi_queuecmd_lck()
2558 if (queue_remove_cmd(&host->queues.issue, SCpnt)) { in acornscsi_do_abort()
2569 } else if (queue_remove_cmd(&host->queues.disconnected, SCpnt)) { in acornscsi_do_abort()
[all …]
acornscsi.h
305 unsigned int queues; member
320 } queues; member
fas216.h
258 unsigned int queues; member
286 } queues; member
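The two ARM SCSI drivers share a queue layout: queues.issue holds commands not yet started and queues.disconnected holds commands whose target has disconnected mid-transaction, with queue_remove_tgtluntag() fishing a command back out by target/LUN/tag on reselection. A sketch of that move-between-queues flow, using plain singly linked lists and an invented command key; the real drivers' queue_* helpers have richer semantics:

    #include <stdio.h>
    #include <stdlib.h>

    struct cmd { struct cmd *next; int target, lun, tag; };

    static struct cmd *issue, *disconnected;      /* the two queues */

    static void push(struct cmd **q, struct cmd *c) { c->next = *q; *q = c; }

    /* queue_remove_tgtluntag() analogue: unlink the matching command */
    static struct cmd *remove_tgtluntag(struct cmd **q, int t, int l, int g)
    {
        for (struct cmd **pp = q; *pp; pp = &(*pp)->next) {
            if ((*pp)->target == t && (*pp)->lun == l && (*pp)->tag == g) {
                struct cmd *c = *pp;
                *pp = c->next;
                return c;
            }
        }
        return NULL;
    }

    int main(void)
    {
        struct cmd *c = malloc(sizeof(*c));
        *c = (struct cmd){ .target = 1, .lun = 0, .tag = 7 };

        push(&issue, c);                            /* queued by queuecommand */
        push(&disconnected,
             remove_tgtluntag(&issue, 1, 0, 7));    /* target disconnects */
        c = remove_tgtluntag(&disconnected, 1, 0, 7); /* reselection */
        printf("reselected tag %d\n", c ? c->tag : -1);
        free(c);
        return 0;
    }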
/drivers/nvme/host/
pci.c
195 struct nvme_queue *nvmeq = dev->queues[0]; in nvme_admin_init_hctx()
219 struct nvme_queue *nvmeq = dev->queues[0]; in nvme_admin_init_request()
230 struct nvme_queue *nvmeq = dev->queues[hctx_idx + 1]; in nvme_init_hctx()
246 struct nvme_queue *nvmeq = dev->queues[hctx_idx + 1]; in nvme_init_request()
1092 struct nvme_queue *nvmeq = dev->queues[0]; in nvme_submit_async_admin_req()
1118 struct nvme_queue *nvmeq = dev->queues[0]; in nvme_submit_admin_async_cmd()
1342 nvme_submit_cmd(dev->queues[0], &cmd); in nvme_abort_req()
1407 struct nvme_queue *nvmeq = dev->queues[i]; in nvme_free_queues()
1409 dev->queues[i] = NULL; in nvme_free_queues()
1451 struct nvme_queue *nvmeq = dev->queues[qid]; in nvme_disable_queue()
[all …]
nvme.h
43 struct nvme_queue **queues; member
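nvme differs from the array-of-structs drivers above: nvme.h declares queues as struct nvme_queue **, an array of pointers, with dev->queues[0] reserved for the admin queue and I/O hctx index i mapping to dev->queues[i + 1]; nvme_free_queues() frees each entry and NULLs its slot. A small sketch of that pointer-array layout (queue counts invented):

    #include <stdio.h>
    #include <stdlib.h>

    struct nvme_queue { int qid; };

    int main(void)
    {
        unsigned int nr_queues = 1 + 4;            /* admin + 4 I/O queues */
        struct nvme_queue **queues = calloc(nr_queues, sizeof(*queues));
        if (!queues)
            return 1;

        for (unsigned int qid = 0; qid < nr_queues; qid++) {
            queues[qid] = malloc(sizeof(**queues));
            if (!queues[qid])
                return 1;
            queues[qid]->qid = (int)qid;
        }

        printf("admin queue is qid %d\n", queues[0]->qid);     /* dev->queues[0] */
        printf("hctx 2 maps to qid %d\n", queues[2 + 1]->qid); /* hctx_idx + 1 */

        for (unsigned int qid = 0; qid < nr_queues; qid++) {
            free(queues[qid]);                     /* nvme_free_queues() style */
            queues[qid] = NULL;
        }
        free(queues);
        return 0;
    }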
/drivers/net/ethernet/cadence/
macb.c
562 (unsigned int)(queue - bp->queues), in macb_tx_error_task()
662 u16 queue_index = queue - bp->queues; in macb_tx_interrupt()
1056 (unsigned int)(queue - bp->queues), in macb_interrupt()
1154 for (q = 0, queue = bp->queues; q < bp->num_queues; ++q, ++queue) in macb_poll_controller()
1293 struct macb_queue *queue = &bp->queues[queue_index]; in macb_start_xmit()
1420 for (q = 0, queue = bp->queues; q < bp->num_queues; ++q, ++queue) { in macb_free_consistent()
1468 for (q = 0, queue = bp->queues; q < bp->num_queues; ++q, ++queue) { in macb_alloc_consistent()
1511 for (q = 0, queue = bp->queues; q < bp->num_queues; ++q, ++queue) { in gem_init_rings()
1541 bp->queues[0].tx_ring[i].addr = 0; in macb_init_rings()
1542 bp->queues[0].tx_ring[i].ctrl = MACB_BIT(TX_USED); in macb_init_rings()
[all …]
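macb iterates its queues with a two-variable loop, for (q = 0, queue = bp->queues; q < bp->num_queues; ++q, ++queue), and recovers a queue's index from its address by pointer subtraction, as in (unsigned int)(queue - bp->queues). Both idioms in a standalone sketch:

    #include <stdio.h>
    #include <stdlib.h>

    struct macb_queue { int irq; };               /* stand-in per-queue struct */

    int main(void)
    {
        unsigned int num_queues = 3, q;
        struct macb_queue *queues = calloc(num_queues, sizeof(*queues));
        struct macb_queue *queue;
        if (!queues)
            return 1;

        /* index and cursor advance together, as in macb_poll_controller() */
        for (q = 0, queue = queues; q < num_queues; ++q, ++queue)
            queue->irq = 100 + (int)q;

        /* recover the index from the pointer, as in macb_tx_interrupt() */
        queue = &queues[2];
        printf("queue index %u\n", (unsigned int)(queue - queues));

        free(queues);
        return 0;
    }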
/drivers/net/wireless/iwlwifi/mvm/
time-event.c
102 u32 queues = 0; in iwl_mvm_roc_done_wk() local
112 queues |= BIT(IWL_MVM_OFFCHANNEL_QUEUE); in iwl_mvm_roc_done_wk()
116 queues |= BIT(mvm->aux_queue); in iwl_mvm_roc_done_wk()
132 iwl_mvm_flush_tx_path(mvm, queues, CMD_ASYNC); in iwl_mvm_roc_done_wk()
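In time-event.c, queues is not an array at all but a u32 bitmask: iwl_mvm_roc_done_wk() ORs in BIT(queue) for each TX queue to be flushed and hands the mask to iwl_mvm_flush_tx_path(). A tiny sketch of the bitmask idiom, with invented queue numbers:

    #include <stdint.h>
    #include <stdio.h>

    #define BIT(n) (1U << (n))

    static void flush_tx_path(uint32_t queues)
    {
        for (unsigned int q = 0; q < 32; q++)
            if (queues & BIT(q))
                printf("flushing queue %u\n", q);
    }

    int main(void)
    {
        uint32_t queues = 0;
        unsigned int offchannel_queue = 8, aux_queue = 15;  /* invented numbers */

        queues |= BIT(offchannel_queue);
        queues |= BIT(aux_queue);
        flush_tx_path(queues);
        return 0;
    }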
/drivers/soc/ti/
Kconfig
15 is responsible for accelerating management of the packet queues.
/drivers/dma/
cppi41.c
835 const struct chan_queues *queues; in cpp41_dma_filter_fn() local
850 queues = cdd->queues_tx; in cpp41_dma_filter_fn()
852 queues = cdd->queues_rx; in cpp41_dma_filter_fn()
858 cchan->q_num = queues[cchan->port_num].submit; in cpp41_dma_filter_fn()
859 cchan->q_comp_num = queues[cchan->port_num].complete; in cpp41_dma_filter_fn()
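cppi41 resolves a channel's queue numbers through a per-direction lookup table indexed by port: queues points at either cdd->queues_tx or cdd->queues_rx, and queues[port].submit / .complete are copied into the channel. A sketch of that table-lookup shape with invented queue assignments:

    #include <stdbool.h>
    #include <stdio.h>

    struct chan_queues { unsigned short submit, complete; };

    /* invented per-port queue assignments */
    static const struct chan_queues queues_tx[] = { {16, 93}, {17, 94} };
    static const struct chan_queues queues_rx[] = { {32, 98}, {33, 99} };

    int main(void)
    {
        unsigned int port = 1;
        bool is_tx = true;
        const struct chan_queues *queues = is_tx ? queues_tx : queues_rx;

        printf("port %u: submit q%u, complete q%u\n",
               port, queues[port].submit, queues[port].complete);
        return 0;
    }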
/drivers/net/wireless/ath/carl9170/
main.c
229 for (i = 0; i < ar->hw->queues; i++) { in carl9170_flush()
286 for (i = 0; i < ar->hw->queues; i++) { in carl9170_zap_queues()
307 for (i = 0; i < ar->hw->queues; i++) in carl9170_zap_queues()
368 for (i = 0; i < ar->hw->queues; i++) { in carl9170_op_start()
1377 if (queue < ar->hw->queues) { in carl9170_op_conf_tx()
1691 u32 queues, bool drop) in carl9170_op_flush() argument
1801 hw->queues = __AR9170_NUM_TXQ; in carl9170_alloc()
1812 for (i = 0; i < ar->hw->queues; i++) { in carl9170_alloc()
