Searched refs:reqs (Results 1 – 25 of 41) sorted by relevance

/drivers/i2c/busses/
i2c-virtio.c
58 struct virtio_i2c_req *reqs, in virtio_i2c_prepare_reqs() argument
67 init_completion(&reqs[i].completion); in virtio_i2c_prepare_reqs()
73 reqs[i].out_hdr.addr = cpu_to_le16(msgs[i].addr << 1); in virtio_i2c_prepare_reqs()
76 reqs[i].out_hdr.flags |= cpu_to_le32(VIRTIO_I2C_FLAGS_M_RD); in virtio_i2c_prepare_reqs()
79 reqs[i].out_hdr.flags |= cpu_to_le32(VIRTIO_I2C_FLAGS_FAIL_NEXT); in virtio_i2c_prepare_reqs()
81 sg_init_one(&out_hdr, &reqs[i].out_hdr, sizeof(reqs[i].out_hdr)); in virtio_i2c_prepare_reqs()
85 reqs[i].buf = i2c_get_dma_safe_msg_buf(&msgs[i], 1); in virtio_i2c_prepare_reqs()
86 if (!reqs[i].buf) in virtio_i2c_prepare_reqs()
89 sg_init_one(&msg_buf, reqs[i].buf, msgs[i].len); in virtio_i2c_prepare_reqs()
97 sg_init_one(&in_hdr, &reqs[i].in_hdr, sizeof(reqs[i].in_hdr)); in virtio_i2c_prepare_reqs()
[all …]
/drivers/virt/acrn/
ioreq.c
283 (req->reqs.pio_request.address == 0xcf8)); in is_cfg_addr()
289 ((req->reqs.pio_request.address >= 0xcfc) && in is_cfg_data()
290 (req->reqs.pio_request.address < (0xcfc + 4)))); in is_cfg_data()
319 WARN_ON(req->reqs.pio_request.size != 4); in handle_cf8cfc()
320 if (req->reqs.pio_request.direction == ACRN_IOREQ_DIR_WRITE) in handle_cf8cfc()
321 vm->pci_conf_addr = req->reqs.pio_request.value; in handle_cf8cfc()
323 req->reqs.pio_request.value = vm->pci_conf_addr; in handle_cf8cfc()
327 if (req->reqs.pio_request.direction == in handle_cf8cfc()
329 req->reqs.pio_request.value = 0xffffffff; in handle_cf8cfc()
332 offset = req->reqs.pio_request.address - 0xcfc; in handle_cf8cfc()
[all …]
ioeventfd.c
204 if (req->reqs.mmio_request.direction == ACRN_IOREQ_DIR_READ) { in acrn_ioeventfd_handler()
206 req->reqs.mmio_request.value = 0; in acrn_ioeventfd_handler()
209 addr = req->reqs.mmio_request.address; in acrn_ioeventfd_handler()
210 size = req->reqs.mmio_request.size; in acrn_ioeventfd_handler()
211 val = req->reqs.mmio_request.value; in acrn_ioeventfd_handler()
213 if (req->reqs.pio_request.direction == ACRN_IOREQ_DIR_READ) { in acrn_ioeventfd_handler()
215 req->reqs.pio_request.value = 0; in acrn_ioeventfd_handler()
218 addr = req->reqs.pio_request.address; in acrn_ioeventfd_handler()
219 size = req->reqs.pio_request.size; in acrn_ioeventfd_handler()
220 val = req->reqs.pio_request.value; in acrn_ioeventfd_handler()
/drivers/gpu/drm/msm/disp/dpu1/
dpu_rm.c
317 struct dpu_rm_requirements *reqs) in _dpu_rm_check_lm_and_get_connected_blks() argument
342 if (!reqs->topology.num_dspp) in _dpu_rm_check_lm_and_get_connected_blks()
364 struct dpu_rm_requirements *reqs) in _dpu_rm_reserve_lms() argument
372 if (!reqs->topology.num_lm) { in _dpu_rm_reserve_lms()
373 DPU_ERROR("invalid number of lm: %d\n", reqs->topology.num_lm); in _dpu_rm_reserve_lms()
379 lm_count < reqs->topology.num_lm; i++) { in _dpu_rm_reserve_lms()
388 &dspp_idx[lm_count], reqs)) { in _dpu_rm_reserve_lms()
396 lm_count < reqs->topology.num_lm; j++) { in _dpu_rm_reserve_lms()
409 reqs)) { in _dpu_rm_reserve_lms()
418 if (lm_count != reqs->topology.num_lm) { in _dpu_rm_reserve_lms()
[all …]
/drivers/net/wireless/broadcom/brcm80211/brcmfmac/
pno.c
34 struct cfg80211_sched_scan_request *reqs[BRCMF_PNO_MAX_BUCKETS]; member
49 pi->reqs[pi->n_reqs++] = req; in brcmf_pno_store_request()
66 if (pi->reqs[i]->reqid == reqid) in brcmf_pno_remove_request()
84 pi->reqs[i] = pi->reqs[i + 1]; in brcmf_pno_remove_request()
164 if (pi->reqs[ri]->flags & NL80211_SCAN_FLAG_RANDOM_ADDR) { in brcmf_pno_set_random()
165 mac_addr = pi->reqs[ri]->mac_addr; in brcmf_pno_set_random()
166 mac_mask = pi->reqs[ri]->mac_addr_mask; in brcmf_pno_set_random()
188 pi->reqs[ri]->reqid, pfn_mac.mac); in brcmf_pno_set_random()
313 *scan_freq = pi->reqs[0]->scan_plans[0].interval; in brcmf_pno_prep_fwconfig()
315 sr = pi->reqs[i]; in brcmf_pno_prep_fwconfig()
[all …]
usb.c
436 struct brcmf_usbreq *req, *reqs; in brcmf_usbdev_qinit() local
438 reqs = kcalloc(qsize, sizeof(struct brcmf_usbreq), GFP_ATOMIC); in brcmf_usbdev_qinit()
439 if (reqs == NULL) in brcmf_usbdev_qinit()
442 req = reqs; in brcmf_usbdev_qinit()
453 return reqs; in brcmf_usbdev_qinit()
462 kfree(reqs); in brcmf_usbdev_qinit()
/drivers/interconnect/
core.c
158 path = kzalloc(struct_size(path, reqs, num_nodes), GFP_KERNEL); in path_init()
166 hlist_add_head(&path->reqs[i].req_node, &node->req_list); in path_init()
167 path->reqs[i].node = node; in path_init()
168 path->reqs[i].dev = dev; in path_init()
169 path->reqs[i].enabled = true; in path_init()
291 next = path->reqs[i].node; in apply_constraints()
586 path->reqs[i].tag = tag; in icc_set_tag()
640 old_avg = path->reqs[0].avg_bw; in icc_set_bw()
641 old_peak = path->reqs[0].peak_bw; in icc_set_bw()
644 node = path->reqs[i].node; in icc_set_bw()
[all …]
trace.h
26 __string(dev, dev_name(p->reqs[i].dev))
36 __assign_str(dev, dev_name(p->reqs[i].dev));
62 __string(dev, dev_name(p->reqs[0].dev))
68 __assign_str(dev, dev_name(p->reqs[0].dev));
internal.h
41 struct icc_req reqs[]; member
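
Note: the interconnect hits above illustrate a common kernel idiom: a struct that ends in a flexible array member (struct icc_req reqs[]; in internal.h), allocated as a single block whose size is computed with struct_size() (core.c line 158). The sketch below is a minimal userspace rendition of that pattern, using hypothetical demo_req/demo_path types rather than the real icc structures.

#include <stdio.h>
#include <stdlib.h>

struct demo_req {
	unsigned int avg_bw;
	unsigned int peak_bw;
};

struct demo_path {
	size_t num_nodes;
	struct demo_req reqs[];	/* flexible array member, like icc_path::reqs */
};

static struct demo_path *demo_path_alloc(size_t num_nodes)
{
	/*
	 * One allocation covers the header plus num_nodes trailing entries.
	 * The kernel code uses kzalloc(struct_size(path, reqs, num_nodes), ...)
	 * for the same size computation, with overflow checking.
	 */
	struct demo_path *path = calloc(1, sizeof(*path) +
					   num_nodes * sizeof(path->reqs[0]));

	if (path)
		path->num_nodes = num_nodes;
	return path;
}

int main(void)
{
	struct demo_path *path = demo_path_alloc(3);
	size_t i;

	if (!path)
		return 1;
	for (i = 0; i < path->num_nodes; i++)
		path->reqs[i].avg_bw = 1000 * (unsigned int)(i + 1);
	printf("reqs[2].avg_bw = %u\n", path->reqs[2].avg_bw);
	free(path);
	return 0;
}

struct_size(p, member, n) from <linux/overflow.h> evaluates to sizeof(*p) plus n entries of p->member, saturating instead of wrapping on overflow, which is why the kernel sources prefer it over the open-coded arithmetic shown here.
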
/drivers/pci/
ats.c
196 int pci_enable_pri(struct pci_dev *pdev, u32 reqs) in pci_enable_pri() argument
224 reqs = min(max_requests, reqs); in pci_enable_pri()
225 pdev->pri_reqs_alloc = reqs; in pci_enable_pri()
226 pci_write_config_dword(pdev, pri + PCI_PRI_ALLOC_REQ, reqs); in pci_enable_pri()
272 u32 reqs = pdev->pri_reqs_alloc; in pci_restore_pri_state() local
284 pci_write_config_dword(pdev, pri + PCI_PRI_ALLOC_REQ, reqs); in pci_restore_pri_state()
/drivers/usb/gadget/function/
u_audio.c
53 struct usb_request **reqs; member
454 if (prm->reqs[i]) { in free_ep()
455 if (usb_ep_dequeue(ep, prm->reqs[i])) in free_ep()
456 usb_ep_free_request(ep, prm->reqs[i]); in free_ep()
463 prm->reqs[i] = NULL; in free_ep()
608 if (!prm->reqs[i]) { in u_audio_start_capture()
613 prm->reqs[i] = req; in u_audio_start_capture()
622 if (usb_ep_queue(ep, prm->reqs[i], GFP_ATOMIC)) in u_audio_start_capture()
727 if (!prm->reqs[i]) { in u_audio_start_playback()
732 prm->reqs[i] = req; in u_audio_start_playback()
[all …]
/drivers/net/wireless/zydas/zd1211rw/
zd_chip.c
368 struct zd_ioreq32 reqs[2] = {in_reqs[0], in_reqs[1]}; in zd_write_mac_addr_common() local
371 reqs[0].value = (mac_addr[3] << 24) in zd_write_mac_addr_common()
375 reqs[1].value = (mac_addr[5] << 8) in zd_write_mac_addr_common()
383 r = zd_iowrite32a_locked(chip, reqs, ARRAY_SIZE(reqs)); in zd_write_mac_addr_common()
393 static const struct zd_ioreq32 reqs[2] = { in zd_write_mac_addr() local
398 return zd_write_mac_addr_common(chip, mac_addr, reqs, "mac"); in zd_write_mac_addr()
403 static const struct zd_ioreq32 reqs[2] = { in zd_write_bssid() local
408 return zd_write_mac_addr_common(chip, bssid, reqs, "bssid"); in zd_write_bssid()
860 struct zd_ioreq32 reqs[3]; in set_aw_pt_bi() local
870 reqs[0].addr = CR_ATIM_WND_PERIOD; in set_aw_pt_bi()
[all …]
/drivers/tee/optee/
supp.c
28 INIT_LIST_HEAD(&supp->reqs); in optee_supp_init()
54 list_for_each_entry_safe(req, req_tmp, &supp->reqs, link) { in optee_supp_release()
104 list_add_tail(&req->link, &supp->reqs); in optee_supp_thrd_req()
165 if (list_empty(&supp->reqs)) in supp_pop_entry()
168 req = list_first_entry(&supp->reqs, struct optee_supp_req, link); in supp_pop_entry()
/drivers/mtd/nand/
ecc.c
469 const struct nand_ecc_props *reqs = nanddev_get_ecc_requirements(nand); in nand_ecc_is_strong_enough() local
474 if (conf->step_size == 0 || reqs->step_size == 0) in nand_ecc_is_strong_enough()
483 ds_corr = (mtd->writesize * reqs->strength) / reqs->step_size; in nand_ecc_is_strong_enough()
485 return corr >= ds_corr && conf->strength >= reqs->strength; in nand_ecc_is_strong_enough()
ecc-mxic.c
223 struct nand_ecc_props *reqs = &nand->ecc.requirements; in mxic_ecc_init_ctx() local
255 } else if (reqs->step_size && reqs->strength) { in mxic_ecc_init_ctx()
256 step_size = reqs->step_size; in mxic_ecc_init_ctx()
257 strength = reqs->strength; in mxic_ecc_init_ctx()
/drivers/net/ethernet/intel/ixgbevf/
mbx.c
79 hw->mbx.stats.reqs++; in ixgbevf_clear_msg_vf()
149 hw->mbx.stats.reqs++; in ixgbevf_check_for_msg_vf()
424 mbx->stats.reqs = 0; in ixgbevf_init_mbx_params_vf()
/drivers/scsi/
hptiop.c
191 req = hba->reqs[tag >> 8].req_virt; in hptiop_request_callback_mv()
242 req = hba->reqs[(_tag >> 4) & 0xff].req_virt; in hptiop_request_callback_mvfrey()
729 scp = hba->reqs[tag].scp; in hptiop_finish_scsi_req()
773 free_req(hba, &hba->reqs[tag]); in hptiop_finish_scsi_req()
783 req = hba->reqs[tag].req_virt; in hptiop_host_request_callback_itl()
788 req = hba->reqs[tag].req_virt; in hptiop_host_request_callback_itl()
1449 hba->reqs[i].next = NULL; in hptiop_probe()
1450 hba->reqs[i].req_virt = start_virt; in hptiop_probe()
1451 hba->reqs[i].req_shifted_phy = start_phy >> 5; in hptiop_probe()
1452 hba->reqs[i].index = i; in hptiop_probe()
[all …]
/drivers/net/ethernet/intel/igbvf/
mbx.c
162 hw->mbx.stats.reqs++; in e1000_check_for_msg_vf()
331 mbx->stats.reqs = 0; in e1000_init_mbx_params_vf()
/drivers/net/ethernet/microsoft/mana/
hw_channel.c
210 rx_req = &hwc_rxq->msg_buf->reqs[rx_req_idx]; in mana_hwc_rx_event_handler()
406 dma_buf = kzalloc(struct_size(dma_buf, reqs, q_depth), GFP_KERNEL); in mana_hwc_alloc_dma_buf()
425 hwc_wr = &dma_buf->reqs[i]; in mana_hwc_alloc_dma_buf()
579 req = &hwc_rxq->msg_buf->reqs[i]; in mana_hwc_test_channel()
796 tx_wr = &txq->msg_buf->reqs[msg_id]; in mana_hwc_send_request()
hw_channel.h
120 struct hwc_work_request reqs[]; member
/drivers/net/ethernet/intel/ixgbe/
ixgbe_mbx.c
237 hw->mbx.stats.reqs++; in ixgbe_check_for_msg_pf()
418 mbx->stats.reqs = 0; in ixgbe_init_mbx_params_pf()
/drivers/infiniband/ulp/rtrs/
rtrs-clt.c
500 req = &clt_path->reqs[msg_id]; in process_io_rsp()
986 req = &clt_path->reqs[permit->mem_id]; in rtrs_clt_get_req()
1002 req = &alive_path->reqs[fail_req->permit->mem_id]; in rtrs_clt_get_copy_req()
1321 if (!clt_path->reqs) in fail_all_outstanding_reqs()
1324 req = &clt_path->reqs[i]; in fail_all_outstanding_reqs()
1347 if (!clt_path->reqs) in free_path_reqs()
1350 req = &clt_path->reqs[i]; in free_path_reqs()
1356 kfree(clt_path->reqs); in free_path_reqs()
1357 clt_path->reqs = NULL; in free_path_reqs()
1365 clt_path->reqs = kcalloc(clt_path->queue_depth, in alloc_path_reqs()
[all …]
/drivers/net/ethernet/intel/igb/
e1000_mbx.c
268 hw->mbx.stats.reqs++; in igb_check_for_msg_pf()
469 mbx->stats.reqs = 0; in igb_init_mbx_params_pf()
/drivers/vdpa/vdpa_sim/
vdpa_sim_blk.c
305 int reqs = 0; in vdpasim_blk_work() local
319 if (++reqs > 4) { in vdpasim_blk_work()
/drivers/infiniband/hw/hfi1/
user_sdma.h
89 struct user_sdma_request *reqs; member
