
Searched refs:pnode (Results 1 – 18 of 18) sorted by relevance

/drivers/scsi/lpfc/
lpfc_scsi.c
408 if (psb->rdata && psb->rdata->pnode && in lpfc_sli4_vport_delete_fcp_xri_aborted()
409 psb->rdata->pnode->vport == vport) in lpfc_sli4_vport_delete_fcp_xri_aborted()
478 if (psb->rdata && psb->rdata->pnode) in lpfc_sli4_io_xri_aborted()
479 ndlp = psb->rdata->pnode; in lpfc_sli4_io_xri_aborted()
1000 if (rdata && rdata->pnode) { in lpfc_bg_err_inject()
1001 ndlp = rdata->pnode; in lpfc_bg_err_inject()
3570 struct lpfc_nodelist *pnode = lpfc_cmd->rdata->pnode; in lpfc_send_scsi_error_event() local
3573 if (!pnode) in lpfc_send_scsi_error_event()
3589 &pnode->nlp_portname, sizeof(struct lpfc_name)); in lpfc_send_scsi_error_event()
3591 &pnode->nlp_nodename, sizeof(struct lpfc_name)); in lpfc_send_scsi_error_event()
[all …]
lpfc_scsi.h
43 struct lpfc_nodelist *pnode; /* Pointer to the node structure. */ member
lpfc_nvme.c
1205 struct lpfc_nodelist *pnode, in lpfc_nvme_prep_io_cmd() argument
1231 (pnode->nlp_flag & NLP_FIRSTBURST)) { in lpfc_nvme_prep_io_cmd()
1233 if (req_len < pnode->nvme_fb_size) in lpfc_nvme_prep_io_cmd()
1238 pnode->nvme_fb_size; in lpfc_nvme_prep_io_cmd()
1268 if (pnode->nlp_nvme_info & NLP_NVME_NSLER) { in lpfc_nvme_prep_io_cmd()
1287 phba->sli4_hba.rpi_ids[pnode->nlp_rpi]); in lpfc_nvme_prep_io_cmd()
lpfc_hbadisc.c
110 ndlp = rdata->pnode; in lpfc_rport_invalid()
111 if (!rdata->pnode) { in lpfc_rport_invalid()
137 ndlp = rdata->pnode; in lpfc_terminate_rport_io()
159 ndlp = ((struct lpfc_rport_data *)rport->dd_data)->pnode; in lpfc_dev_loss_tmo_callbk()
181 ((struct lpfc_rport_data *)rport->dd_data)->pnode = NULL; in lpfc_dev_loss_tmo_callbk()
230 ((struct lpfc_rport_data *)rport->dd_data)->pnode = NULL; in lpfc_dev_loss_tmo_callbk()
4505 rdata->pnode = NULL; in lpfc_register_remote_port()
4519 rdata->pnode = lpfc_nlp_get(ndlp); in lpfc_register_remote_port()
4520 if (!rdata->pnode) { in lpfc_register_remote_port()
lpfc_bsg.c
384 struct lpfc_nodelist *ndlp = rdata->pnode; in lpfc_bsg_send_mgmt_cmd()
647 struct lpfc_nodelist *ndlp = rdata->pnode; in lpfc_bsg_rport_els()
lpfc_sli.c
1259 ndlp = lpfc_cmd->rdata->pnode; in __lpfc_sli_get_els_sglq()
12621 if ((lpfc_cmd->rdata) && (lpfc_cmd->rdata->pnode) && in lpfc_sli_validate_fcp_iocb()
12622 (lpfc_cmd->rdata->pnode->nlp_sid == tgt_id) && in lpfc_sli_validate_fcp_iocb()
12627 if ((lpfc_cmd->rdata) && (lpfc_cmd->rdata->pnode) && in lpfc_sli_validate_fcp_iocb()
12628 (lpfc_cmd->rdata->pnode->nlp_sid == tgt_id)) in lpfc_sli_validate_fcp_iocb()
12910 ndlp = lpfc_cmd->rdata->pnode; in lpfc_sli_abort_taskmgmt()
lpfc_attr.c
6804 struct lpfc_nodelist *ndlp = rdata->pnode; in lpfc_set_rport_loss_tmo()
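
Across the lpfc hits, pnode is the lpfc_rport_data member (lpfc_scsi.h line 43 above) that caches the driver's lpfc_nodelist for a remote port, and nearly every caller guards both the rdata pointer and rdata->pnode before use, since lpfc_dev_loss_tmo_callbk() can set pnode to NULL. A condensed, illustrative sketch of that recurring guard, not a copy of any single function above:

        struct lpfc_rport_data *rdata = rport->dd_data;
        struct lpfc_nodelist *ndlp = NULL;

        /* pnode can be cleared during dev-loss handling, so test both */
        if (rdata && rdata->pnode)
                ndlp = rdata->pnode;
        if (!ndlp)
                return;         /* remote node already torn down */
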
/drivers/macintosh/
macio_asic.c
453 struct device_node *np, *pnode; in macio_pci_add_devices() local
465 pnode = of_node_get(chip->of_node); in macio_pci_add_devices()
466 if (pnode == NULL) in macio_pci_add_devices()
470 rdev = macio_add_one_device(chip, parent, pnode, NULL, root_res); in macio_pci_add_devices()
476 for_each_child_of_node(pnode, np) { in macio_pci_add_devices()
492 pnode = mbdev->ofdev.dev.of_node; in macio_pci_add_devices()
493 for_each_child_of_node(pnode, np) { in macio_pci_add_devices()
505 pnode = sdev->ofdev.dev.of_node; in macio_pci_add_devices()
506 for_each_child_of_node(pnode, np) { in macio_pci_add_devices()
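
In macio_pci_add_devices(), pnode is simply a parent device_node: the code pins the chip's OF node and then iterates its children. A minimal sketch of that get/walk/put idiom, with the of_node_put() placement shown for illustration rather than copied from macio_asic.c:

        struct device_node *pnode, *np;

        pnode = of_node_get(chip->of_node);     /* hold a reference on the parent */
        if (pnode == NULL)
                return;

        for_each_child_of_node(pnode, np) {
                /* register one macio device per child node */
        }
        of_node_put(pnode);                     /* balance of_node_get() */
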
/drivers/base/power/
qos.c
163 &req->data.pnode, action, value); in apply_constraint()
167 &req->data.pnode, action, value); in apply_constraint()
274 plist_for_each_entry_safe(req, tmp, &c->list, data.pnode) { in dev_pm_qos_constraints_destroy()
284 plist_for_each_entry_safe(req, tmp, &c->list, data.pnode) { in dev_pm_qos_constraints_destroy()
290 plist_for_each_entry_safe(req, tmp, &c->list, data.freq.pnode) { in dev_pm_qos_constraints_destroy()
297 plist_for_each_entry_safe(req, tmp, &c->list, data.freq.pnode) { in dev_pm_qos_constraints_destroy()
425 curr_value = req->data.pnode.prio; in __dev_pm_qos_update_request()
429 curr_value = req->data.freq.pnode.prio; in __dev_pm_qos_update_request()
895 dev->power.qos->latency_tolerance_req->data.pnode.prio; in dev_pm_qos_get_user_latency_tolerance()
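
For dev_pm_qos, pnode names the plist_node embedded in each request (directly as data.pnode or under data.freq), and the hits show both the list walks and the effective value being read back from pnode.prio. A rough sketch of the underlying plist idiom, using made-up names rather than the real dev_pm_qos structures:

        struct my_req {
                struct plist_node pnode;        /* stand-in for data.pnode */
        } req;
        struct my_req *pos;
        struct plist_head constraints;

        plist_head_init(&constraints);
        plist_node_init(&req.pnode, 25);        /* the node's prio carries the value */
        plist_add(&req.pnode, &constraints);

        plist_for_each_entry(pos, &constraints, pnode)
                pr_debug("constraint value %d\n", pos->pnode.prio);
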
/drivers/misc/sgi-gru/
gruhandles.h
143 static inline unsigned long gru_chiplet_paddr(unsigned long paddr, int pnode, in gru_chiplet_paddr() argument
146 return paddr + GRU_SIZE * (2 * pnode + chiplet); in gru_chiplet_paddr()
149 static inline void *gru_chiplet_vaddr(void *vaddr, int pnode, int chiplet) in gru_chiplet_vaddr() argument
151 return vaddr + GRU_SIZE * (2 * pnode + chiplet); in gru_chiplet_vaddr()
grufile.c
254 int pnode, nid, bid, chip; in gru_init_tables() local
265 pnode = uv_blade_to_pnode(bid); in gru_init_tables()
281 paddr = gru_chiplet_paddr(gru_base_paddr, pnode, chip); in gru_init_tables()
282 vaddr = gru_chiplet_vaddr(gru_base_vaddr, pnode, chip); in gru_init_tables()
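
In the SGI GRU driver, pnode is a UV physical node number and the chiplet helpers are pure address arithmetic: each node hosts two GRU chiplets, so the offset from the base is GRU_SIZE * (2 * pnode + chiplet). For example, if GRU_SIZE were 4 MB (a value assumed here only for the worked example), pnode 3 / chiplet 1 would land 4 MB * 7 = 28 MB past the base:

        /* GRU_SIZE value assumed purely for this worked example */
        paddr = gru_chiplet_paddr(gru_base_paddr, 3, 1);
        /* == gru_base_paddr + GRU_SIZE * (2 * 3 + 1) */
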
/drivers/net/ipvlan/
ipvlan_main.c
18 list_for_each_entry(ipvlan, &port->ipvlans, pnode) { in ipvlan_set_port_mode()
47 list_for_each_entry_continue_reverse(ipvlan, &port->ipvlans, pnode) { in ipvlan_set_port_mode()
636 list_add_tail_rcu(&ipvlan->pnode, &port->ipvlans); in ipvlan_link_new()
664 list_del_rcu(&ipvlan->pnode); in ipvlan_link_delete()
738 list_for_each_entry(ipvlan, &port->ipvlans, pnode) in ipvlan_device_event()
760 list_for_each_entry_safe(ipvlan, next, &port->ipvlans, pnode) in ipvlan_device_event()
767 list_for_each_entry(ipvlan, &port->ipvlans, pnode) { in ipvlan_device_event()
774 list_for_each_entry(ipvlan, &port->ipvlans, pnode) in ipvlan_device_event()
780 list_for_each_entry(ipvlan, &port->ipvlans, pnode) { in ipvlan_device_event()
790 list_for_each_entry(ipvlan, &port->ipvlans, pnode) { in ipvlan_device_event()
ipvlan.h
64 struct list_head pnode; member
ipvlan_core.c
126 list_for_each_entry_rcu(ipvlan, &port->ipvlans, pnode) { in ipvlan_addr_busy()
258 list_for_each_entry_rcu(ipvlan, &port->ipvlans, pnode) { in ipvlan_process_multicast()
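
In ipvlan, pnode is the intrusive list_head (ipvlan.h line 64 above) that links each slave onto its port's ipvlans list: ipvlan_link_new() adds it with list_add_tail_rcu(), the event and mode handlers walk it with list_for_each_entry(), and ipvlan_link_delete() unlinks it with list_del_rcu(). A compressed sketch of that lifecycle, with locking and error handling omitted:

        /* attach a new slave to the port (cf. ipvlan_link_new()) */
        list_add_tail_rcu(&ipvlan->pnode, &port->ipvlans);

        /* visit every slave on the port (cf. ipvlan_device_event()) */
        list_for_each_entry(ipvlan, &port->ipvlans, pnode) {
                /* act on each slave's net_device here */
        }

        /* detach the slave again (cf. ipvlan_link_delete()) */
        list_del_rcu(&ipvlan->pnode);
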
/drivers/gpu/drm/nouveau/nvkm/core/
mm.c
112 u32 align, struct nvkm_mm_node **pnode) in nvkm_mm_head() argument
153 *pnode = this; in nvkm_mm_head()
187 u32 align, struct nvkm_mm_node **pnode) in nvkm_mm_tail() argument
232 *pnode = this; in nvkm_mm_tail()
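
In nouveau's range allocator, pnode is an output parameter: nvkm_mm_head() and nvkm_mm_tail() scan the region list and, on success, hand the chosen nvkm_mm_node back through the double pointer (the *pnode = this lines above). A hedged sketch of how a caller consumes that style of API; the argument names before the final pointer are placeholders, not a checked copy of the real signature:

        struct nvkm_mm_node *node = NULL;
        int ret;

        ret = nvkm_mm_head(mm, heap, type, size_max, size_min, align, &node);
        if (ret)
                return ret;             /* nothing suitable; node stays NULL */
        /* node now points at the allocated range */
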
/drivers/clk/st/
clk-flexgen.c
643 struct device_node *pnode; in st_of_flexgen_setup() local
656 pnode = of_get_parent(np); in st_of_flexgen_setup()
657 if (!pnode) in st_of_flexgen_setup()
660 reg = of_iomap(pnode, 0); in st_of_flexgen_setup()
661 of_node_put(pnode); in st_of_flexgen_setup()
clkgen-pll.c
686 struct device_node *pnode; in clkgen_get_register_base() local
689 pnode = of_get_parent(np); in clkgen_get_register_base()
690 if (!pnode) in clkgen_get_register_base()
693 reg = of_iomap(pnode, 0); in clkgen_get_register_base()
695 of_node_put(pnode); in clkgen_get_register_base()
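
Both ST clock drivers use the same device-tree idiom: the register block is described on the parent node, so they grab the parent with of_get_parent(), map its first reg region with of_iomap(), and drop the reference with of_node_put(). A compact sketch of that sequence:

        struct device_node *pnode;
        void __iomem *reg;

        pnode = of_get_parent(np);
        if (!pnode)
                return NULL;

        reg = of_iomap(pnode, 0);       /* map the parent's first "reg" entry */
        of_node_put(pnode);             /* balance of_get_parent() */
        return reg;
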
/drivers/virtio/
virtio.c
369 struct device_node *np, *pnode = dev_of_node(dev->dev.parent); in virtio_device_of_init() local
373 if (!pnode) in virtio_device_of_init()
376 count = of_get_available_child_count(pnode); in virtio_device_of_init()
384 np = of_get_next_available_child(pnode, NULL); in virtio_device_of_init()
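
Finally, virtio_device_of_init() resolves the OF node from the other direction: pnode is the parent transport device's node (dev_of_node(dev->dev.parent)), and the virtio device is expected to be its single available child. A condensed sketch of the checks these hits imply; the specific error codes are illustrative:

        struct device_node *np, *pnode = dev_of_node(dev->dev.parent);
        int count;

        if (!pnode)
                return 0;               /* no DT description for this device */

        count = of_get_available_child_count(pnode);
        if (count != 1)
                return -EINVAL;         /* illustrative: expect exactly one child */

        np = of_get_next_available_child(pnode, NULL);
        if (!np)
                return -ENODEV;
        /* hand np to the new virtio device (not shown in the hits above) */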