Searched refs:smp (Results 1 – 25 of 28) sorted by relevance

/drivers/infiniband/core/
smi.c
129 enum smi_action smi_handle_dr_smp_send(struct ib_smp *smp, in smi_handle_dr_smp_send() argument
133 &smp->hop_ptr, smp->hop_cnt, in smi_handle_dr_smp_send()
134 smp->initial_path, in smi_handle_dr_smp_send()
135 smp->return_path, in smi_handle_dr_smp_send()
136 ib_get_smp_direction(smp), in smi_handle_dr_smp_send()
137 smp->dr_dlid == IB_LID_PERMISSIVE, in smi_handle_dr_smp_send()
138 smp->dr_slid == IB_LID_PERMISSIVE); in smi_handle_dr_smp_send()
141 enum smi_action opa_smi_handle_dr_smp_send(struct opa_smp *smp, in opa_smi_handle_dr_smp_send() argument
145 &smp->hop_ptr, smp->hop_cnt, in opa_smi_handle_dr_smp_send()
146 smp->route.dr.initial_path, in opa_smi_handle_dr_smp_send()
[all …]
opa_smi.h
42 enum smi_action opa_smi_handle_dr_smp_recv(struct opa_smp *smp, bool is_switch,
44 int opa_smi_get_fwd_port(struct opa_smp *smp);
45 extern enum smi_forward_action opa_smi_check_forward_dr_smp(struct opa_smp *smp);
46 extern enum smi_action opa_smi_handle_dr_smp_send(struct opa_smp *smp,
53 static inline enum smi_action opa_smi_check_local_smp(struct opa_smp *smp, in opa_smi_check_local_smp() argument
59 !opa_get_smp_direction(smp) && in opa_smi_check_local_smp()
60 (smp->hop_ptr == smp->hop_cnt + 1)) ? in opa_smi_check_local_smp()
68 static inline enum smi_action opa_smi_check_local_returning_smp(struct opa_smp *smp, in opa_smi_check_local_returning_smp() argument
74 opa_get_smp_direction(smp) && in opa_smi_check_local_returning_smp()
75 !smp->hop_ptr) ? IB_SMI_HANDLE : IB_SMI_DISCARD; in opa_smi_check_local_returning_smp()
smi.h
54 enum smi_action smi_handle_dr_smp_recv(struct ib_smp *smp, bool is_switch,
56 int smi_get_fwd_port(struct ib_smp *smp);
57 extern enum smi_forward_action smi_check_forward_dr_smp(struct ib_smp *smp);
58 extern enum smi_action smi_handle_dr_smp_send(struct ib_smp *smp,
65 static inline enum smi_action smi_check_local_smp(struct ib_smp *smp, in smi_check_local_smp() argument
71 !ib_get_smp_direction(smp) && in smi_check_local_smp()
72 (smp->hop_ptr == smp->hop_cnt + 1)) ? in smi_check_local_smp()
80 static inline enum smi_action smi_check_local_returning_smp(struct ib_smp *smp, in smi_check_local_returning_smp() argument
86 ib_get_smp_direction(smp) && in smi_check_local_returning_smp()
87 !smp->hop_ptr) ? IB_SMI_HANDLE : IB_SMI_DISCARD); in smi_check_local_returning_smp()
mad.c
768 struct ib_smp *smp = mad_send_wr->send_buf.mad; in handle_outgoing_dr_smp() local
769 struct opa_smp *opa_smp = (struct opa_smp *)smp; in handle_outgoing_dr_smp()
786 smp->mgmt_class == IB_MGMT_CLASS_SUBN_DIRECTED_ROUTE) in handle_outgoing_dr_smp()
797 if (opa && smp->class_version == OPA_SM_CLASS_VERSION) { in handle_outgoing_dr_smp()
825 if ((ib_get_smp_direction(smp) ? smp->dr_dlid : smp->dr_slid) == in handle_outgoing_dr_smp()
827 smi_handle_dr_smp_send(smp, rdma_cap_ib_switch(device), port_num) == in handle_outgoing_dr_smp()
833 drslid = be16_to_cpu(smp->dr_slid); in handle_outgoing_dr_smp()
836 if (smi_check_local_smp(smp, device) == IB_SMI_DISCARD && in handle_outgoing_dr_smp()
837 smi_check_local_returning_smp(smp, device) == IB_SMI_DISCARD) in handle_outgoing_dr_smp()
860 if (opa && smp->base_version == OPA_MGMT_BASE_VERSION) { in handle_outgoing_dr_smp()
[all …]
/drivers/gpu/drm/msm/mdp/mdp5/
mdp5_smp.c
41 struct mdp5_kms *get_kms(struct mdp5_smp *smp) in get_kms() argument
43 struct msm_drm_private *priv = smp->dev->dev_private; in get_kms()
67 return mdp5_cfg->smp.clients[pipe] + plane; in pipe2client()
71 static int smp_request_block(struct mdp5_smp *smp, in smp_request_block() argument
76 int i, avail, cnt = smp->blk_cnt; in smp_request_block()
82 reserved = smp->reserved[cid]; in smp_request_block()
91 dev_err(smp->dev->dev, "out of blks (req=%d > avail=%d)\n", in smp_request_block()
105 static void set_fifo_thresholds(struct mdp5_smp *smp, in set_fifo_thresholds() argument
108 u32 smp_entries_per_blk = smp->blk_size / (128 / BITS_PER_BYTE); in set_fifo_thresholds()
114 smp->pipe_reqprio_fifo_wm0[pipe] = val * 1; in set_fifo_thresholds()
[all …]
mdp5_smp.h
82 void mdp5_smp_destroy(struct mdp5_smp *smp);
84 void mdp5_smp_dump(struct mdp5_smp *smp, struct drm_printer *p);
86 uint32_t mdp5_smp_calculate(struct mdp5_smp *smp,
90 int mdp5_smp_assign(struct mdp5_smp *smp, struct mdp5_smp_state *state,
92 void mdp5_smp_release(struct mdp5_smp *smp, struct mdp5_smp_state *state,
95 void mdp5_smp_prepare_commit(struct mdp5_smp *smp, struct mdp5_smp_state *state);
96 void mdp5_smp_complete_commit(struct mdp5_smp *smp, struct mdp5_smp_state *state);
mdp5_pipe.c
75 if (mdp5_kms->smp) { in mdp5_pipe_assign()
79 ret = mdp5_smp_assign(mdp5_kms->smp, &state->smp, in mdp5_pipe_assign()
110 if (mdp5_kms->smp) { in mdp5_pipe_release()
112 mdp5_smp_release(mdp5_kms->smp, &state->smp, hwpipe->pipe); in mdp5_pipe_release()
mdp5_kms.c
95 if (mdp5_kms->smp) in mdp5_get_state()
96 new_state->smp = mdp5_kms->state->smp; in mdp5_get_state()
116 if (mdp5_kms->smp) in mdp5_prepare_commit()
117 mdp5_smp_prepare_commit(mdp5_kms->smp, &mdp5_kms->state->smp); in mdp5_prepare_commit()
125 if (mdp5_kms->smp) in mdp5_complete_commit()
126 mdp5_smp_complete_commit(mdp5_kms->smp, &mdp5_kms->state->smp); in mdp5_complete_commit()
191 if (!mdp5_kms->smp) { in smp_show()
196 mdp5_smp_dump(mdp5_kms->smp, &p); in smp_show()
719 if (mdp5_kms->smp) in mdp5_destroy()
720 mdp5_smp_destroy(mdp5_kms->smp); in mdp5_destroy()
[all …]
mdp5_cfg.c
32 .smp = {
116 .smp = {
199 .smp = {
294 .smp = {
361 .smp = {
mdp5_irq.c
44 if (mdp5_kms->smp) in mdp5_irq_error_handler()
45 mdp5_smp_dump(mdp5_kms->smp, &p); in mdp5_irq_error_handler()
mdp5_kms.h
58 struct mdp5_smp *smp; member
92 struct mdp5_smp_state smp; member
mdp5_cfg.h
91 struct mdp5_smp_block smp; member
mdp5_plane.c
397 if (mdp5_kms->smp) { in mdp5_plane_atomic_check_with_state()
401 blkcfg = mdp5_smp_calculate(mdp5_kms->smp, format, in mdp5_plane_atomic_check_with_state()
/drivers/infiniband/hw/hfi1/
mad.c
81 static int reply(struct ib_mad_hdr *smp) in reply() argument
87 smp->method = IB_MGMT_METHOD_GET_RESP; in reply()
88 if (smp->mgmt_class == IB_MGMT_CLASS_SUBN_DIRECTED_ROUTE) in reply()
89 smp->status |= IB_SMP_DIRECTION; in reply()
93 static inline void clear_opa_smp_data(struct opa_smp *smp) in clear_opa_smp_data() argument
95 void *data = opa_get_smp_data(smp); in clear_opa_smp_data()
96 size_t size = opa_get_smp_data_size(smp); in clear_opa_smp_data()
219 struct opa_smp *smp) in subn_handle_opa_trap_repress() argument
226 if (smp->attr_id != IB_SMP_ATTR_NOTICE) in subn_handle_opa_trap_repress()
234 if (trap && trap->tid == smp->tid) { in subn_handle_opa_trap_repress()
[all …]
ud.c
760 struct rvt_qp *qp, u16 slid, struct opa_smp *smp) in opa_smp_check() argument
779 if (smp->mgmt_class != IB_MGMT_CLASS_SUBN_DIRECTED_ROUTE && in opa_smp_check()
780 smp->mgmt_class != IB_MGMT_CLASS_SUBN_LID_ROUTED) { in opa_smp_check()
805 switch (smp->method) { in opa_smp_check()
821 if (smp->method == IB_MGMT_METHOD_TRAP) in opa_smp_check()
824 smp->status |= IB_SMP_UNSUP_METHOD; in opa_smp_check()
936 struct opa_smp *smp = (struct opa_smp *)data; in hfi1_ud_rcv() local
938 if (opa_smp_check(ibp, pkey, sc5, qp, slid, smp)) in hfi1_ud_rcv()
944 smp->mgmt_class != IB_MGMT_CLASS_SUBN_DIRECTED_ROUTE) in hfi1_ud_rcv()
/drivers/infiniband/hw/qib/
qib_mad.c
40 static int reply(struct ib_smp *smp) in reply() argument
46 smp->method = IB_MGMT_METHOD_GET_RESP; in reply()
47 if (smp->mgmt_class == IB_MGMT_CLASS_SUBN_DIRECTED_ROUTE) in reply()
48 smp->status |= IB_SMP_DIRECTION; in reply()
52 static int reply_failure(struct ib_smp *smp) in reply_failure() argument
58 smp->method = IB_MGMT_METHOD_GET_RESP; in reply_failure()
59 if (smp->mgmt_class == IB_MGMT_CLASS_SUBN_DIRECTED_ROUTE) in reply_failure()
60 smp->status |= IB_SMP_DIRECTION; in reply_failure()
68 struct ib_smp *smp; in qib_send_trap() local
92 smp = send_buf->mad; in qib_send_trap()
[all …]
qib_ud.c
497 struct ib_smp *smp; in qib_ud_rcv() local
502 smp = (struct ib_smp *) data; in qib_ud_rcv()
505 smp->mgmt_class != IB_MGMT_CLASS_SUBN_DIRECTED_ROUTE) in qib_ud_rcv()
qib_iba7322.c
5471 struct ib_smp *smp; in try_7322_ipg() local
5501 smp = send_buf->mad; in try_7322_ipg()
5502 smp->base_version = IB_MGMT_BASE_VERSION; in try_7322_ipg()
5503 smp->mgmt_class = IB_MGMT_CLASS_SUBN_DIRECTED_ROUTE; in try_7322_ipg()
5504 smp->class_version = 1; in try_7322_ipg()
5505 smp->method = IB_MGMT_METHOD_SEND; in try_7322_ipg()
5506 smp->hop_cnt = 1; in try_7322_ipg()
5507 smp->attr_id = QIB_VENDOR_IPG; in try_7322_ipg()
5508 smp->attr_mod = 0; in try_7322_ipg()
/drivers/scsi/csiostor/
csio_defs.h
84 csio_set_state(void *smp, void *state) in csio_set_state() argument
86 ((struct csio_sm *)smp)->sm_state = (csio_sm_state_t)state; in csio_set_state()
90 csio_init_state(struct csio_sm *smp, void *state) in csio_init_state() argument
92 csio_set_state(smp, state); in csio_init_state()
96 csio_post_event(void *smp, uint32_t evt) in csio_post_event() argument
98 ((struct csio_sm *)smp)->sm_state(smp, evt); in csio_post_event()
102 csio_get_state(void *smp) in csio_get_state() argument
104 return ((struct csio_sm *)smp)->sm_state; in csio_get_state()
108 csio_match_state(void *smp, void *state) in csio_match_state() argument
110 return (csio_get_state(smp) == (csio_sm_state_t)state); in csio_match_state()
/drivers/sh/intc/
Dcore.c167 unsigned int smp) in save_reg() argument
174 d->smp[cnt] = smp; in save_reg()
184 unsigned int i, k, smp; in register_intc_controller() local
238 d->smp = kzalloc(d->nr_reg * sizeof(*d->smp), GFP_NOWAIT); in register_intc_controller()
239 if (!d->smp) in register_intc_controller()
246 smp = IS_SMP(hw->mask_regs[i]); in register_intc_controller()
247 k += save_reg(d, k, hw->mask_regs[i].set_reg, smp); in register_intc_controller()
248 k += save_reg(d, k, hw->mask_regs[i].clr_reg, smp); in register_intc_controller()
262 smp = IS_SMP(hw->prio_regs[i]); in register_intc_controller()
263 k += save_reg(d, k, hw->prio_regs[i].set_reg, smp); in register_intc_controller()
[all …]
internals.h
23 #define IS_SMP(x) (x.smp)
24 #define INTC_REG(d, x, c) (d->reg[(x)] + ((d->smp[(x)] & 0xff) * c))
25 #define SMP_NR(d, x) ((d->smp[(x)] >> 8) ? (d->smp[(x)] >> 8) : 1)
62 unsigned long *smp; member
/drivers/iio/adc/
stm32-adc.c
1511 unsigned int smp, r = smpr->reg; in stm32_adc_smpr_init() local
1515 for (smp = 0; smp <= STM32_ADC_MAX_SMP; smp++) in stm32_adc_smpr_init()
1516 if ((period_ns * adc->cfg->smp_cycles[smp]) >= smp_ns) in stm32_adc_smpr_init()
1518 if (smp > STM32_ADC_MAX_SMP) in stm32_adc_smpr_init()
1519 smp = STM32_ADC_MAX_SMP; in stm32_adc_smpr_init()
1522 adc->smpr_val[r] = (adc->smpr_val[r] & ~mask) | (smp << shift); in stm32_adc_smpr_init()
1528 int scan_index, u32 smp) in stm32_adc_chan_init_one() argument
1545 stm32_adc_smpr_init(adc, chan->channel, smp); in stm32_adc_chan_init_one()
1560 u32 val, smp = 0; in stm32_adc_chan_of_init() local
1594 scan_index, &smp); in stm32_adc_chan_of_init()
[all …]
/drivers/net/ethernet/mellanox/mlx4/
cmd.c
904 struct ib_smp *smp = inbox->buf; in mlx4_MAD_IFC_wrapper() local
925 if (smp->base_version == 1 && in mlx4_MAD_IFC_wrapper()
926 smp->mgmt_class == IB_MGMT_CLASS_SUBN_LID_ROUTED && in mlx4_MAD_IFC_wrapper()
927 smp->class_version == 1) { in mlx4_MAD_IFC_wrapper()
929 if (!network_view && smp->method == IB_MGMT_METHOD_GET) { in mlx4_MAD_IFC_wrapper()
930 if (smp->attr_id == IB_SMP_ATTR_PKEY_TABLE) { in mlx4_MAD_IFC_wrapper()
931 index = be32_to_cpu(smp->attr_mod); in mlx4_MAD_IFC_wrapper()
952 if (smp->attr_id == IB_SMP_ATTR_PORT_INFO) { in mlx4_MAD_IFC_wrapper()
955 smp->attr_mod = cpu_to_be32(port); in mlx4_MAD_IFC_wrapper()
969 if (smp->attr_id == IB_SMP_ATTR_GUID_INFO) { in mlx4_MAD_IFC_wrapper()
[all …]
/drivers/scsi/isci/
scu_task_context.h
429 struct smp_task_context smp; member
/drivers/scsi/hisi_sas/
hisi_sas.h
386 struct hisi_sas_command_table_smp smp; member