Home
last modified time | relevance | path

Searched refs: s_state (Results 1 – 15 of 15) sorted by relevance

/drivers/infiniband/hw/qib/
Dqib_uc.c87 switch (qp->s_state) { in qib_make_uc_req()
109 qp->s_state = OP(SEND_FIRST); in qib_make_uc_req()
114 qp->s_state = OP(SEND_ONLY); in qib_make_uc_req()
116 qp->s_state = in qib_make_uc_req()
138 qp->s_state = OP(RDMA_WRITE_FIRST); in qib_make_uc_req()
143 qp->s_state = OP(RDMA_WRITE_ONLY); in qib_make_uc_req()
145 qp->s_state = in qib_make_uc_req()
164 qp->s_state = OP(SEND_MIDDLE); in qib_make_uc_req()
173 qp->s_state = OP(SEND_LAST); in qib_make_uc_req()
175 qp->s_state = OP(SEND_LAST_WITH_IMMEDIATE); in qib_make_uc_req()
[all …]
Dqib_rc.c277 switch (qp->s_state) { in qib_make_rc_req()
320 qp->s_state = OP(SEND_FIRST); in qib_make_rc_req()
325 qp->s_state = OP(SEND_ONLY); in qib_make_rc_req()
327 qp->s_state = OP(SEND_ONLY_WITH_IMMEDIATE); in qib_make_rc_req()
355 qp->s_state = OP(RDMA_WRITE_FIRST); in qib_make_rc_req()
360 qp->s_state = OP(RDMA_WRITE_ONLY); in qib_make_rc_req()
362 qp->s_state = OP(RDMA_WRITE_ONLY_WITH_IMMEDIATE); in qib_make_rc_req()
396 qp->s_state = OP(RDMA_READ_REQUEST); in qib_make_rc_req()
422 qp->s_state = OP(COMPARE_SWAP); in qib_make_rc_req()
428 qp->s_state = OP(FETCH_ADD); in qib_make_rc_req()
[all …]
/drivers/infiniband/hw/hfi1/
Duc.c74 switch (qp->s_state) { in hfi1_make_uc_req()
122 qp->s_state = OP(SEND_FIRST); in hfi1_make_uc_req()
127 qp->s_state = OP(SEND_ONLY); in hfi1_make_uc_req()
129 qp->s_state = in hfi1_make_uc_req()
151 qp->s_state = OP(RDMA_WRITE_FIRST); in hfi1_make_uc_req()
156 qp->s_state = OP(RDMA_WRITE_ONLY); in hfi1_make_uc_req()
158 qp->s_state = in hfi1_make_uc_req()
177 qp->s_state = OP(SEND_MIDDLE); in hfi1_make_uc_req()
187 qp->s_state = OP(SEND_LAST); in hfi1_make_uc_req()
189 qp->s_state = OP(SEND_LAST_WITH_IMMEDIATE); in hfi1_make_uc_req()
[all …]
Drc.c471 switch (qp->s_state) { in hfi1_make_rc_req()
560 qp->s_state = OP(SEND_FIRST); in hfi1_make_rc_req()
565 qp->s_state = OP(SEND_ONLY); in hfi1_make_rc_req()
567 qp->s_state = OP(SEND_ONLY_WITH_IMMEDIATE); in hfi1_make_rc_req()
572 qp->s_state = OP(SEND_ONLY_WITH_INVALIDATE); in hfi1_make_rc_req()
602 qp->s_state = OP(RDMA_WRITE_FIRST); in hfi1_make_rc_req()
607 qp->s_state = OP(RDMA_WRITE_ONLY); in hfi1_make_rc_req()
609 qp->s_state = in hfi1_make_rc_req()
643 priv->s_state = TID_OP(WRITE_RESP); in hfi1_make_rc_req()
680 priv->s_state == in hfi1_make_rc_req()
[all …]
Dtrace_tid.h750 __field(u8, s_state)
771 __entry->s_state = qp->s_state;
790 __entry->s_state,
884 __field(u8, s_state)
903 __entry->s_state = qp->s_state;
922 __entry->s_state,
990 __field(u8, s_state)
1007 __entry->s_state = priv->s_state;
1023 __entry->s_state,
1487 __field(u8, s_state)
[all …]
Dtid_rdma.c353 qpriv->s_state = TID_OP(WRITE_RESP); in hfi1_qp_priv_init()
1771 qp->s_state = TID_OP(READ_REQ); in hfi1_build_tid_rdma_read_packet()
3393 qp->s_state = TID_OP(WRITE_REQ); in hfi1_build_tid_rdma_write_req()
4613 qpriv->s_state = TID_OP(WRITE_DATA_LAST); in hfi1_rc_rcv_tid_rdma_ack()
4618 qpriv->s_state = TID_OP(WRITE_DATA); in hfi1_rc_rcv_tid_rdma_ack()
4709 qpriv->s_state = TID_OP(WRITE_REQ); in hfi1_rc_rcv_tid_rdma_ack()
4732 qpriv->s_state = TID_OP(WRITE_REQ); in hfi1_rc_rcv_tid_rdma_ack()
4834 priv->s_state = TID_OP(RESYNC); in hfi1_tid_retry_timeout()
5005 priv->s_state = TID_OP(WRITE_RESP); in update_tid_tail()
5072 switch (priv->s_state) { in hfi1_make_tid_rdma_pkt()
[all …]
Dverbs.h141 u8 s_state; member
Dqp.c656 qp->s_state, in qp_iter_print()
/drivers/misc/
Dtifm_7xx1.c80 unsigned int s_state; in tifm_7xx1_toggle_sock_power() local
93 s_state = readl(sock_addr + SOCK_PRESENT_STATE); in tifm_7xx1_toggle_sock_power()
94 if (!(TIFM_SOCK_STATE_OCCUPIED & s_state)) in tifm_7xx1_toggle_sock_power()
105 writel((s_state & TIFM_CTRL_POWER_MASK) | 0x0c00, in tifm_7xx1_toggle_sock_power()
/drivers/gpu/drm/sun4i/
Dsun4i_layer.c59 struct sun4i_layer_state *s_state = state_to_sun4i_layer_state(state); in sun4i_backend_layer_destroy_state() local
63 kfree(s_state); in sun4i_backend_layer_destroy_state()
Dsun4i_backend.c597 struct sun4i_layer_state *s_state = state_to_sun4i_layer_state(p_state); in sun4i_backend_atomic_check() local
606 s_state->pipe = current_pipe; in sun4i_backend_atomic_check()
/drivers/net/ethernet/mellanox/mlx4/
Dcmd.c2359 struct mlx4_slave_state *s_state; in mlx4_multi_func_init() local
2406 s_state = &priv->mfunc.master.slave_state[i]; in mlx4_multi_func_init()
2407 s_state->last_cmd = MLX4_COMM_CMD_RESET; in mlx4_multi_func_init()
2408 s_state->vst_qinq_supported = false; in mlx4_multi_func_init()
2411 s_state->event_eq[j].eqn = -1; in mlx4_multi_func_init()
2420 s_state->vlan_filter[port] = in mlx4_multi_func_init()
2423 if (!s_state->vlan_filter[port]) { in mlx4_multi_func_init()
2425 kfree(s_state->vlan_filter[port]); in mlx4_multi_func_init()
2431 INIT_LIST_HEAD(&s_state->mcast_filters[port]); in mlx4_multi_func_init()
2443 spin_lock_init(&s_state->lock); in mlx4_multi_func_init()
Deq.c314 struct mlx4_slave_state *s_state = priv->mfunc.master.slave_state; in mlx4_get_slave_port_state() local
323 return s_state[slave].port_state[port]; in mlx4_get_slave_port_state()
331 struct mlx4_slave_state *s_state = priv->mfunc.master.slave_state; in mlx4_set_slave_port_state() local
340 s_state[slave].port_state[port] = state; in mlx4_set_slave_port_state()
Dmain.c745 struct mlx4_slave_state *s_state; in mlx4_how_many_lives_vf() local
750 s_state = &priv->mfunc.master.slave_state[i]; in mlx4_how_many_lives_vf()
751 if (s_state->active && s_state->last_cmd != in mlx4_how_many_lives_vf()
/drivers/infiniband/sw/rdmavt/
Dqp.c841 qp->s_state = IB_OPCODE_RC_SEND_LAST; in rvt_init_qp()
844 qp->s_state = IB_OPCODE_UC_SEND_LAST; in rvt_init_qp()