Searched refs:attrs (Results 1 – 25 of 982) sorted by relevance

/drivers/infiniband/core/
uverbs_std_types_device.c
21 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
23 struct uverbs_api *uapi = attrs->ufile->device->uapi; in UVERBS_HANDLER()
28 rc = uverbs_get_const(&cmd, attrs, UVERBS_ATTR_WRITE_CMD); in UVERBS_HANDLER()
36 uverbs_fill_udata(attrs, &attrs->ucore, UVERBS_ATTR_CORE_IN, in UVERBS_HANDLER()
39 if (attrs->ucore.inlen < method_elm->req_size || in UVERBS_HANDLER()
40 attrs->ucore.outlen < method_elm->resp_size) in UVERBS_HANDLER()
43 attrs->uobject = NULL; in UVERBS_HANDLER()
44 rc = method_elm->handler(attrs); in UVERBS_HANDLER()
45 if (attrs->uobject) in UVERBS_HANDLER()
46 uverbs_finalize_object(attrs->uobject, UVERBS_ACCESS_NEW, true, in UVERBS_HANDLER()
[all …]
uverbs_std_types_mr.c
41 struct uverbs_attr_bundle *attrs) in uverbs_free_mr() argument
44 &attrs->driver_udata); in uverbs_free_mr()
48 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
51 uverbs_attr_get_obj(attrs, UVERBS_ATTR_ADVISE_MR_PD_HANDLE); in UVERBS_HANDLER()
63 ret = uverbs_get_const(&advice, attrs, UVERBS_ATTR_ADVISE_MR_ADVICE); in UVERBS_HANDLER()
67 ret = uverbs_get_flags32(&flags, attrs, UVERBS_ATTR_ADVISE_MR_FLAGS, in UVERBS_HANDLER()
73 attrs, UVERBS_ATTR_ADVISE_MR_SGE_LIST, sizeof(struct ib_sge)); in UVERBS_HANDLER()
77 sg_list = uverbs_attr_get_alloced_ptr(attrs, in UVERBS_HANDLER()
80 attrs); in UVERBS_HANDLER()
84 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
[all …]
uverbs_cmd.c
58 static int uverbs_response(struct uverbs_attr_bundle *attrs, const void *resp, in uverbs_response() argument
63 if (uverbs_attr_is_valid(attrs, UVERBS_ATTR_CORE_OUT)) in uverbs_response()
65 attrs, UVERBS_ATTR_CORE_OUT, resp, resp_len); in uverbs_response()
67 if (copy_to_user(attrs->ucore.outbuf, resp, in uverbs_response()
68 min(attrs->ucore.outlen, resp_len))) in uverbs_response()
71 if (resp_len < attrs->ucore.outlen) { in uverbs_response()
76 ret = clear_user(attrs->ucore.outbuf + resp_len, in uverbs_response()
77 attrs->ucore.outlen - resp_len); in uverbs_response()
91 static int uverbs_request(struct uverbs_attr_bundle *attrs, void *req, in uverbs_request() argument
94 if (copy_from_user(req, attrs->ucore.inbuf, in uverbs_request()
[all …]
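
The uverbs_response() lines above copy at most outlen bytes of the response and then clear_user() the rest, so user space never sees stale bytes when the kernel's response is shorter than the buffer it supplied. Below is a minimal userspace sketch of that logic; memcpy/memset stand in for copy_to_user()/clear_user(), and the names are illustrative rather than the kernel API.

#include <stddef.h>
#include <string.h>

static size_t min_size(size_t a, size_t b)
{
    return a < b ? a : b;
}

/* Copy the response, then zero whatever the caller asked for beyond it. */
static void fill_response(char *outbuf, size_t outlen,
                          const char *resp, size_t resp_len)
{
    memcpy(outbuf, resp, min_size(outlen, resp_len));
    if (resp_len < outlen)
        memset(outbuf + resp_len, 0, outlen - resp_len);
}

int main(void)
{
    char out[16];

    fill_response(out, sizeof(out), "pong", 4);   /* bytes 4..15 are zeroed */
    return out[15];
}
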
rdma_core.c
124 struct uverbs_attr_bundle *attrs) in uverbs_destroy_uobject() argument
126 struct ib_uverbs_file *ufile = attrs->ufile; in uverbs_destroy_uobject()
139 attrs); in uverbs_destroy_uobject()
188 int uobj_destroy(struct ib_uobject *uobj, struct uverbs_attr_bundle *attrs) in uobj_destroy() argument
190 struct ib_uverbs_file *ufile = attrs->ufile; in uobj_destroy()
206 ret = uverbs_destroy_uobject(uobj, RDMA_REMOVE_DESTROY, attrs); in uobj_destroy()
223 u32 id, struct uverbs_attr_bundle *attrs) in __uobj_get_destroy() argument
228 uobj = rdma_lookup_get_uobject(obj, attrs->ufile, id, in __uobj_get_destroy()
229 UVERBS_LOOKUP_DESTROY, attrs); in __uobj_get_destroy()
233 ret = uobj_destroy(uobj, attrs); in __uobj_get_destroy()
[all …]
uverbs_std_types_qp.c
13 struct uverbs_attr_bundle *attrs) in uverbs_free_qp() argument
34 ret = ib_destroy_qp_user(qp, &attrs->driver_udata); in uverbs_free_qp()
83 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
86 uverbs_attr_get_uobject(attrs, UVERBS_ATTR_CREATE_QP_HANDLE), in UVERBS_HANDLER()
102 ret = uverbs_copy_from_or_zero(&cap, attrs, in UVERBS_HANDLER()
105 ret = uverbs_copy_from(&user_handle, attrs, in UVERBS_HANDLER()
108 ret = uverbs_get_const(&attr.qp_type, attrs, in UVERBS_HANDLER()
115 if (uverbs_attr_is_valid(attrs, in UVERBS_HANDLER()
117 uverbs_attr_is_valid(attrs, in UVERBS_HANDLER()
119 uverbs_attr_is_valid(attrs, in UVERBS_HANDLER()
[all …]
uverbs_std_types_wq.c
12 struct uverbs_attr_bundle *attrs) in uverbs_free_wq() argument
19 ret = ib_destroy_wq_user(wq, &attrs->driver_udata); in uverbs_free_wq()
28 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
31 uverbs_attr_get_uobject(attrs, UVERBS_ATTR_CREATE_WQ_HANDLE), in UVERBS_HANDLER()
34 uverbs_attr_get_obj(attrs, UVERBS_ATTR_CREATE_WQ_PD_HANDLE); in UVERBS_HANDLER()
36 uverbs_attr_get_obj(attrs, UVERBS_ATTR_CREATE_WQ_CQ_HANDLE); in UVERBS_HANDLER()
42 ret = uverbs_get_flags32(&wq_init_attr.create_flags, attrs, in UVERBS_HANDLER()
49 ret = uverbs_copy_from(&wq_init_attr.max_sge, attrs, in UVERBS_HANDLER()
52 ret = uverbs_copy_from(&wq_init_attr.max_wr, attrs, in UVERBS_HANDLER()
55 ret = uverbs_copy_from(&user_handle, attrs, in UVERBS_HANDLER()
[all …]
uverbs_std_types_srq.c
12 struct uverbs_attr_bundle *attrs) in uverbs_free_srq() argument
20 ret = ib_destroy_srq_user(srq, &attrs->driver_udata); in uverbs_free_srq()
37 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
40 uverbs_attr_get_uobject(attrs, UVERBS_ATTR_CREATE_SRQ_HANDLE), in UVERBS_HANDLER()
43 uverbs_attr_get_obj(attrs, UVERBS_ATTR_CREATE_SRQ_PD_HANDLE); in UVERBS_HANDLER()
50 ret = uverbs_copy_from(&attr.attr.max_sge, attrs, in UVERBS_HANDLER()
53 ret = uverbs_copy_from(&attr.attr.max_wr, attrs, in UVERBS_HANDLER()
56 ret = uverbs_copy_from(&attr.attr.srq_limit, attrs, in UVERBS_HANDLER()
59 ret = uverbs_copy_from(&user_handle, attrs, in UVERBS_HANDLER()
62 ret = uverbs_get_const(&attr.srq_type, attrs, in UVERBS_HANDLER()
[all …]
uverbs_std_types_cq.c
40 struct uverbs_attr_bundle *attrs) in uverbs_free_cq() argument
48 ret = ib_destroy_cq_user(cq, &attrs->driver_udata); in uverbs_free_cq()
62 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
65 uverbs_attr_get_uobject(attrs, UVERBS_ATTR_CREATE_CQ_HANDLE), in UVERBS_HANDLER()
67 struct ib_device *ib_dev = attrs->context->device; in UVERBS_HANDLER()
78 ret = uverbs_copy_from(&attr.comp_vector, attrs, in UVERBS_HANDLER()
81 ret = uverbs_copy_from(&attr.cqe, attrs, in UVERBS_HANDLER()
84 ret = uverbs_copy_from(&user_handle, attrs, in UVERBS_HANDLER()
89 ret = uverbs_get_flags32(&attr.flags, attrs, in UVERBS_HANDLER()
96 ev_file_uobj = uverbs_attr_get_uobject(attrs, UVERBS_ATTR_CREATE_CQ_COMP_CHANNEL); in UVERBS_HANDLER()
[all …]
uverbs_std_types_counters.c
40 struct uverbs_attr_bundle *attrs) in uverbs_free_counters() argument
56 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
59 attrs, UVERBS_ATTR_CREATE_COUNTERS_HANDLE); in UVERBS_HANDLER()
60 struct ib_device *ib_dev = attrs->context->device; in UVERBS_HANDLER()
81 ret = ib_dev->ops.create_counters(counters, attrs); in UVERBS_HANDLER()
89 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
94 uverbs_attr_get_obj(attrs, UVERBS_ATTR_READ_COUNTERS_HANDLE); in UVERBS_HANDLER()
103 ret = uverbs_get_flags32(&read_attr.flags, attrs, in UVERBS_HANDLER()
109 uattr = uverbs_attr_get(attrs, UVERBS_ATTR_READ_COUNTERS_BUFF); in UVERBS_HANDLER()
114 attrs, array_size(read_attr.ncounters, sizeof(u64))); in UVERBS_HANDLER()
[all …]
uverbs_std_types_dm.c
39 struct uverbs_attr_bundle *attrs) in uverbs_free_dm() argument
46 return dm->device->ops.dealloc_dm(dm, attrs); in uverbs_free_dm()
50 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
54 uverbs_attr_get(attrs, UVERBS_ATTR_ALLOC_DM_HANDLE) in UVERBS_HANDLER()
56 struct ib_device *ib_dev = attrs->context->device; in UVERBS_HANDLER()
63 ret = uverbs_copy_from(&attr.length, attrs, in UVERBS_HANDLER()
68 ret = uverbs_copy_from(&attr.alignment, attrs, in UVERBS_HANDLER()
73 dm = ib_dev->ops.alloc_dm(ib_dev, attrs->context, &attr, attrs); in UVERBS_HANDLER()
uverbs_std_types.c
44 struct uverbs_attr_bundle *attrs) in uverbs_free_ah() argument
48 &attrs->driver_udata); in uverbs_free_ah()
53 struct uverbs_attr_bundle *attrs) in uverbs_free_flow() argument
73 struct uverbs_attr_bundle *attrs) in uverbs_free_mw() argument
80 struct uverbs_attr_bundle *attrs) in uverbs_free_rwq_ind_tbl() argument
104 struct uverbs_attr_bundle *attrs) in uverbs_free_xrcd() argument
114 mutex_lock(&attrs->ufile->device->xrcd_tree_mutex); in uverbs_free_xrcd()
115 ret = ib_uverbs_dealloc_xrcd(uobject, xrcd, why, attrs); in uverbs_free_xrcd()
116 mutex_unlock(&attrs->ufile->device->xrcd_tree_mutex); in uverbs_free_xrcd()
123 struct uverbs_attr_bundle *attrs) in uverbs_free_pd() argument
[all …]
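
The UVERBS_HANDLER() bodies in this directory all follow the same shape: pull typed values out of the uverbs_attr_bundle (uverbs_copy_from(), uverbs_get_const(), uverbs_get_flags32(), uverbs_attr_get_uobject()) and return on the first error. Below is a standalone userspace analog of that pattern with a made-up bundle type and attribute IDs; it is only a sketch of the control flow, not the kernel interface.

#include <errno.h>
#include <stdint.h>
#include <stdio.h>

enum attr_id { ATTR_USER_HANDLE, ATTR_FLAGS, ATTR_MAX };

struct attr {
    int valid;          /* was this attribute supplied by the caller? */
    uint64_t value;
};

struct attr_bundle {
    struct attr attrs[ATTR_MAX];
};

/* Analog of uverbs_copy_from(): fail if the attribute is missing. */
static int bundle_get_u64(uint64_t *to, const struct attr_bundle *b,
                          enum attr_id id)
{
    if (!b->attrs[id].valid)
        return -ENOENT;
    *to = b->attrs[id].value;
    return 0;
}

/* Handler shape: extract each attribute, bail out on the first error. */
static int example_handler(const struct attr_bundle *b)
{
    uint64_t user_handle, flags;
    int ret;

    ret = bundle_get_u64(&user_handle, b, ATTR_USER_HANDLE);
    if (ret)
        return ret;
    ret = bundle_get_u64(&flags, b, ATTR_FLAGS);
    if (ret)
        return ret;
    printf("handle=%llu flags=0x%llx\n",
           (unsigned long long)user_handle, (unsigned long long)flags);
    return 0;
}

int main(void)
{
    struct attr_bundle b = {
        .attrs = {
            [ATTR_USER_HANDLE] = { .valid = 1, .value = 42 },
            [ATTR_FLAGS]       = { .valid = 1, .value = 0x3 },
        },
    };

    return example_handler(&b);
}
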
/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ipsec.c
66 if (sa_entry->attrs.drop) in mlx5e_ipsec_handle_tx_limit()
72 sa_entry->attrs.drop = true; in mlx5e_ipsec_handle_tx_limit()
147 struct mlx5_accel_esp_xfrm_attrs *attrs) in mlx5e_ipsec_init_limits() argument
152 attrs->lft.hard_packet_limit = x->lft.hard_packet_limit; in mlx5e_ipsec_init_limits()
153 attrs->lft.soft_packet_limit = x->lft.soft_packet_limit; in mlx5e_ipsec_init_limits()
187 n = attrs->lft.hard_packet_limit / BIT_ULL(31); in mlx5e_ipsec_init_limits()
188 start_value = attrs->lft.hard_packet_limit - n * BIT_ULL(31); in mlx5e_ipsec_init_limits()
195 start_value = attrs->lft.hard_packet_limit - n * BIT_ULL(31); in mlx5e_ipsec_init_limits()
200 attrs->lft.hard_packet_limit = lower_32_bits(start_value); in mlx5e_ipsec_init_limits()
201 attrs->lft.numb_rounds_hard = (u64)n; in mlx5e_ipsec_init_limits()
[all …]
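
The ipsec.c lines above split a 64-bit hard packet limit into a number of full 2^31-packet rounds plus a remainder start value, which suggests the underlying counter advances in 2^31-sized rounds. Below is a userspace sketch of the same arithmetic; the type and constant names are illustrative, not the driver's.

#include <stdint.h>
#include <stdio.h>

#define ROUND_SIZE (1ULL << 31)   /* BIT_ULL(31) in the snippet */

struct packet_limit {
    uint32_t start_value;   /* lower_32_bits() of the remainder */
    uint64_t rounds;        /* number of full 2^31-packet rounds */
};

static struct packet_limit split_packet_limit(uint64_t hard_packet_limit)
{
    struct packet_limit l;
    uint64_t n = hard_packet_limit / ROUND_SIZE;

    l.start_value = (uint32_t)(hard_packet_limit - n * ROUND_SIZE);
    l.rounds = n;
    return l;
}

int main(void)
{
    struct packet_limit l = split_packet_limit(5 * ROUND_SIZE + 123);

    printf("rounds=%llu start=%u\n",
           (unsigned long long)l.rounds, l.start_value);
    return 0;
}
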
ipsec_offload.c
93 struct mlx5_accel_esp_xfrm_attrs *attrs) in mlx5e_ipsec_packet_setup() argument
98 if (attrs->replay_esn.trigger) { in mlx5e_ipsec_packet_setup()
101 if (attrs->dir == XFRM_DEV_OFFLOAD_IN) { in mlx5e_ipsec_packet_setup()
103 attrs->replay_esn.replay_window); in mlx5e_ipsec_packet_setup()
108 attrs->replay_esn.esn); in mlx5e_ipsec_packet_setup()
122 if (attrs->dir == XFRM_DEV_OFFLOAD_OUT) in mlx5e_ipsec_packet_setup()
125 if (attrs->lft.hard_packet_limit != XFRM_INF) { in mlx5e_ipsec_packet_setup()
127 attrs->lft.hard_packet_limit); in mlx5e_ipsec_packet_setup()
131 if (attrs->lft.soft_packet_limit != XFRM_INF) { in mlx5e_ipsec_packet_setup()
133 attrs->lft.soft_packet_limit); in mlx5e_ipsec_packet_setup()
[all …]
ipsec_fs.c
1117 struct mlx5_accel_esp_xfrm_attrs *attrs, in setup_pkt_tunnel_reformat() argument
1130 if (attrs->dir == XFRM_DEV_OFFLOAD_OUT) { in setup_pkt_tunnel_reformat()
1133 switch (attrs->family) { in setup_pkt_tunnel_reformat()
1150 switch (attrs->family) { in setup_pkt_tunnel_reformat()
1161 ether_addr_copy(eth_hdr->h_dest, attrs->dmac); in setup_pkt_tunnel_reformat()
1162 ether_addr_copy(eth_hdr->h_source, attrs->smac); in setup_pkt_tunnel_reformat()
1164 switch (attrs->dir) { in setup_pkt_tunnel_reformat()
1170 reformat_params->param_0 = attrs->authsize; in setup_pkt_tunnel_reformat()
1173 switch (attrs->family) { in setup_pkt_tunnel_reformat()
1176 memcpy(&iphdr->saddr, &attrs->saddr.a4, 4); in setup_pkt_tunnel_reformat()
[all …]
/drivers/net/wireguard/
netlink.c
52 static struct wg_device *lookup_interface(struct nlattr **attrs, in lookup_interface() argument
57 if (!attrs[WGDEVICE_A_IFINDEX] == !attrs[WGDEVICE_A_IFNAME]) in lookup_interface()
59 if (attrs[WGDEVICE_A_IFINDEX]) in lookup_interface()
61 nla_get_u32(attrs[WGDEVICE_A_IFINDEX])); in lookup_interface()
62 else if (attrs[WGDEVICE_A_IFNAME]) in lookup_interface()
64 nla_data(attrs[WGDEVICE_A_IFNAME])); in lookup_interface()
203 wg = lookup_interface(genl_info_dump(cb)->attrs, cb->skb); in wg_get_device_start()
329 static int set_allowedip(struct wg_peer *peer, struct nlattr **attrs) in set_allowedip() argument
335 if (!attrs[WGALLOWEDIP_A_FAMILY] || !attrs[WGALLOWEDIP_A_IPADDR] || in set_allowedip()
336 !attrs[WGALLOWEDIP_A_CIDR_MASK]) in set_allowedip()
[all …]
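
In lookup_interface() above, the check !attrs[WGDEVICE_A_IFINDEX] == !attrs[WGDEVICE_A_IFNAME] rejects requests where both or neither attribute is present: the caller must supply exactly one of ifindex or ifname. Below is a tiny standalone illustration of that idiom with plain pointers; the names and error code are illustrative.

#include <errno.h>
#include <stddef.h>

/* Return 0 only when exactly one of the two attributes is supplied. */
static int check_exactly_one(const void *ifindex_attr, const void *ifname_attr)
{
    if (!ifindex_attr == !ifname_attr)
        return -EINVAL;     /* both present or both missing */
    return 0;
}

int main(void)
{
    int ifindex = 3;

    return check_exactly_one(&ifindex, NULL);   /* exactly one: returns 0 */
}
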
/drivers/soc/aspeed/
aspeed-socinfo.c
79 struct soc_device_attribute *attrs; in aspeed_socinfo_init() local
112 attrs = kzalloc(sizeof(*attrs), GFP_KERNEL); in aspeed_socinfo_init()
113 if (!attrs) in aspeed_socinfo_init()
127 attrs->machine = kstrdup(machine, GFP_KERNEL); in aspeed_socinfo_init()
130 attrs->family = siliconid_to_name(siliconid); in aspeed_socinfo_init()
131 attrs->revision = siliconid_to_rev(siliconid); in aspeed_socinfo_init()
132 attrs->soc_id = kasprintf(GFP_KERNEL, "%08x", siliconid); in aspeed_socinfo_init()
135 attrs->serial_number = kasprintf(GFP_KERNEL, "%08x%08x", in aspeed_socinfo_init()
138 soc_dev = soc_device_register(attrs); in aspeed_socinfo_init()
140 kfree(attrs->machine); in aspeed_socinfo_init()
[all …]
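
aspeed_socinfo_init() above allocates a soc_device_attribute, fills in machine/family/revision/soc_id/serial_number, and hands it to soc_device_register(). Below is a condensed kernel-style sketch of that pattern; the attribute values are placeholders and the error handling is abbreviated compared to the real driver.

#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/sys_soc.h>
#include <linux/types.h>

static int example_socinfo_register(u32 siliconid)
{
    struct soc_device_attribute *attrs;
    struct soc_device *soc_dev;

    attrs = kzalloc(sizeof(*attrs), GFP_KERNEL);
    if (!attrs)
        return -ENOMEM;

    attrs->family = "Example SoC";                            /* placeholder */
    attrs->soc_id = kasprintf(GFP_KERNEL, "%08x", siliconid);
    attrs->revision = kasprintf(GFP_KERNEL, "%u", siliconid & 0xff);

    soc_dev = soc_device_register(attrs);
    if (IS_ERR(soc_dev)) {
        kfree(attrs->soc_id);
        kfree(attrs->revision);
        kfree(attrs);
        return PTR_ERR(soc_dev);
    }
    return 0;
}
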
/drivers/infiniband/hw/erdma/
erdma_qp.c
18 switch (qp->attrs.state) { in erdma_qp_llp_close()
27 qp->attrs.state = ERDMA_QP_STATE_IDLE; in erdma_qp_llp_close()
52 struct erdma_qp_attrs *attrs, in erdma_modify_qp_state_to_rts() argument
76 qp->attrs.state = ERDMA_QP_STATE_RTS; in erdma_modify_qp_state_to_rts()
83 req.cfg = FIELD_PREP(ERDMA_CMD_MODIFY_QP_STATE_MASK, qp->attrs.state) | in erdma_modify_qp_state_to_rts()
84 FIELD_PREP(ERDMA_CMD_MODIFY_QP_CC_MASK, qp->attrs.cc) | in erdma_modify_qp_state_to_rts()
95 if (qp->attrs.qp_type == ERDMA_QP_PASSIVE) in erdma_modify_qp_state_to_rts()
96 req.send_nxt += MPA_DEFAULT_HDR_LEN + qp->attrs.pd_len; in erdma_modify_qp_state_to_rts()
103 struct erdma_qp_attrs *attrs, in erdma_modify_qp_state_to_stop() argument
109 qp->attrs.state = attrs->state; in erdma_modify_qp_state_to_stop()
[all …]
erdma_verbs.c
52 ilog2(qp->attrs.sq_size)) | in create_qp_cmd()
55 ilog2(qp->attrs.rq_size)) | in create_qp_cmd()
80 (qp->attrs.sq_size << SQEBB_SHIFT); in create_qp_cmd()
82 (qp->attrs.rq_size << RQE_SHIFT); in create_qp_cmd()
127 qp->attrs.cookie = in create_qp_cmd()
319 attr->max_mr_size = dev->attrs.max_mr_size; in erdma_query_device()
323 attr->max_qp = dev->attrs.max_qp - 1; in erdma_query_device()
324 attr->max_qp_wr = min(dev->attrs.max_send_wr, dev->attrs.max_recv_wr); in erdma_query_device()
325 attr->max_qp_rd_atom = dev->attrs.max_ord; in erdma_query_device()
326 attr->max_qp_init_rd_atom = dev->attrs.max_ird; in erdma_query_device()
[all …]
erdma_main.c
75 dev->attrs.peer_addr)) { in erdma_enum_and_get_netdev()
144 dev->attrs.irq_num = ret; in erdma_request_vectors()
243 dev->attrs.numa_node = dev_to_node(&pdev->dev); in erdma_probe_dev()
366 dev->attrs.max_cqe = 1 << ERDMA_GET_CAP(MAX_CQE, cap0); in erdma_dev_attrs_init()
367 dev->attrs.max_mr_size = 1ULL << ERDMA_GET_CAP(MAX_MR_SIZE, cap0); in erdma_dev_attrs_init()
368 dev->attrs.max_mw = 1 << ERDMA_GET_CAP(MAX_MW, cap1); in erdma_dev_attrs_init()
369 dev->attrs.max_recv_wr = 1 << ERDMA_GET_CAP(MAX_RECV_WR, cap0); in erdma_dev_attrs_init()
370 dev->attrs.local_dma_key = ERDMA_GET_CAP(DMA_LOCAL_KEY, cap1); in erdma_dev_attrs_init()
371 dev->attrs.cc = ERDMA_GET_CAP(DEFAULT_CC, cap1); in erdma_dev_attrs_init()
372 dev->attrs.max_qp = ERDMA_NQP_PER_QBLOCK * ERDMA_GET_CAP(QBLOCK, cap1); in erdma_dev_attrs_init()
[all …]
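
erdma_dev_attrs_init() above expands packed capability words into the device's attrs structure, with several limits stored as log2 values (1 << ERDMA_GET_CAP(...)). Below is a standalone sketch of that decode pattern; the field layout and masks are invented for illustration and do not match the real erdma registers.

#include <stdint.h>
#include <stdio.h>

/* Invented layout: bits [4:0] = log2(max CQE), bits [9:5] = log2(max MW). */
#define CAP_MAX_CQE_SHIFT   0
#define CAP_MAX_CQE_MASK    0x1fULL
#define CAP_MAX_MW_SHIFT    5
#define CAP_MAX_MW_MASK     0x1fULL

#define GET_CAP(word, shift, mask)  (((word) >> (shift)) & (mask))

struct dev_attrs {
    uint64_t max_cqe;
    uint64_t max_mw;
};

static void dev_attrs_init(struct dev_attrs *attrs, uint64_t cap0)
{
    attrs->max_cqe = 1ULL << GET_CAP(cap0, CAP_MAX_CQE_SHIFT, CAP_MAX_CQE_MASK);
    attrs->max_mw  = 1ULL << GET_CAP(cap0, CAP_MAX_MW_SHIFT, CAP_MAX_MW_MASK);
}

int main(void)
{
    struct dev_attrs attrs;

    dev_attrs_init(&attrs, (8ULL << CAP_MAX_MW_SHIFT) | 16ULL);
    printf("max_cqe=%llu max_mw=%llu\n",
           (unsigned long long)attrs.max_cqe, (unsigned long long)attrs.max_mw);
    return 0;
}
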
/drivers/infiniband/sw/siw/
siw_qp.c
111 if (likely(qp->attrs.state == SIW_QP_STATE_RTS)) in siw_qp_llp_data_ready()
132 siw_qp_state_to_string[qp->attrs.state]); in siw_qp_llp_close()
138 qp->attrs.sk = NULL; in siw_qp_llp_close()
140 switch (qp->attrs.state) { in siw_qp_llp_close()
145 qp->attrs.state = SIW_QP_STATE_ERROR; in siw_qp_llp_close()
155 qp->attrs.state = SIW_QP_STATE_ERROR; in siw_qp_llp_close()
157 qp->attrs.state = SIW_QP_STATE_IDLE; in siw_qp_llp_close()
162 siw_qp_state_to_string[qp->attrs.state]); in siw_qp_llp_close()
179 siw_qp_state_to_string[qp->attrs.state]); in siw_qp_llp_close()
209 qp->attrs.irq_size = 0; in siw_qp_readq_init()
[all …]
siw_verbs.c
137 attr->max_cq = sdev->attrs.max_cq; in siw_query_device()
138 attr->max_cqe = sdev->attrs.max_cqe; in siw_query_device()
140 attr->max_mr = sdev->attrs.max_mr; in siw_query_device()
141 attr->max_mw = sdev->attrs.max_mw; in siw_query_device()
143 attr->max_pd = sdev->attrs.max_pd; in siw_query_device()
144 attr->max_qp = sdev->attrs.max_qp; in siw_query_device()
145 attr->max_qp_init_rd_atom = sdev->attrs.max_ird; in siw_query_device()
146 attr->max_qp_rd_atom = sdev->attrs.max_ord; in siw_query_device()
147 attr->max_qp_wr = sdev->attrs.max_qp_wr; in siw_query_device()
148 attr->max_recv_sge = sdev->attrs.max_sge; in siw_query_device()
[all …]
/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_qp.c
586 cmd->attrs.qp_state = ib_qp_state_to_pvrdma(attr->qp_state); in pvrdma_modify_qp()
587 cmd->attrs.cur_qp_state = in pvrdma_modify_qp()
589 cmd->attrs.path_mtu = ib_mtu_to_pvrdma(attr->path_mtu); in pvrdma_modify_qp()
590 cmd->attrs.path_mig_state = in pvrdma_modify_qp()
592 cmd->attrs.qkey = attr->qkey; in pvrdma_modify_qp()
593 cmd->attrs.rq_psn = attr->rq_psn; in pvrdma_modify_qp()
594 cmd->attrs.sq_psn = attr->sq_psn; in pvrdma_modify_qp()
595 cmd->attrs.dest_qp_num = attr->dest_qp_num; in pvrdma_modify_qp()
596 cmd->attrs.qp_access_flags = in pvrdma_modify_qp()
598 cmd->attrs.pkey_index = attr->pkey_index; in pvrdma_modify_qp()
[all …]
/drivers/infiniband/hw/mlx5/
qos.c
25 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
28 struct ib_uobject *uobj = uverbs_attr_get_uobject(attrs, in UVERBS_HANDLER()
39 c = to_mucontext(ib_uverbs_get_ucontext(attrs)); in UVERBS_HANDLER()
52 in_ctx = uverbs_attr_get_alloced_ptr(attrs, in UVERBS_HANDLER()
54 inlen = uverbs_attr_get_len(attrs, in UVERBS_HANDLER()
57 err = uverbs_get_flags32(&flags, attrs, in UVERBS_HANDLER()
74 uverbs_finalize_uobj_create(attrs, MLX5_IB_ATTR_PP_OBJ_ALLOC_HANDLE); in UVERBS_HANDLER()
76 err = uverbs_copy_to(attrs, MLX5_IB_ATTR_PP_OBJ_ALLOC_INDEX, in UVERBS_HANDLER()
87 struct uverbs_attr_bundle *attrs) in pp_obj_cleanup() argument
devx.c
107 devx_ufile2uctx(const struct uverbs_attr_bundle *attrs) in devx_ufile2uctx() argument
109 return to_mucontext(ib_uverbs_get_ucontext(attrs)); in devx_ufile2uctx()
591 static bool devx_is_valid_obj_id(struct uverbs_attr_bundle *attrs, in devx_is_valid_obj_id() argument
594 struct mlx5_ib_dev *dev = mlx5_udata_to_mdev(&attrs->driver_udata); in devx_is_valid_obj_id()
988 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
996 if (uverbs_copy_from(&user_vector, attrs, in UVERBS_HANDLER()
1000 c = devx_ufile2uctx(attrs); in UVERBS_HANDLER()
1009 if (uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_QUERY_EQN_DEV_EQN, in UVERBS_HANDLER()
1037 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
1044 c = devx_ufile2uctx(attrs); in UVERBS_HANDLER()
[all …]
dm.c
174 struct uverbs_attr_bundle *attrs) in copy_op_to_user() argument
182 err = uverbs_copy_to(attrs, MLX5_IB_ATTR_DM_MAP_OP_ADDR_RESP_PAGE_INDEX, in copy_op_to_user()
187 return uverbs_copy_to(attrs, in copy_op_to_user()
193 struct uverbs_attr_bundle *attrs) in map_existing_op() argument
201 return copy_op_to_user(op_entry, attrs); in map_existing_op()
205 struct uverbs_attr_bundle *attrs) in UVERBS_HANDLER()
208 attrs, MLX5_IB_ATTR_DM_MAP_OP_ADDR_REQ_HANDLE); in UVERBS_HANDLER()
216 err = uverbs_copy_from(&op, attrs, MLX5_IB_ATTR_DM_MAP_OP_ADDR_REQ_OP); in UVERBS_HANDLER()
227 err = map_existing_op(dm, op, attrs); in UVERBS_HANDLER()
255 err = copy_op_to_user(op_entry, attrs); in UVERBS_HANDLER()
[all …]
