/drivers/infiniband/core/ |
D | multicast.c |
    255  static int check_selector(ib_sa_comp_mask comp_mask,  in check_selector() argument
    262  if (!(comp_mask & selector_mask) || !(comp_mask & value_mask))  in check_selector()
    284  struct ib_sa_mcmember_rec *dst, ib_sa_comp_mask comp_mask)  in cmp_rec() argument
    288  if (comp_mask & IB_SA_MCMEMBER_REC_PORT_GID &&  in cmp_rec()
    291  if (comp_mask & IB_SA_MCMEMBER_REC_QKEY && src->qkey != dst->qkey)  in cmp_rec()
    293  if (comp_mask & IB_SA_MCMEMBER_REC_MLID && src->mlid != dst->mlid)  in cmp_rec()
    295  if (check_selector(comp_mask, IB_SA_MCMEMBER_REC_MTU_SELECTOR,  in cmp_rec()
    299  if (comp_mask & IB_SA_MCMEMBER_REC_TRAFFIC_CLASS &&  in cmp_rec()
    302  if (comp_mask & IB_SA_MCMEMBER_REC_PKEY && src->pkey != dst->pkey)  in cmp_rec()
    304  if (check_selector(comp_mask, IB_SA_MCMEMBER_REC_RATE_SELECTOR,  in cmp_rec()
    [all …]
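
The multicast.c hits above are the SA component-mask comparison pattern: cmp_rec() compares an optional field only when its bit is set in comp_mask, and fields that carry a selector (MTU, rate) are routed through check_selector(). The sketch below is a standalone, hedged illustration of that shape; the mask bits, selector values, and the exact comparison direction are simplified stand-ins, not the kernel's ib_sa definitions.

    /* Illustrative sketch only: simplified stand-ins for ib_sa_comp_mask and
     * the selector handling seen in check_selector()/cmp_rec().  Returns
     * nonzero when the candidate value does not satisfy the constraint.
     * Here src_value is the requested bound, dst_value the value under test. */
    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t sa_comp_mask;              /* stand-in for ib_sa_comp_mask */

    #define REC_MTU_SELECTOR (1ULL << 0)        /* invented bit positions */
    #define REC_MTU          (1ULL << 1)

    enum { SEL_GT = 0, SEL_LT = 1, SEL_EXACTLY = 2 };

    static int check_selector(sa_comp_mask comp_mask, sa_comp_mask selector_mask,
                              sa_comp_mask value_mask, uint8_t selector,
                              uint8_t src_value, uint8_t dst_value)
    {
        /* If either the selector bit or the value bit is absent from
         * comp_mask, the field is unconstrained and always matches. */
        if (!(comp_mask & selector_mask) || !(comp_mask & value_mask))
            return 0;

        switch (selector) {
        case SEL_GT:
            return dst_value <= src_value;      /* must be strictly greater */
        case SEL_LT:
            return dst_value >= src_value;      /* must be strictly smaller */
        case SEL_EXACTLY:
            return dst_value != src_value;
        default:
            return 0;
        }
    }

    int main(void)
    {
        sa_comp_mask mask = REC_MTU_SELECTOR | REC_MTU;

        /* Require an MTU strictly greater than 4: 5 satisfies, 3 does not. */
        printf("mtu 5: %s\n", check_selector(mask, REC_MTU_SELECTOR, REC_MTU,
                                             SEL_GT, 4, 5) ? "miss" : "match");
        printf("mtu 3: %s\n", check_selector(mask, REC_MTU_SELECTOR, REC_MTU,
                                             SEL_GT, 4, 3) ? "miss" : "match");
        return 0;
    }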
|
D | sa_query.c |
    704  ib_sa_comp_mask comp_mask = mad->sa_hdr.comp_mask;  in ib_nl_set_path_rec_attrs() local
    718  if ((comp_mask & IB_SA_PATH_REC_REVERSIBLE) &&  in ib_nl_set_path_rec_attrs()
    726  if (comp_mask & IB_SA_PATH_REC_SERVICE_ID) {  in ib_nl_set_path_rec_attrs()
    731  if (comp_mask & IB_SA_PATH_REC_DGID)  in ib_nl_set_path_rec_attrs()
    734  if (comp_mask & IB_SA_PATH_REC_SGID)  in ib_nl_set_path_rec_attrs()
    737  if (comp_mask & IB_SA_PATH_REC_TRAFFIC_CLASS)  in ib_nl_set_path_rec_attrs()
    741  if (comp_mask & IB_SA_PATH_REC_PKEY) {  in ib_nl_set_path_rec_attrs()
    746  if (comp_mask & IB_SA_PATH_REC_QOS_CLASS) {  in ib_nl_set_path_rec_attrs()
    753  static int ib_nl_get_path_rec_attrs_len(ib_sa_comp_mask comp_mask)  in ib_nl_get_path_rec_attrs_len() argument
    757  if (comp_mask & IB_SA_PATH_REC_SERVICE_ID)  in ib_nl_get_path_rec_attrs_len()
    [all …]
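
sa_query.c consults the same mask twice while building the netlink path-record request: ib_nl_get_path_rec_attrs_len() sizes the message from comp_mask, and ib_nl_set_path_rec_attrs() then emits one attribute per set bit. Below is a hedged userspace sketch of that size-then-pack idea; the field names, sizes, and flat layout are invented and do not follow the kernel's netlink attribute format.

    /* Illustrative sketch: size a request from comp_mask, then pack only the
     * fields whose bits are set, so both steps stay in agreement. */
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define PATH_SERVICE_ID (1u << 0)           /* invented bit values */
    #define PATH_DGID       (1u << 1)
    #define PATH_PKEY       (1u << 2)

    struct path_req {
        uint32_t comp_mask;
        uint64_t service_id;
        uint8_t  dgid[16];
        uint16_t pkey;
    };

    /* Analogue of the sizing step: length depends only on the set bits. */
    static size_t req_attrs_len(uint32_t comp_mask)
    {
        size_t len = 0;

        if (comp_mask & PATH_SERVICE_ID)
            len += sizeof(uint64_t);
        if (comp_mask & PATH_DGID)
            len += 16;
        if (comp_mask & PATH_PKEY)
            len += sizeof(uint16_t);
        return len;
    }

    /* Analogue of the packing step: one field emitted per set bit. */
    static size_t pack_req(const struct path_req *req, uint8_t *buf)
    {
        uint8_t *p = buf;

        if (req->comp_mask & PATH_SERVICE_ID) {
            memcpy(p, &req->service_id, sizeof(req->service_id));
            p += sizeof(req->service_id);
        }
        if (req->comp_mask & PATH_DGID) {
            memcpy(p, req->dgid, sizeof(req->dgid));
            p += sizeof(req->dgid);
        }
        if (req->comp_mask & PATH_PKEY) {
            memcpy(p, &req->pkey, sizeof(req->pkey));
            p += sizeof(req->pkey);
        }
        return (size_t)(p - buf);
    }

    int main(void)
    {
        struct path_req req = { .comp_mask = PATH_SERVICE_ID | PATH_PKEY,
                                .service_id = 0x1234, .pkey = 0xffff };
        uint8_t buf[64];

        printf("sized %zu, packed %zu\n",
               req_attrs_len(req.comp_mask), pack_req(&req, buf));
        return 0;
    }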
|
D | sa.h | 54 ib_sa_comp_mask comp_mask,
|
D | uverbs_cmd.c |
    1109  if (cmd.comp_mask)  in ib_uverbs_ex_create_cq()
    1320  if (cmd->comp_mask & IB_UVERBS_CREATE_QP_MASK_IND_TABLE) {  in create_qp()
    1540  if (cmd.comp_mask & ~IB_UVERBS_CREATE_QP_SUP_COMP_MASK)  in ib_uverbs_ex_create_qp()
    2896  if (cmd.comp_mask)  in ib_uverbs_ex_create_wq()
    2979  if (cmd.comp_mask)  in ib_uverbs_ex_destroy_wq()
    3066  if (cmd.comp_mask)  in ib_uverbs_ex_create_rwq_ind_table()
    3169  if (cmd.comp_mask)  in ib_uverbs_ex_destroy_rwq_ind_table()
    3197  if (cmd.comp_mask)  in ib_uverbs_ex_create_flow()
    3352  if (cmd.comp_mask)  in ib_uverbs_ex_destroy_flow()
    3606  if (cmd.comp_mask)  in ib_uverbs_ex_query_device()
    [all …]
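
The uverbs_cmd.c hits show the extended-verbs convention for forward compatibility: a handler either insists that cmd.comp_mask is zero or masks it against the set of bits it supports (as ib_uverbs_ex_create_qp() does with IB_UVERBS_CREATE_QP_SUP_COMP_MASK), so a request carrying unknown extension bits is refused rather than silently misinterpreted. A minimal hedged sketch of that validation, with an invented supported mask and error choice:

    /* Illustrative sketch of rejecting unknown comp_mask bits from userspace.
     * CMD_SUPPORTED_MASK and the error code are invented for the example. */
    #include <errno.h>
    #include <stdint.h>
    #include <stdio.h>

    #define CMD_SUPPORTED_MASK 0x3u     /* extension bits this handler knows */

    struct user_cmd {
        uint32_t comp_mask;             /* optional-feature bits from userspace */
    };

    static int handle_cmd(const struct user_cmd *cmd)
    {
        /* A bit outside the supported set means the caller wants an extension
         * this implementation cannot honour, so fail instead of ignoring it. */
        if (cmd->comp_mask & ~CMD_SUPPORTED_MASK)
            return -EOPNOTSUPP;

        /* ... act on the supported bits ... */
        return 0;
    }

    int main(void)
    {
        struct user_cmd ok  = { .comp_mask = 0x1 };
        struct user_cmd bad = { .comp_mask = 0x8 };

        printf("ok:  %d\n", handle_cmd(&ok));   /* 0 */
        printf("bad: %d\n", handle_cmd(&bad));  /* negative errno */
        return 0;
    }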
|
D | cma.c |
    2928  ib_sa_comp_mask comp_mask;  in cma_query_ib_route() local
    2946  comp_mask = IB_SA_PATH_REC_DGID | IB_SA_PATH_REC_SGID |  in cma_query_ib_route()
    2953  comp_mask |= IB_SA_PATH_REC_QOS_CLASS;  in cma_query_ib_route()
    2958  comp_mask |= IB_SA_PATH_REC_TRAFFIC_CLASS;  in cma_query_ib_route()
    2963  comp_mask |= IB_SA_PATH_REC_TRAFFIC_CLASS;  in cma_query_ib_route()
    2969  comp_mask, timeout_ms,  in cma_query_ib_route()
    4897  ib_sa_comp_mask comp_mask;  in cma_join_ib_multicast() local
    4918  comp_mask = IB_SA_MCMEMBER_REC_MGID | IB_SA_MCMEMBER_REC_PORT_GID |  in cma_join_ib_multicast()
    4925  comp_mask |= IB_SA_MCMEMBER_REC_RATE |  in cma_join_ib_multicast()
    4932  id_priv->id.port_num, &rec, comp_mask,  in cma_join_ib_multicast()
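
cma.c builds its query masks incrementally: a mandatory core (DGID, SGID, and friends) is always set, and optional bits such as IB_SA_PATH_REC_QOS_CLASS or IB_SA_PATH_REC_TRAFFIC_CLASS are OR-ed in only when the corresponding value is actually known. A compact, hedged sketch of that accumulation with invented constants:

    /* Illustrative sketch: mandatory bits plus whatever optional information
     * the caller actually has.  Bit values are invented. */
    #include <stdint.h>
    #include <stdio.h>

    #define REC_DGID      (1u << 0)
    #define REC_SGID      (1u << 1)
    #define REC_PKEY      (1u << 2)
    #define REC_QOS_CLASS (1u << 3)

    struct route_query {
        int      has_qos;       /* do we know a QoS class for this route? */
        uint16_t qos_class;
    };

    static uint32_t build_comp_mask(const struct route_query *q)
    {
        uint32_t comp_mask = REC_DGID | REC_SGID | REC_PKEY;  /* always present */

        if (q->has_qos)
            comp_mask |= REC_QOS_CLASS;   /* only constrain what we filled in */

        return comp_mask;
    }

    int main(void)
    {
        struct route_query q = { .has_qos = 1, .qos_class = 7 };

        printf("comp_mask = 0x%x\n", (unsigned)build_comp_mask(&q));
        return 0;
    }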
|
/drivers/infiniband/hw/mlx4/ |
D | mcg.c |
    298  mad.sa_hdr.comp_mask = IB_SA_MCMEMBER_REC_MGID |  in send_leave_to_wire()
    339  mad.sa_hdr.comp_mask = 0; /* ignored on responses, see IBTA spec */  in send_reply_to_slave()
    352  static int check_selector(ib_sa_comp_mask comp_mask,  in check_selector() argument
    362  if (!(comp_mask & selector_mask) || !(comp_mask & value_mask))  in check_selector()
    384  struct ib_sa_mcmember_data *dst, ib_sa_comp_mask comp_mask)  in cmp_rec() argument
    391  if (comp_mask & IB_SA_MCMEMBER_REC_QKEY && src->qkey != dst->qkey)  in cmp_rec()
    393  if (comp_mask & IB_SA_MCMEMBER_REC_MLID && src->mlid != dst->mlid)  in cmp_rec()
    395  if (check_selector(comp_mask, IB_SA_MCMEMBER_REC_MTU_SELECTOR,  in cmp_rec()
    399  if (comp_mask & IB_SA_MCMEMBER_REC_TRAFFIC_CLASS &&  in cmp_rec()
    402  if (comp_mask & IB_SA_MCMEMBER_REC_PKEY && src->pkey != dst->pkey)  in cmp_rec()
    [all …]
|
D | alias_GUID.c |
    455  ib_sa_comp_mask comp_mask = 0;  in invalidate_guid_record() local
    474  comp_mask |= mlx4_ib_get_aguid_comp_mask_from_ix(i);  in invalidate_guid_record()
    477  all_rec_per_port[index].guid_indexes |= comp_mask;  in invalidate_guid_record()
    491  ib_sa_comp_mask comp_mask;  in set_guid_rec() local
    535  comp_mask = IB_SA_GUIDINFO_REC_LID | IB_SA_GUIDINFO_REC_BLOCK_NUM |  in set_guid_rec()
    546  comp_mask, rec->method, 1000,  in set_guid_rec()
|
D | qp.c |
    741  if (ucmd.comp_mask || ucmd.reserved1)  in _mlx4_ib_create_qp_rss()
    893  if (wq.comp_mask || wq.reserved[0] || wq.reserved[1] ||  in create_rq()
    4132  required_cmd_sz = offsetof(typeof(ucmd), comp_mask) +  in mlx4_ib_create_wq()
    4133  sizeof(ucmd.comp_mask);  in mlx4_ib_create_wq()
    4282  if (ucmd.comp_mask || ucmd.reserved)  in mlx4_ib_modify_wq()
|
D | mlx4_ib.h | 666 __u32 comp_mask; member
|
D | main.c |
    456  if (cmd.comp_mask)  in mlx4_ib_query_device()
    584  resp.comp_mask |= MLX4_IB_QUERY_DEV_RESP_MASK_CORE_CLOCK_OFFSET;  in mlx4_ib_query_device()
|
/drivers/crypto/ccree/ |
D | cc_driver.c |
    207  if (irr & drvdata->comp_mask) {  in cc_isr()
    211  cc_iowrite(drvdata, CC_REG(HOST_IMR), imr | drvdata->comp_mask);  in cc_isr()
    212  irr &= ~drvdata->comp_mask;  in cc_isr()
    292  val = drvdata->comp_mask | CC_AXI_ERR_IRQ_MASK;  in init_cc_regs()
    338  new_drvdata->comp_mask = CC_COMP_IRQ_MASK;  in init_cc_resources()
    459  new_drvdata->comp_mask |= CC_CPP_SM4_ABORT_MASK;  in init_cc_resources()
    461  new_drvdata->comp_mask |= CC_CPP_AES_ABORT_MASK;  in init_cc_resources()
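
The cc_driver.c hits outline the interrupt flow around comp_mask: init_cc_resources() selects which IRR bits count as completions (CC_COMP_IRQ_MASK, plus the CPP abort bits where present), cc_isr() masks those bits in HOST_IMR when they fire, and comp_handler() in cc_request_mgr.c appears to unmask them again once the queue has been drained. The sketch below models that mask/unmask dance with plain variables standing in for registers; all names and values are invented.

    /* Illustrative sketch of masking completion interrupts while they are
     * being handled.  Registers are plain variables here, not MMIO. */
    #include <stdint.h>
    #include <stdio.h>

    #define COMP_IRQ_MASK 0x4u          /* stand-in for CC_COMP_IRQ_MASK */
    #define AXI_ERR_MASK  0x8u

    struct dev_state {
        uint32_t imr;                   /* interrupt mask register (1 = masked) */
        uint32_t irr;                   /* interrupt request register */
        uint32_t comp_mask;             /* which IRR bits mean "completion" */
    };

    static void isr(struct dev_state *dev)
    {
        uint32_t irr = dev->irr;

        if (irr & dev->comp_mask) {
            /* Mask further completion interrupts until the completion
             * handler has drained the queue, as in the cc_isr() hits. */
            dev->imr |= dev->comp_mask;
            irr &= ~dev->comp_mask;
            printf("completion IRQ, masked (imr=0x%x)\n", (unsigned)dev->imr);
        }
        if (irr)
            printf("unhandled bits: 0x%x\n", (unsigned)irr);
    }

    static void completion_done(struct dev_state *dev)
    {
        dev->imr &= ~dev->comp_mask;    /* re-enable completion interrupts */
    }

    int main(void)
    {
        struct dev_state dev = { .imr = AXI_ERR_MASK, .irr = COMP_IRQ_MASK,
                                 .comp_mask = COMP_IRQ_MASK };

        isr(&dev);
        completion_done(&dev);
        printf("imr after handler: 0x%x\n", (unsigned)dev.imr);
        return 0;
    }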
|
D | cc_request_mgr.c |
    622  irq = (drvdata->irq & drvdata->comp_mask);  in comp_handler()
    639  irq = (drvdata->irq & drvdata->comp_mask);  in comp_handler()
    658  cc_ioread(drvdata, CC_REG(HOST_IMR)) & ~drvdata->comp_mask);  in comp_handler()
|
D | cc_driver.h | 155 u32 comp_mask; member
|
/drivers/gpu/drm/arm/display/komeda/ |
D | komeda_pipeline.c |
    140  u32 comp_mask)  in komeda_pipeline_get_first_component() argument
    143  unsigned long comp_mask_local = (unsigned long)comp_mask;  in komeda_pipeline_get_first_component()
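
komeda_pipeline_get_first_component() copies the u32 comp_mask into an unsigned long local, presumably so the kernel's bit-scan helpers (which operate on unsigned long words) can pick out the lowest set component bit. A hedged userspace sketch of that lookup, using a plain loop instead of find_first_bit():

    /* Illustrative sketch: return the lowest set bit of a 32-bit component
     * mask, or -1 for an empty mask. */
    #include <stdint.h>
    #include <stdio.h>

    static int first_component(uint32_t comp_mask)
    {
        unsigned long mask = comp_mask;   /* widen, as the driver does */

        for (int id = 0; id < 32; id++)
            if (mask & (1ul << id))
                return id;                /* lowest component id in the mask */
        return -1;                        /* empty mask: no component */
    }

    int main(void)
    {
        printf("%d\n", first_component(0x0));   /* -1 */
        printf("%d\n", first_component(0x28));  /* 3  */
        return 0;
    }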
|
D | komeda_pipeline.h | 497 u32 comp_mask);
|
/drivers/infiniband/ulp/ipoib/ |
D | ipoib_multicast.c |
    469  ib_sa_comp_mask comp_mask;  in ipoib_mcast_join() local
    485  comp_mask =  in ipoib_mcast_join()
    499  comp_mask |=  in ipoib_mcast_join()
    536  &rec, comp_mask, GFP_ATOMIC,  in ipoib_mcast_join()
|
/drivers/media/platform/qcom/camss/ |
D | camss-vfe-4-1.c |
    570  u32 comp_mask = 0;  in vfe_enable_irq_pix_line() local
    580  comp_mask |= (1 << output->wm_idx[i]) << comp * 8;  in vfe_enable_irq_pix_line()
    586  vfe_reg_set(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);  in vfe_enable_irq_pix_line()
    590  vfe_reg_clr(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);  in vfe_enable_irq_pix_line()
|
D | camss-vfe-4-8.c |
    696  u32 comp_mask = 0;  in vfe_enable_irq_pix_line() local
    705  comp_mask |= (1 << output->wm_idx[i]) << comp * 8;  in vfe_enable_irq_pix_line()
    711  vfe_reg_set(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);  in vfe_enable_irq_pix_line()
    715  vfe_reg_clr(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);  in vfe_enable_irq_pix_line()
|
D | camss-vfe-4-7.c |
    724  u32 comp_mask = 0;  in vfe_enable_irq_pix_line() local
    734  comp_mask |= (1 << output->wm_idx[i]) << comp * 8;  in vfe_enable_irq_pix_line()
    740  vfe_reg_set(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);  in vfe_enable_irq_pix_line()
    744  vfe_reg_clr(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);  in vfe_enable_irq_pix_line()
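
All three camss VFE variants above build the composite IRQ mask the same way: the register carries one byte per composite group, with one bit per write master inside that byte, hence comp_mask |= (1 << output->wm_idx[i]) << comp * 8 before the mask is set or cleared in VFE_0_IRQ_COMPOSITE_MASK_0. A hedged standalone sketch of that bit placement; the per-output structure and sizes are invented.

    /* Illustrative sketch of the composite IRQ mask layout: one byte per
     * composite slot, one bit per write master (wm) inside the byte. */
    #include <stdint.h>
    #include <stdio.h>

    #define WM_PER_OUTPUT 3               /* write masters per output (example) */

    struct output {
        int comp_index;                   /* which composite slot (byte) */
        int wm_idx[WM_PER_OUTPUT];        /* write-master indices for this output */
        int wm_num;
    };

    static uint32_t composite_mask(const struct output *out)
    {
        uint32_t comp_mask = 0;

        for (int i = 0; i < out->wm_num; i++)
            comp_mask |= (1u << out->wm_idx[i]) << (out->comp_index * 8);

        return comp_mask;
    }

    int main(void)
    {
        struct output out = { .comp_index = 1, .wm_idx = { 0, 2 }, .wm_num = 2 };

        /* wm 0 and wm 2 of composite slot 1 -> bits 8 and 10 -> 0x500. */
        printf("composite mask: 0x%x\n", (unsigned)composite_mask(&out));
        return 0;
    }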
|
/drivers/infiniband/hw/efa/ |
D | efa_verbs.c |
    676  if (cmd.comp_mask) {  in efa_create_qp()
    1060  resp->comp_mask |= EFA_CREATE_CQ_RESP_DB_OFF;  in cq_mmap_entries_setup()
    1118  if (cmd.comp_mask || !is_reserved_cleared(cmd.reserved_58)) {  in efa_create_cq()
    1795  if (EFA_CHECK_USER_COMP(dev, cmd->comp_mask, max_tx_batch,  in efa_user_comp_handshake()
    1799  if (EFA_CHECK_USER_COMP(dev, cmd->comp_mask, min_sq_depth,  in efa_user_comp_handshake()
|
/drivers/infiniband/hw/mlx5/ |
D | qp.c |
    1599  resp->comp_mask |= MLX5_IB_CREATE_QP_RESP_MASK_TISN;  in create_raw_packet_qp()
    1601  resp->comp_mask |= MLX5_IB_CREATE_QP_RESP_MASK_SQN;  in create_raw_packet_qp()
    1627  resp->comp_mask |= MLX5_IB_CREATE_QP_RESP_MASK_RQN;  in create_raw_packet_qp()
    1629  resp->comp_mask |= MLX5_IB_CREATE_QP_RESP_MASK_TIRN;  in create_raw_packet_qp()
    1642  resp->comp_mask |=  in create_raw_packet_qp()
    1736  if (ucmd->comp_mask) {  in create_rss_raw_qp_tir()
    1891  params->resp.comp_mask |= MLX5_IB_CREATE_QP_RESP_MASK_TIRN;  in create_rss_raw_qp_tir()
    1905  params->resp.comp_mask |=  in create_rss_raw_qp_tir()
    4687  if (ucmd.comp_mask ||  in mlx5_ib_modify_qp()
    5373  if (ucmd.comp_mask & (~MLX5_IB_CREATE_WQ_STRIDING_RQ)) {  in prepare_user_rq()
    [all …]
|
D | main.c |
    834  resp_len = sizeof(resp.comp_mask) + sizeof(resp.response_length);  in mlx5_ib_query_device()
    1781  resp->comp_mask |=  in set_ucontext_resp()
    1822  resp->comp_mask |=  in set_ucontext_resp()
    1830  resp->comp_mask |= MLX5_IB_ALLOC_UCONTEXT_RESP_MASK_ECE;  in set_ucontext_resp()
    1835  resp->comp_mask |=  in set_ucontext_resp()
    1841  resp->comp_mask |= MLX5_IB_ALLOC_UCONTEXT_RESP_MASK_SQD2RTS;  in set_ucontext_resp()
    1843  resp->comp_mask |=  in set_ucontext_resp()
    1882  if (req.comp_mask || req.reserved0 || req.reserved1 || req.reserved2)  in mlx5_ib_alloc_ucontext()
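
On the mlx5 response side the roles are reversed: set_ucontext_resp() and create_raw_packet_qp() set a comp_mask bit for each optional response field they actually populated (TIRN, SQN, ECE options, and so on), and userspace is expected to test the bit before trusting the field. A hedged sketch of that convention with invented names and bit values:

    /* Illustrative sketch: the responder only fills optional fields it
     * supports and flags each filled field in comp_mask. */
    #include <stdint.h>
    #include <stdio.h>

    #define RESP_HAS_TIRN (1u << 0)     /* invented response-mask bits */
    #define RESP_HAS_ECE  (1u << 1)

    struct create_resp {
        uint32_t comp_mask;             /* which optional fields below are valid */
        uint32_t tirn;
        uint32_t ece_options;
    };

    static void fill_resp(struct create_resp *resp, int have_tir, int have_ece)
    {
        resp->comp_mask = 0;

        if (have_tir) {
            resp->tirn = 0x42;          /* some hardware object id */
            resp->comp_mask |= RESP_HAS_TIRN;
        }
        if (have_ece) {
            resp->ece_options = 0x1;
            resp->comp_mask |= RESP_HAS_ECE;
        }
    }

    int main(void)
    {
        struct create_resp resp;

        fill_resp(&resp, 1, 0);
        /* The consumer must check the bit before trusting the field. */
        if (resp.comp_mask & RESP_HAS_TIRN)
            printf("tirn = 0x%x\n", (unsigned)resp.tirn);
        if (!(resp.comp_mask & RESP_HAS_ECE))
            printf("no ECE options reported\n");
        return 0;
    }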
|
D | mr.c |
    2201  __u32 comp_mask;  in mlx5_ib_alloc_mw() member
    2209  if (req.comp_mask || req.reserved1 || req.reserved2)  in mlx5_ib_alloc_mw()
|
/drivers/infiniband/hw/bnxt_re/ |
D | ib_verbs.c |
    4139  resp.comp_mask = BNXT_RE_UCNTX_CMASK_HAVE_CCTX;  in bnxt_re_alloc_ucontext()
    4153  resp.comp_mask |= BNXT_RE_UCNTX_CMASK_HAVE_MODE;  in bnxt_re_alloc_ucontext()
    4157  resp.comp_mask |= BNXT_RE_UCNTX_CMASK_WC_DPI_ENABLED;  in bnxt_re_alloc_ucontext()
    4166  resp.comp_mask |= BNXT_RE_UCNTX_CMASK_DBR_PACING_ENABLED;  in bnxt_re_alloc_ucontext()
    4172  if (ureq.comp_mask & BNXT_RE_COMP_MASK_REQ_UCNTX_POW2_SUPPORT) {  in bnxt_re_alloc_ucontext()
    4173  resp.comp_mask |= BNXT_RE_UCNTX_CMASK_POW2_DISABLED;  in bnxt_re_alloc_ucontext()
|
/drivers/infiniband/hw/irdma/ |
D | verbs.c |
    295  if (req.comp_mask & IRDMA_ALLOC_UCTX_USE_RAW_ATTR)  in irdma_alloc_ucontext()
    332  uresp.comp_mask |= IRDMA_ALLOC_UCTX_USE_RAW_ATTR;  in irdma_alloc_ucontext()
    334  uresp.comp_mask |= IRDMA_ALLOC_UCTX_MIN_HW_WQ_SIZE;  in irdma_alloc_ucontext()
|