/drivers/xen/ |
evtchn.c |
    95  static unsigned int evtchn_ring_offset(struct per_user_data *u,  in evtchn_ring_offset() argument
    98  return idx & (u->ring_size - 1);  in evtchn_ring_offset()
    101  static evtchn_port_t *evtchn_ring_entry(struct per_user_data *u,  in evtchn_ring_entry() argument
    104  return u->ring + evtchn_ring_offset(u, idx);  in evtchn_ring_entry()
    107  static int add_evtchn(struct per_user_data *u, struct user_evtchn *evtchn)  in add_evtchn() argument
    109  struct rb_node **new = &(u->evtchns.rb_node), *parent = NULL;  in add_evtchn()
    111  u->nr_evtchns++;  in add_evtchn()
    129  rb_insert_color(&evtchn->node, &u->evtchns);  in add_evtchn()
    134  static void del_evtchn(struct per_user_data *u, struct user_evtchn *evtchn)  in del_evtchn() argument
    136  u->nr_evtchns--;  in del_evtchn()
    [all …]

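The evtchn.c hits above rely on the usual power-of-two ring trick: because the ring size is a power of two, masking a free-running index with (ring_size - 1) wraps it into the ring without a division. Below is a minimal, self-contained sketch of that idiom; RING_SIZE and ring_offset() are illustrative names, not the driver's.

#include <assert.h>
#include <stdio.h>

#define RING_SIZE 8u    /* the mask trick requires a power-of-two size */

/* Wrap a free-running index into the ring, as evtchn_ring_offset() does. */
static unsigned int ring_offset(unsigned int idx)
{
    return idx & (RING_SIZE - 1);
}

int main(void)
{
    unsigned int idx;

    /* For a power-of-two size the mask is exactly the modulo. */
    for (idx = 0; idx < 20; idx++)
        assert(ring_offset(idx) == idx % RING_SIZE);

    printf("offset of idx=13 is %u\n", ring_offset(13));  /* prints 5 */
    return 0;
}
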
efi.c |
    38  .u.efi_runtime_call.function = XEN_EFI_##name, \
    39  .u.efi_runtime_call.misc = 0}
    41  #define efi_data(op) (op.u.efi_runtime_call)
    51  BUILD_BUG_ON(sizeof(*tm) != sizeof(efi_data(op).u.get_time.time));  in xen_efi_get_time()
    52  memcpy(tm, &efi_data(op).u.get_time.time, sizeof(*tm));  in xen_efi_get_time()
    56  tc->resolution = efi_data(op).u.get_time.resolution;  in xen_efi_get_time()
    57  tc->accuracy = efi_data(op).u.get_time.accuracy;  in xen_efi_get_time()
    69  BUILD_BUG_ON(sizeof(*tm) != sizeof(efi_data(op).u.set_time));  in xen_efi_set_time()
    70  memcpy(&efi_data(op).u.set_time, tm, sizeof(*tm));  in xen_efi_set_time()
    88  BUILD_BUG_ON(sizeof(*tm) != sizeof(efi_data(op).u.get_wakeup_time));  in xen_efi_get_wakeup_time()
    [all …]

/drivers/xen/xenbus/ |
xenbus_dev_frontend.c |
    112  } u;  member
    129  struct xenbus_file_priv *u = filp->private_data;  in xenbus_file_read() local
    134  mutex_lock(&u->reply_mutex);  in xenbus_file_read()
    136  while (list_empty(&u->read_buffers)) {  in xenbus_file_read()
    137  mutex_unlock(&u->reply_mutex);  in xenbus_file_read()
    141  ret = wait_event_interruptible(u->read_waitq,  in xenbus_file_read()
    142  !list_empty(&u->read_buffers));  in xenbus_file_read()
    145  mutex_lock(&u->reply_mutex);  in xenbus_file_read()
    148  rb = list_entry(u->read_buffers.next, struct read_buffer, list);  in xenbus_file_read()
    168  if (list_empty(&u->read_buffers))  in xenbus_file_read()
    [all …]

/drivers/block/xen-blkback/ |
common.h |
    127  } u;  member
    183  } u;  member
    408  dst->u.rw.nr_segments = src->u.rw.nr_segments;  in blkif_get_x86_32_req()
    409  dst->u.rw.handle = src->u.rw.handle;  in blkif_get_x86_32_req()
    410  dst->u.rw.id = src->u.rw.id;  in blkif_get_x86_32_req()
    411  dst->u.rw.sector_number = src->u.rw.sector_number;  in blkif_get_x86_32_req()
    413  if (n > dst->u.rw.nr_segments)  in blkif_get_x86_32_req()
    414  n = dst->u.rw.nr_segments;  in blkif_get_x86_32_req()
    416  dst->u.rw.seg[i] = src->u.rw.seg[i];  in blkif_get_x86_32_req()
    419  dst->u.discard.flag = src->u.discard.flag;  in blkif_get_x86_32_req()
    [all …]

/drivers/scsi/fnic/ |
fnic_res.h |
    96  desc->hdr.tag.u.req_id = req_id; /* id for this request */  in fnic_queue_wq_copy_desc_icmnd_16()
    98  desc->u.icmnd_16.lunmap_id = lunmap_id; /* index into lunmap table */  in fnic_queue_wq_copy_desc_icmnd_16()
    99  desc->u.icmnd_16.special_req_flags = spl_flags; /* exch req flags */  in fnic_queue_wq_copy_desc_icmnd_16()
    100  desc->u.icmnd_16._resvd0[0] = 0; /* reserved */  in fnic_queue_wq_copy_desc_icmnd_16()
    101  desc->u.icmnd_16._resvd0[1] = 0; /* reserved */  in fnic_queue_wq_copy_desc_icmnd_16()
    102  desc->u.icmnd_16._resvd0[2] = 0; /* reserved */  in fnic_queue_wq_copy_desc_icmnd_16()
    103  desc->u.icmnd_16.sgl_cnt = sgl_cnt; /* scatter-gather list count */  in fnic_queue_wq_copy_desc_icmnd_16()
    104  desc->u.icmnd_16.sense_len = sense_len; /* sense buffer length */  in fnic_queue_wq_copy_desc_icmnd_16()
    105  desc->u.icmnd_16.sgl_addr = sgl_addr; /* scatter-gather list addr */  in fnic_queue_wq_copy_desc_icmnd_16()
    106  desc->u.icmnd_16.sense_addr = sns_addr; /* sense buffer address */  in fnic_queue_wq_copy_desc_icmnd_16()
    [all …]

/drivers/gpu/drm/selftests/ |
test-drm_dp_mst_helper.c |
    62  #define IN in->u.i2c_read  in sideband_msg_req_equal()
    63  #define OUT out->u.i2c_read  in sideband_msg_req_equal()
    90  #define IN in->u.dpcd_write  in sideband_msg_req_equal()
    91  #define OUT out->u.dpcd_write  in sideband_msg_req_equal()
    102  #define IN in->u.i2c_write  in sideband_msg_req_equal()
    103  #define OUT out->u.i2c_write  in sideband_msg_req_equal()
    159  kfree(out->u.dpcd_write.bytes);  in sideband_msg_req_encode_decode()
    162  for (i = 0; i < out->u.i2c_read.num_transactions; i++)  in sideband_msg_req_encode_decode()
    163  kfree(out->u.i2c_read.transactions[i].bytes);  in sideband_msg_req_encode_decode()
    166  kfree(out->u.i2c_write.bytes);  in sideband_msg_req_encode_decode()
    [all …]

/drivers/crypto/ccp/ |
ccp-crypto-rsa.c |
    52  req->dst_len = rctx->cmd.u.rsa.key_size >> 3;  in ccp_rsa_complete()
    61  return ctx->u.rsa.n_len;  in ccp_rsa_maxsize()
    75  rctx->cmd.u.rsa.key_size = ctx->u.rsa.key_len; /* in bits */  in ccp_rsa_crypt()
    77  rctx->cmd.u.rsa.exp = &ctx->u.rsa.e_sg;  in ccp_rsa_crypt()
    78  rctx->cmd.u.rsa.exp_len = ctx->u.rsa.e_len;  in ccp_rsa_crypt()
    80  rctx->cmd.u.rsa.exp = &ctx->u.rsa.d_sg;  in ccp_rsa_crypt()
    81  rctx->cmd.u.rsa.exp_len = ctx->u.rsa.d_len;  in ccp_rsa_crypt()
    83  rctx->cmd.u.rsa.mod = &ctx->u.rsa.n_sg;  in ccp_rsa_crypt()
    84  rctx->cmd.u.rsa.mod_len = ctx->u.rsa.n_len;  in ccp_rsa_crypt()
    85  rctx->cmd.u.rsa.src = req->src;  in ccp_rsa_crypt()
    [all …]

ccp-crypto-aes-cmac.c |
    69  if (!ctx->u.aes.key_len)  in ccp_do_cmac_update()
    153  cmac_key_sg = (need_pad) ? &ctx->u.aes.k2_sg  in ccp_do_cmac_update()
    154  : &ctx->u.aes.k1_sg;  in ccp_do_cmac_update()
    159  rctx->cmd.u.aes.type = ctx->u.aes.type;  in ccp_do_cmac_update()
    160  rctx->cmd.u.aes.mode = ctx->u.aes.mode;  in ccp_do_cmac_update()
    161  rctx->cmd.u.aes.action = CCP_AES_ACTION_ENCRYPT;  in ccp_do_cmac_update()
    162  rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;  in ccp_do_cmac_update()
    163  rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;  in ccp_do_cmac_update()
    164  rctx->cmd.u.aes.iv = &rctx->iv_sg;  in ccp_do_cmac_update()
    165  rctx->cmd.u.aes.iv_len = AES_BLOCK_SIZE;  in ccp_do_cmac_update()
    [all …]

ccp-crypto-aes-galois.c |
    36  ctx->u.aes.type = CCP_AES_TYPE_128;  in ccp_aes_gcm_setkey()
    39  ctx->u.aes.type = CCP_AES_TYPE_192;  in ccp_aes_gcm_setkey()
    42  ctx->u.aes.type = CCP_AES_TYPE_256;  in ccp_aes_gcm_setkey()
    48  ctx->u.aes.mode = CCP_AES_MODE_GCM;  in ccp_aes_gcm_setkey()
    49  ctx->u.aes.key_len = key_len;  in ccp_aes_gcm_setkey()
    51  memcpy(ctx->u.aes.key, key, key_len);  in ccp_aes_gcm_setkey()
    52  sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);  in ccp_aes_gcm_setkey()
    86  if (!ctx->u.aes.key_len)  in ccp_aes_gcm_crypt()
    89  if (ctx->u.aes.mode != CCP_AES_MODE_GCM)  in ccp_aes_gcm_crypt()
    119  rctx->cmd.u.aes.authsize = crypto_aead_authsize(tfm);  in ccp_aes_gcm_crypt()
    [all …]

ccp-crypto-des3.c |
    30  if (ctx->u.des3.mode != CCP_DES3_MODE_ECB)  in ccp_des3_complete()
    50  ctx->u.des3.type = CCP_DES3_TYPE_168;  in ccp_des3_setkey()
    51  ctx->u.des3.mode = alg->mode;  in ccp_des3_setkey()
    52  ctx->u.des3.key_len = key_len;  in ccp_des3_setkey()
    54  memcpy(ctx->u.des3.key, key, key_len);  in ccp_des3_setkey()
    55  sg_init_one(&ctx->u.des3.key_sg, ctx->u.des3.key, key_len);  in ccp_des3_setkey()
    69  if (!ctx->u.des3.key_len)  in ccp_des3_crypt()
    72  if (((ctx->u.des3.mode == CCP_DES3_MODE_ECB) ||  in ccp_des3_crypt()
    73  (ctx->u.des3.mode == CCP_DES3_MODE_CBC)) &&  in ccp_des3_crypt()
    77  if (ctx->u.des3.mode != CCP_DES3_MODE_ECB) {  in ccp_des3_crypt()
    [all …]

ccp-crypto-aes.c |
    31  if (ctx->u.aes.mode != CCP_AES_MODE_ECB)  in ccp_aes_complete()
    45  ctx->u.aes.type = CCP_AES_TYPE_128;  in ccp_aes_setkey()
    48  ctx->u.aes.type = CCP_AES_TYPE_192;  in ccp_aes_setkey()
    51  ctx->u.aes.type = CCP_AES_TYPE_256;  in ccp_aes_setkey()
    56  ctx->u.aes.mode = alg->mode;  in ccp_aes_setkey()
    57  ctx->u.aes.key_len = key_len;  in ccp_aes_setkey()
    59  memcpy(ctx->u.aes.key, key, key_len);  in ccp_aes_setkey()
    60  sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);  in ccp_aes_setkey()
    74  if (!ctx->u.aes.key_len)  in ccp_aes_crypt()
    77  if (((ctx->u.aes.mode == CCP_AES_MODE_ECB) ||  in ccp_aes_crypt()
    [all …]

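A pattern repeats across the ccp-crypto-* hits above: one transform context embeds a union u, and each algorithm front end (aes, des3, rsa, ...) only ever touches its own arm, selected when the key is set. Here is a minimal sketch of that shape, with invented type and field names rather than the driver's real structures.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

enum alg_kind { ALG_AES, ALG_DES3, ALG_RSA };

/* One context, one union: only the arm matching "kind" is ever valid. */
struct xfrm_ctx {
    enum alg_kind kind;
    union {
        struct { uint8_t key[32]; unsigned int key_len; } aes;
        struct { uint8_t key[24]; unsigned int key_len; } des3;
        struct { unsigned int key_bits; } rsa;
    } u;
};

static int aes_setkey(struct xfrm_ctx *ctx, const uint8_t *key,
                      unsigned int key_len)
{
    if (key_len != 16 && key_len != 24 && key_len != 32)
        return -1;

    ctx->kind = ALG_AES;
    memcpy(ctx->u.aes.key, key, key_len);   /* only the aes arm is touched */
    ctx->u.aes.key_len = key_len;
    return 0;
}

int main(void)
{
    struct xfrm_ctx ctx = { 0 };
    const uint8_t key[16] = { 0 };

    if (aes_setkey(&ctx, key, sizeof(key)) == 0)
        printf("stored a %u-byte AES key\n", ctx.u.aes.key_len);
    return 0;
}
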
ccp-crypto-aes-xts.c |
    91  memcpy(ctx->u.aes.key, key, key_len);  in ccp_aes_xts_setkey()
    95  memcpy(ctx->u.aes.key, key, key_len);  in ccp_aes_xts_setkey()
    98  ctx->u.aes.key_len = key_len / 2;  in ccp_aes_xts_setkey()
    99  sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);  in ccp_aes_xts_setkey()
    101  return crypto_skcipher_setkey(ctx->u.aes.tfm_skcipher, key, key_len);  in ccp_aes_xts_setkey()
    116  if (!ctx->u.aes.key_len)  in ccp_aes_xts_crypt()
    142  (ctx->u.aes.key_len != AES_KEYSIZE_128))  in ccp_aes_xts_crypt()
    144  if ((ctx->u.aes.key_len != AES_KEYSIZE_128) &&  in ccp_aes_xts_crypt()
    145  (ctx->u.aes.key_len != AES_KEYSIZE_256))  in ccp_aes_xts_crypt()
    152  ctx->u.aes.tfm_skcipher);  in ccp_aes_xts_crypt()
    [all …]

ccp-dev-v3.c |
    154  | (op->u.aes.type << REQ1_AES_TYPE_SHIFT)  in ccp_perform_aes()
    155  | (op->u.aes.mode << REQ1_AES_MODE_SHIFT)  in ccp_perform_aes()
    156  | (op->u.aes.action << REQ1_AES_ACTION_SHIFT)  in ccp_perform_aes()
    158  cr[1] = op->src.u.dma.length - 1;  in ccp_perform_aes()
    159  cr[2] = ccp_addr_lo(&op->src.u.dma);  in ccp_perform_aes()
    162  | ccp_addr_hi(&op->src.u.dma);  in ccp_perform_aes()
    163  cr[4] = ccp_addr_lo(&op->dst.u.dma);  in ccp_perform_aes()
    165  | ccp_addr_hi(&op->dst.u.dma);  in ccp_perform_aes()
    167  if (op->u.aes.mode == CCP_AES_MODE_CFB)  in ccp_perform_aes()
    185  | (op->u.xts.action << REQ1_AES_ACTION_SHIFT)  in ccp_perform_xts_aes()
    [all …]

ccp-ops.c |
    380  op->src.u.dma.address = src->dm_wa.dma.address;  in ccp_prepare_data()
    381  op->src.u.dma.offset = 0;  in ccp_prepare_data()
    382  op->src.u.dma.length = (blocksize_op) ? block_size : cp_len;  in ccp_prepare_data()
    387  op->src.u.dma.address = sg_dma_address(src->sg_wa.dma_sg);  in ccp_prepare_data()
    388  op->src.u.dma.offset = src->sg_wa.sg_used;  in ccp_prepare_data()
    389  op->src.u.dma.length = op_len & ~(block_size - 1);  in ccp_prepare_data()
    391  ccp_update_sg_workarea(&src->sg_wa, op->src.u.dma.length);  in ccp_prepare_data()
    401  op->dst.u.dma.address = dst->dm_wa.dma.address;  in ccp_prepare_data()
    402  op->dst.u.dma.offset = 0;  in ccp_prepare_data()
    403  op->dst.u.dma.length = op->src.u.dma.length;  in ccp_prepare_data()
    [all …]

/drivers/staging/vc04_services/vchiq-mmal/ |
mmal-vchiq.c |
    161  } u;  member
    242  msg->u.event_to_host.client_component,  in event_to_host_cb()
    243  msg->u.event_to_host.port_type,  in event_to_host_cb()
    244  msg->u.event_to_host.port_num,  in event_to_host_cb()
    245  msg->u.event_to_host.cmd, msg->u.event_to_host.length);  in event_to_host_cb()
    256  container_of(work, struct mmal_msg_context, u.bulk.work);  in buffer_work_cb()
    257  struct mmal_buffer *buffer = msg_context->u.bulk.buffer;  in buffer_work_cb()
    265  buffer->length = msg_context->u.bulk.buffer_used;  in buffer_work_cb()
    266  buffer->mmal_flags = msg_context->u.bulk.mmal_flags;  in buffer_work_cb()
    267  buffer->dts = msg_context->u.bulk.dts;  in buffer_work_cb()
    [all …]

/drivers/block/paride/ |
mkd |
    23  for u in 0 1 2 3 ; do pd $u ; done
    24  for u in 0 1 2 3 ; do mkdev pcd$u b 46 $u ; done
    25  for u in 0 1 2 3 ; do mkdev pf$u b 47 $u ; done
    26  for u in 0 1 2 3 ; do mkdev pt$u c 96 $u ; done
    27  for u in 0 1 2 3 ; do mkdev npt$u c 96 $[ $u + 128 ] ; done
    28  for u in 0 1 2 3 ; do mkdev pg$u c 97 $u ; done

/drivers/input/joystick/iforce/ |
iforce-ff.c |
    195  ret |= old->u.condition[i].right_saturation != new->u.condition[i].right_saturation  in need_condition_modifier()
    196  || old->u.condition[i].left_saturation != new->u.condition[i].left_saturation  in need_condition_modifier()
    197  || old->u.condition[i].right_coeff != new->u.condition[i].right_coeff  in need_condition_modifier()
    198  || old->u.condition[i].left_coeff != new->u.condition[i].left_coeff  in need_condition_modifier()
    199  || old->u.condition[i].deadband != new->u.condition[i].deadband  in need_condition_modifier()
    200  || old->u.condition[i].center != new->u.condition[i].center;  in need_condition_modifier()
    219  return old->u.constant.level != effect->u.constant.level;  in need_magnitude_modifier()
    231  if (old->u.constant.envelope.attack_length != effect->u.constant.envelope.attack_length  in need_envelope_modifier()
    232  || old->u.constant.envelope.attack_level != effect->u.constant.envelope.attack_level  in need_envelope_modifier()
    233  || old->u.constant.envelope.fade_length != effect->u.constant.envelope.fade_length  in need_envelope_modifier()
    [all …]

/drivers/scsi/qla2xxx/ |
qla_gs.c |
    518  ct_sns = (struct ct_sns_pkt *)sp->u.iocb_cmd.u.ctarg.rsp;  in qla2x00_async_sns_sp_done()
    532  e->u.iosb.sp = sp;  in qla2x00_async_sns_sp_done()
    542  if (sp->u.iocb_cmd.u.ctarg.req) {  in qla2x00_async_sns_sp_done()
    544  sp->u.iocb_cmd.u.ctarg.req_allocated_size,  in qla2x00_async_sns_sp_done()
    545  sp->u.iocb_cmd.u.ctarg.req,  in qla2x00_async_sns_sp_done()
    546  sp->u.iocb_cmd.u.ctarg.req_dma);  in qla2x00_async_sns_sp_done()
    547  sp->u.iocb_cmd.u.ctarg.req = NULL;  in qla2x00_async_sns_sp_done()
    550  if (sp->u.iocb_cmd.u.ctarg.rsp) {  in qla2x00_async_sns_sp_done()
    552  sp->u.iocb_cmd.u.ctarg.rsp_allocated_size,  in qla2x00_async_sns_sp_done()
    553  sp->u.iocb_cmd.u.ctarg.rsp,  in qla2x00_async_sns_sp_done()
    [all …]

/drivers/staging/rtl8723bs/os_dep/ |
ioctl_linux.c |
    70  param->u.crypt.err = 0;  in wpa_set_encryption()
    71  param->u.crypt.alg[IEEE_CRYPT_ALG_NAME_LEN - 1] = '\0';  in wpa_set_encryption()
    73  if (param_len < (u32)((u8 *)param->u.crypt.key - (u8 *)param) + param->u.crypt.key_len) {  in wpa_set_encryption()
    85  if (strcmp(param->u.crypt.alg, "WEP") == 0)  in wpa_set_encryption()
    90  if (param->u.crypt.idx > max_idx) {  in wpa_set_encryption()
    91  netdev_err(dev, "Error crypt.idx %d > %d\n", param->u.crypt.idx, max_idx);  in wpa_set_encryption()
    96  if (strcmp(param->u.crypt.alg, "WEP") == 0) {  in wpa_set_encryption()
    102  wep_key_idx = param->u.crypt.idx;  in wpa_set_encryption()
    103  wep_key_len = param->u.crypt.key_len;  in wpa_set_encryption()
    129  memcpy(pwep->key_material, param->u.crypt.key, pwep->key_length);  in wpa_set_encryption()
    [all …]

/drivers/scsi/ |
hptiop.c |
    52  req = readl(&hba->u.itl.iop->inbound_queue);  in iop_wait_ready_itl()
    59  writel(req, &hba->u.itl.iop->outbound_queue);  in iop_wait_ready_itl()
    60  readl(&hba->u.itl.iop->outbound_intstatus);  in iop_wait_ready_itl()
    90  while ((req = readl(&hba->u.itl.iop->outbound_queue)) !=  in hptiop_drain_outbound_queue_itl()
    99  ((char __iomem *)hba->u.itl.iop + req);  in hptiop_drain_outbound_queue_itl()
    115  struct hpt_iopmu_itl __iomem *iop = hba->u.itl.iop;  in iop_intr_itl()
    116  void __iomem *plx = hba->u.itl.plx;  in iop_intr_itl()
    163  u32 inbound_head = readl(&hba->u.mv.mu->inbound_head);  in mv_inbound_write()
    169  memcpy_toio(&hba->u.mv.mu->inbound_q[inbound_head], &p, 8);  in mv_inbound_write()
    170  writel(head, &hba->u.mv.mu->inbound_head);  in mv_inbound_write()
    [all …]

/drivers/net/usb/ |
rndis_host.c |
    237  } u;  in rndis_query() local
    240  u.buf = buf;  in rndis_query()
    242  memset(u.get, 0, sizeof *u.get + in_len);  in rndis_query()
    243  u.get->msg_type = cpu_to_le32(RNDIS_MSG_QUERY);  in rndis_query()
    244  u.get->msg_len = cpu_to_le32(sizeof *u.get + in_len);  in rndis_query()
    245  u.get->oid = cpu_to_le32(oid);  in rndis_query()
    246  u.get->len = cpu_to_le32(in_len);  in rndis_query()
    247  u.get->offset = cpu_to_le32(20);  in rndis_query()
    249  retval = rndis_command(dev, u.header, CONTROL_BUFFER_SIZE);  in rndis_query()
    256  off = le32_to_cpu(u.get_c->offset);  in rndis_query()
    [all …]

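rndis_query() above declares a small union of pointer types over one control buffer (u.buf, u.header, u.get, u.get_c), fills it through one arm as the query request and reads the completion back through another. A self-contained sketch of that idiom follows; the message structs here are invented for the example and are not the RNDIS wire format.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct msg_header { uint32_t type; uint32_t len; };
struct query_req  { struct msg_header hdr; uint32_t oid; };
struct query_resp { struct msg_header hdr; uint32_t status; uint32_t value; };

int main(void)
{
    /* Several typed views of the same allocation. */
    union {
        void *buf;
        struct msg_header *header;
        struct query_req *req;
        struct query_resp *resp;
    } u;

    u.buf = calloc(1, 256);
    if (!u.buf)
        return 1;

    u.req->hdr.type = 1;                 /* fill the buffer as a request */
    u.req->hdr.len  = sizeof(*u.req);
    u.req->oid      = 0x00010101;

    /* Pretend a transport overwrote the same buffer with the reply. */
    u.resp->hdr.type = 2;
    u.resp->status   = 0;
    u.resp->value    = 42;

    /* Read it back through the response view. */
    printf("reply status %u value %u\n",
           (unsigned)u.resp->status, (unsigned)u.resp->value);

    free(u.buf);
    return 0;
}
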
/drivers/net/can/usb/kvaser_usb/ |
kvaser_usb_leaf.c |
    363  } u;  member
    370  [CMD_START_CHIP_REPLY] = kvaser_fsize(u.simple),
    371  [CMD_STOP_CHIP_REPLY] = kvaser_fsize(u.simple),
    372  [CMD_GET_CARD_INFO_REPLY] = kvaser_fsize(u.cardinfo),
    373  [CMD_TX_ACKNOWLEDGE] = kvaser_fsize(u.tx_acknowledge_header),
    374  [CMD_GET_SOFTWARE_INFO_REPLY] = kvaser_fsize(u.leaf.softinfo),
    375  [CMD_RX_STD_MESSAGE] = kvaser_fsize(u.leaf.rx_can),
    376  [CMD_RX_EXT_MESSAGE] = kvaser_fsize(u.leaf.rx_can),
    377  [CMD_LEAF_LOG_MESSAGE] = kvaser_fsize(u.leaf.log_message),
    378  [CMD_CHIP_STATE_EVENT] = kvaser_fsize(u.leaf.chip_state_event),
    [all …]

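The kvaser_usb_leaf.c hits build a table of per-command minimum frame sizes, one entry per command id, each referring to a union arm of the command struct. The listing does not show how kvaser_fsize() is actually defined, so the sketch below uses an assumed offsetof/sizeof construction and invented struct layouts purely to illustrate the validation pattern.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct cmd {
    uint8_t len;
    uint8_t id;
    union {
        struct { uint8_t tid; } simple;
        struct { uint8_t channel; uint32_t serial; } cardinfo;
    } u;
};

/* Smallest frame that still contains the whole union arm "field". */
#define min_size(field) \
    (offsetof(struct cmd, field) + sizeof(((struct cmd *)0)->field))

enum { CMD_SIMPLE_REPLY = 0, CMD_CARD_INFO_REPLY = 1 };

static const size_t min_len[] = {
    [CMD_SIMPLE_REPLY]    = min_size(u.simple),
    [CMD_CARD_INFO_REPLY] = min_size(u.cardinfo),
};

int main(void)
{
    size_t received = 4;  /* pretend this many bytes arrived */

    /* Reject frames too short to hold the fields the handler will read. */
    if (received < min_len[CMD_CARD_INFO_REPLY])
        printf("short CMD_CARD_INFO_REPLY frame: %zu < %zu bytes\n",
               received, min_len[CMD_CARD_INFO_REPLY]);
    return 0;
}
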
/drivers/tee/optee/ |
core.c |
    54  memset(&p->u, 0, sizeof(p->u));  in optee_from_msg_param()
    61  p->u.value.a = mp->u.value.a;  in optee_from_msg_param()
    62  p->u.value.b = mp->u.value.b;  in optee_from_msg_param()
    63  p->u.value.c = mp->u.value.c;  in optee_from_msg_param()
    70  p->u.memref.size = mp->u.tmem.size;  in optee_from_msg_param()
    72  mp->u.tmem.shm_ref;  in optee_from_msg_param()
    74  p->u.memref.shm_offs = 0;  in optee_from_msg_param()
    75  p->u.memref.shm = NULL;  in optee_from_msg_param()
    81  p->u.memref.shm_offs = mp->u.tmem.buf_ptr - pa;  in optee_from_msg_param()
    82  p->u.memref.shm = shm;  in optee_from_msg_param()
    [all …]

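optee_from_msg_param() above translates firmware message parameters (mp->u.value, mp->u.tmem) into the kernel's own parameter representation (p->u.value, p->u.memref), copying only the union arm selected by the parameter's attribute and turning a buffer pointer into an offset within shared memory. Below is a simplified sketch of that kind of tagged-union translation; both parameter structs and the attribute values are invented for illustration and are not the OP-TEE ABI.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

enum wire_attr { WIRE_ATTR_NONE, WIRE_ATTR_VALUE, WIRE_ATTR_MEMREF };

struct wire_param {                       /* "message" side layout */
    enum wire_attr attr;
    union {
        struct { uint64_t a, b, c; } value;
        struct { uint64_t buf_ptr, size; } tmem;
    } u;
};

struct internal_param {                   /* in-kernel style layout */
    enum wire_attr attr;
    union {
        struct { uint64_t a, b, c; } value;
        struct { uint64_t shm_offs, size; } memref;
    } u;
};

static int from_wire_param(struct internal_param *p,
                           const struct wire_param *mp, uint64_t shm_base)
{
    memset(&p->u, 0, sizeof(p->u));
    p->attr = mp->attr;

    switch (mp->attr) {
    case WIRE_ATTR_NONE:
        break;
    case WIRE_ATTR_VALUE:
        p->u.value.a = mp->u.value.a;
        p->u.value.b = mp->u.value.b;
        p->u.value.c = mp->u.value.c;
        break;
    case WIRE_ATTR_MEMREF:
        /* Translate an absolute pointer into an offset in shared memory. */
        p->u.memref.shm_offs = mp->u.tmem.buf_ptr - shm_base;
        p->u.memref.size = mp->u.tmem.size;
        break;
    default:
        return -1;
    }
    return 0;
}

int main(void)
{
    struct wire_param mp = { .attr = WIRE_ATTR_MEMREF,
                             .u.tmem = { .buf_ptr = 0x1040, .size = 64 } };
    struct internal_param p;

    if (from_wire_param(&p, &mp, 0x1000) == 0)
        printf("memref offs=%llu size=%llu\n",
               (unsigned long long)p.u.memref.shm_offs,
               (unsigned long long)p.u.memref.size);
    return 0;
}
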
/drivers/hid/ |
uhid.c |
    139  ev->u.start.dev_flags |= UHID_DEV_NUMBERED_FEATURE_REPORTS;  in uhid_hid_start()
    141  ev->u.start.dev_flags |= UHID_DEV_NUMBERED_OUTPUT_REPORTS;  in uhid_hid_start()
    143  ev->u.start.dev_flags |= UHID_DEV_NUMBERED_INPUT_REPORTS;  in uhid_hid_start()
    248  ev->u.get_report.rnum = rnum;  in uhid_hid_get_report()
    249  ev->u.get_report.rtype = rtype;  in uhid_hid_get_report()
    258  ret = __uhid_report_queue_and_wait(uhid, ev, &ev->u.get_report.id);  in uhid_hid_get_report()
    262  req = &uhid->report_buf.u.get_report_reply;  in uhid_hid_get_report()
    290  ev->u.set_report.rnum = rnum;  in uhid_hid_set_report()
    291  ev->u.set_report.rtype = rtype;  in uhid_hid_set_report()
    292  ev->u.set_report.size = count;  in uhid_hid_set_report()
    [all …]

/drivers/net/wireless/ath/ath11k/ |
hal.c |
    262  srng->u.dst_ring.max_buffer_length);  in ath11k_hal_ce_dst_setup()
    319  ((unsigned long)srng->u.dst_ring.hp_addr -  in ath11k_hal_srng_dst_hw_init()
    330  *srng->u.dst_ring.hp_addr = 0;  in ath11k_hal_srng_dst_hw_init()
    413  srng->u.src_ring.low_threshold);  in ath11k_hal_srng_src_hw_init()
    421  ((unsigned long)srng->u.src_ring.tp_addr -  in ath11k_hal_srng_src_hw_init()
    435  *srng->u.src_ring.tp_addr = 0;  in ath11k_hal_srng_src_hw_init()
    518  params->low_threshold = srng->u.src_ring.low_threshold;  in ath11k_hal_srng_get_params()
    532  ((unsigned long)srng->u.src_ring.hp_addr -  in ath11k_hal_srng_get_hp_addr()
    536  ((unsigned long)srng->u.dst_ring.hp_addr -  in ath11k_hal_srng_get_hp_addr()
    548  ((unsigned long)srng->u.src_ring.tp_addr -  in ath11k_hal_srng_get_tp_addr()
    [all …]
