
Searched refs:__pa (Results 1 – 25 of 65) sorted by relevance


/drivers/char/hw_random/
n2-drv.c
149 unsigned long ra = __pa(&np->scratch_control[0]); in n2rng_control_settle_v2()
391 unsigned long ra = __pa(&np->test_data); in n2rng_data_read()
425 unsigned long ra = __pa(&np->test_data); in n2rng_guest_check()
435 unsigned long post_ctl_ra = __pa(post_control); in n2rng_entropy_diag_read()
436 unsigned long pre_ctl_ra = __pa(pre_control); in n2rng_entropy_diag_read()
437 unsigned long buffer_ra = __pa(buffer); in n2rng_entropy_diag_read()
559 unsigned long ctl_ra = __pa(&up->control[0]); in n2rng_control_configure_units()
/drivers/crypto/nx/
nx.c
108 sg_addr = __pa(sg_addr); in nx_build_sg_list()
242 nx_ctx->op.csbcpb = __pa(nx_ctx->csbcpb); in nx_ctx_init()
243 nx_ctx->op.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
244 nx_ctx->op.out = __pa(nx_ctx->out_sg); in nx_ctx_init()
250 nx_ctx->op_aead.csbcpb = __pa(nx_ctx->csbcpb_aead); in nx_ctx_init()
251 nx_ctx->op_aead.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
252 nx_ctx->op_aead.out = __pa(nx_ctx->out_sg); in nx_ctx_init()
nx-842.c
171 entry->ptr = __pa(buf); in nx842_build_scatterlist()
373 op.csbcpb = __pa(csbcpb); in nx842_compress()
374 op.out = __pa(slout.entries); in nx842_compress()
404 op.in = __pa(inbuf); in nx842_compress()
410 op.in = __pa(slin.entries); in nx842_compress()
568 op.csbcpb = __pa(csbcpb); in nx842_decompress()
600 op.in = __pa(inbuf); in nx842_decompress()
605 op.in = __pa(slin.entries); in nx842_decompress()
616 op.out = __pa(outbuf); in nx842_decompress()
621 op.out = __pa(slout.entries); in nx842_decompress()
/drivers/s390/net/
claw.c
1062 p_last->r_TIC_1.cda =(__u32)__pa(&p_end->read2_nop1); in add_claw_reads()
1063 p_last->r_TIC_2.cda =(__u32)__pa(&p_end->read2_nop1); in add_claw_reads()
1072 p_last->r_TIC_1.cda = (__u32)__pa(&p_end->read1_nop1); in add_claw_reads()
1073 p_last->r_TIC_2.cda = (__u32)__pa(&p_end->read1_nop1); in add_claw_reads()
1085 temp_ccw.cda= (__u32)__pa(&p_first->read); in add_claw_reads()
1102 (__u32)__pa(&p_first->read); in add_claw_reads()
1104 (__u32)__pa(&p_first->read); in add_claw_reads()
1112 (__u32)__pa(&p_first->read); in add_claw_reads()
1114 (__u32)__pa(&p_first->read); in add_claw_reads()
1360 (__u32)__pa(&p_this_ccw->write); in claw_hw_tx()
[all …]
/drivers/net/ethernet/ibm/ehea/
ehea_phyp.c
144 __pa(cb_addr), /* R8 */ in ehea_h_query_ehea_qp()
418 __pa(cb_addr), /* R8 */ in ehea_h_modify_ehea_qp()
531 cb_logaddr = __pa(cb_addr); in ehea_h_query_ehea()
548 u64 cb_logaddr = __pa(cb_addr); in ehea_h_query_ehea_port()
570 u64 cb_logaddr = __pa(cb_addr); in ehea_h_modify_ehea_port()
624 __pa(rblock), /* R6 */ in ehea_h_error_data()
ehea_qmr.c
163 rpage = __pa(vpage); in ehea_create_cq()
288 rpage = __pa(vpage); in ehea_create_eq()
393 rpage = __pa(vpage); in ehea_qp_alloc_register()
786 index = __pa(caddr) >> SECTION_SIZE_BITS; in ehea_map_vaddr()
818 u64 pt_abs = __pa(pt); in ehea_reg_mr_section()
826 pt[m] = __pa(pg); in ehea_reg_mr_section()
/drivers/iommu/
exynos-iommu.c
793 ret = __exynos_sysmmu_enable(data, __pa(priv->pgtable), domain); in exynos_iommu_attach_device()
806 __func__, __pa(priv->pgtable)); in exynos_iommu_attach_device()
810 __func__, __pa(priv->pgtable)); in exynos_iommu_attach_device()
813 __func__, __pa(priv->pgtable)); in exynos_iommu_attach_device()
842 __func__, __pa(priv->pgtable)); in exynos_iommu_detach_device()
847 __func__, __pa(priv->pgtable)); in exynos_iommu_detach_device()
868 *sent = mk_lv1ent_page(__pa(pent)); in alloc_lv2entry()
amd_iommu_v2.c
447 __pa(empty_page_table)); in mn_invalidate_range_start()
461 __pa(pasid_state->mm->pgd)); in mn_invalidate_range_end()
702 __pa(pasid_state->mm->pgd)); in amd_iommu_bind_pasid()
/drivers/tty/serial/
sunhv.c
62 unsigned long ra = __pa(xmit->buf + xmit->tail); in transmit_chars_write()
124 unsigned long ra = __pa(con_read_page); in receive_chars_read()
445 unsigned long ra = __pa(con_write_page); in sunhv_console_write_paged()
559 port->membase = (unsigned char __iomem *) __pa(port); in hv_probe()
/drivers/s390/char/
raw3270.c
157 rq->ccw.cda = __pa(rq->buffer); in raw3270_request_alloc()
182 rq->ccw.cda = __pa(rq->buffer); in raw3270_request_reset()
216 rq->ccw.cda = __pa(data); in raw3270_request_set_data()
226 rq->ccw.cda = __pa(ib->data); in raw3270_request_set_idal()
584 rp->init_readmod.ccw.cda = (__u32) __pa(rp->init_data); in raw3270_read_modified()
603 rp->init_readpart.ccw.cda = (__u32) __pa(&rp->init_data); in raw3270_writesf_readpart()
640 rp->init_reset.ccw.cda = (__u32) __pa(rp->init_data); in __raw3270_reset_device()
/drivers/infiniband/hw/ehca/
hcp_if.c
379 u64 r_cb = __pa(query_port_response_block); in hipz_h_query_port()
421 u64 r_cb = __pa(query_hca_rblock); in hipz_h_query_hca()
560 __pa(mqpcb), /* r7 */ in hipz_h_modify_qp()
578 __pa(qqpcb), /* r6 */ in hipz_h_query_qp()
927 u64 r_cb = __pa(rblock); in hipz_h_error_data()
ehca_eq.c
104 rpage = __pa(vpage); in ehca_create_eq()
/drivers/staging/goldfish/
README
10 - Use dma coherent memory not kmalloc/__pa for the memory (this is just
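
The goldfish README note above is a TODO: hand the device an address obtained through the DMA API instead of __pa() on a kmalloc'd buffer. Below is a minimal sketch of the two patterns, assuming a hypothetical driver with a struct device *dev and a page-sized buffer; buf_legacy() and buf_coherent() are illustrative names, not code from the goldfish driver.

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/slab.h>

/* Discouraged: __pa() of a kmalloc'd buffer is only meaningful when the
 * device sees raw physical addresses (no IOMMU, no bounce buffering) and
 * cache coherency is handled by hand. */
static void *buf_legacy(struct device *dev, dma_addr_t *bus)
{
	void *cpu = kmalloc(PAGE_SIZE, GFP_KERNEL);

	if (cpu)
		*bus = (dma_addr_t)__pa(cpu);
	return cpu;
}

/* Preferred: dma_alloc_coherent() returns a kernel pointer plus a
 * dma_addr_t that is valid for this particular device. */
static void *buf_coherent(struct device *dev, dma_addr_t *bus)
{
	return dma_alloc_coherent(dev, PAGE_SIZE, bus, GFP_KERNEL);
}

The coherent buffer is released with dma_free_coherent() on the matching teardown path, whereas the legacy buffer is a plain kfree().
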
/drivers/watchdog/
wdrtas.c
126 WDRTAS_SP_SPI, __pa(rtas_data_buf), in wdrtas_get_interval()
203 (void *)__pa(wdrtas_logbuffer), in wdrtas_timer_keepalive()
/drivers/lguest/x86/
core.c
102 pages->state.host_cr3 = __pa(current->mm->pgd); in copy_in_guest_info()
172 : "0"(pages), "1"(__pa(cpu->lg->pgdirs[cpu->cpu_pgd].pgdir)) in run_guest_once()
/drivers/char/
mspec.c
119 nid = nasid_to_cnodeid(get_node_number(__pa(addr))); in mspec_zero_block()
379 phys = __pa(scratch_page[nid]); in mspec_init()
mem.c
54 return addr + count <= __pa(high_memory); in valid_phys_addr_range()
259 return addr >= __pa(high_memory); in uncached_access()
350 pfn = __pa((u64)vma->vm_pgoff << PAGE_SHIFT) >> PAGE_SHIFT; in mmap_kmem()
/drivers/s390/block/
xpram.c
100 : "+d" (cc) : "a" (__pa(page_addr)), "d" (xpage_index) : "cc"); in xpram_page_in()
130 : "+d" (cc) : "a" (__pa(page_addr)), "d" (xpage_index) : "cc"); in xpram_page_out()
/drivers/staging/cxt1e1/
sbecom_inline_linux.h
138 return __pa (addr); in OS_vtophys()
/drivers/crypto/
n2_core.c
558 ent->src_addr = __pa(walk.data); in n2_do_async_digest()
560 ent->auth_iv_addr = __pa(hash_loc); in n2_do_async_digest()
564 ent->dest_addr = __pa(hash_loc); in n2_do_async_digest()
571 ent->src_addr = __pa(walk.data); in n2_do_async_digest()
645 __pa(&ctx->hash_key), in n2_hmac_async_digest()
855 ent->enc_key_addr = __pa(&ctx->key); in __n2_crypt_chunk()
1049 iv_paddr = __pa(rctx->walk.iv); in n2_do_chaining()
1066 iv_paddr = __pa(rctx->walk.iv); in n2_do_chaining()
1673 hv_ret = sun4v_ncs_qconf(q_type, __pa(p->q), in spu_queue_register()
/drivers/misc/sgi-xp/
xp_sn2.c
83 return __pa(addr); in xp_pa_sn2()
/drivers/net/ethernet/brocade/bna/
bfa_ioc.h
66 #define bfa_alen_set(__alen, __len, __pa) \ argument
67 __bfa_alen_set(__alen, __len, (u64)__pa)
/drivers/lguest/
page_tables.c
325 set_pgd(spgd, __pgd(__pa(ptepage) | pgd_flags)); in find_spte()
359 set_pmd(spmd, __pmd(__pa(ptepage) | pmd_flags)); in find_spte()
1164 regs_page = pfn_to_page(__pa(cpu->regs_page) >> PAGE_SHIFT); in map_switcher_in_guest()
/drivers/platform/goldfish/
goldfish_pipe.c
172 if (paddr != (__pa(aps))) in valid_batchbuffer_addr()
189 paddr = __pa(aps); in setup_access_params_addr()
/drivers/xen/
xencomm.c
146 xencomm_free((struct xencomm_handle *)__pa(desc)); in xencomm_create()
