
Searched refs: page_address (Results 1 – 25 of 206) sorted by relevance


/drivers/mtd/devices/
block2mtd.c 70 max = page_address(page) + PAGE_SIZE; in _block2mtd_erase()
71 for (p=page_address(page); p<max; p++) in _block2mtd_erase()
74 memset(page_address(page), 0xff, PAGE_SIZE); in _block2mtd_erase()
124 memcpy(buf, page_address(page) + offset, cpylen); in block2mtd_read()
158 if (memcmp(page_address(page)+offset, buf, cpylen)) { in _block2mtd_write()
160 memcpy(page_address(page) + offset, buf, cpylen); in _block2mtd_write()
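
The block2mtd hits above treat a page-cache page as a plain byte buffer: page_address() gives the kernel virtual address, and memset/memcmp/memcpy do the rest. A minimal sketch of the erase-check pattern, assuming a lowmem page (the helper name is illustrative, not block2mtd's actual code):

    #include <linux/mm.h>
    #include <linux/string.h>

    /* Check whether a page is already erased (all 0xff) and wipe it if not,
     * mirroring the loop-over-page_address() pattern in _block2mtd_erase(). */
    static void erase_page_contents(struct page *page)
    {
        u8 *p = page_address(page);     /* kernel virtual address of the page */
        u8 *max = p + PAGE_SIZE;

        for (; p < max; p++) {
            if (*p != 0xff) {
                memset(page_address(page), 0xff, PAGE_SIZE);
                break;
            }
        }
    }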
/drivers/gpu/drm/v3d/
v3d_mmu.c 97 u32 page_address = dma_addr >> V3D_MMU_PAGE_SHIFT; in v3d_mmu_insert_ptes() local
98 u32 pte = page_prot | page_address; in v3d_mmu_insert_ptes()
101 BUG_ON(page_address + (PAGE_SIZE >> V3D_MMU_PAGE_SHIFT) >= in v3d_mmu_insert_ptes()
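
In v3d_mmu.c, page_address is a local u32, not the mm helper: the DMA address shifted down by V3D_MMU_PAGE_SHIFT and OR'ed with the protection bits to form a page-table entry. A hedged sketch of that PTE construction (the shift constant here is a stand-in; the real driver then stores the PTEs into its page-table buffer):

    #include <linux/types.h>

    #define MMU_PAGE_SHIFT 12   /* illustrative stand-in for V3D_MMU_PAGE_SHIFT */

    /* Build one 32-bit PTE from a DMA address and protection flags,
     * following the "pte = page_prot | (dma_addr >> shift)" pattern above. */
    static u32 build_pte(dma_addr_t dma_addr, u32 page_prot)
    {
        u32 page_address = dma_addr >> MMU_PAGE_SHIFT;

        return page_prot | page_address;
    }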
/drivers/net/ethernet/google/gve/
gve_rx.c 87 page_info->page_address = page_address(page); in gve_setup_rx_buffer()
163 rx->qpl_copy_pool[j].page_address = page_address(page); in gve_prefill_rx_pages()
456 void *src = page_info->page_address + page_info->page_offset; in gve_rx_copy_to_pool()
490 alloc_page_info.page_address = page_address(page); in gve_rx_copy_to_pool()
493 memcpy(alloc_page_info.page_address, src, page_info->pad + len); in gve_rx_copy_to_pool()
506 dst = copy_page_info->page_address + copy_page_info->page_offset; in gve_rx_copy_to_pool()
756 va = page_info->page_address + page_info->page_offset; in gve_rx()
778 xdp_prepare_buff(&xdp, page_info->page_address + in gve_rx()
gve_utils.c 54 void *va = page_info->page_address + page_info->page_offset + in gve_rx_copy()
gve_rx_dqo.c 191 buf_state->page_info.page_address = in gve_alloc_page_dqo()
192 page_address(buf_state->page_info.page); in gve_alloc_page_dqo()
542 memcpy(page_address(page), in gve_rx_copy_ondemand()
543 buf_state->page_info.page_address + in gve_rx_copy_ondemand()
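
The gve driver resolves page_address(page) once when a receive buffer is set up and caches it in its page_info, so the hot RX path can do plain pointer arithmetic (page_address + page_offset) without touching struct page again. A minimal sketch of that caching pattern, using an illustrative descriptor rather than gve's real struct:

    #include <linux/mm.h>
    #include <linux/gfp.h>
    #include <linux/errno.h>

    /* Illustrative per-buffer descriptor; gve's slot page_info carries more state. */
    struct rx_page_info {
        struct page *page;
        void *page_address;     /* cached page_address(page) */
        u32 page_offset;
    };

    static int setup_rx_buffer(struct rx_page_info *info)
    {
        struct page *page = alloc_page(GFP_KERNEL);

        if (!page)
            return -ENOMEM;

        info->page = page;
        info->page_address = page_address(page);  /* resolve once at setup */
        info->page_offset = 0;
        return 0;
    }

    /* RX path: the payload address is just cached base + offset. */
    static void *rx_buf_va(const struct rx_page_info *info)
    {
        return info->page_address + info->page_offset;
    }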
/drivers/scsi/mpi3mr/
mpi3mr_fw.c 5262 u32 page_address; in mpi3mr_cfg_get_dev_pg0() local
5272 cfg_req.page_address = 0; in mpi3mr_cfg_get_dev_pg0()
5285 page_address = ((form & MPI3_DEVICE_PGAD_FORM_MASK) | in mpi3mr_cfg_get_dev_pg0()
5287 cfg_req.page_address = cpu_to_le32(page_address); in mpi3mr_cfg_get_dev_pg0()
5322 u32 page_address; in mpi3mr_cfg_get_sas_phy_pg0() local
5332 cfg_req.page_address = 0; in mpi3mr_cfg_get_sas_phy_pg0()
5345 page_address = ((form & MPI3_SAS_PHY_PGAD_FORM_MASK) | in mpi3mr_cfg_get_sas_phy_pg0()
5347 cfg_req.page_address = cpu_to_le32(page_address); in mpi3mr_cfg_get_sas_phy_pg0()
5381 u32 page_address; in mpi3mr_cfg_get_sas_phy_pg1() local
5391 cfg_req.page_address = 0; in mpi3mr_cfg_get_sas_phy_pg1()
[all …]
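
Note that in mpi3mr_fw.c, page_address is not the mm helper at all: it is a u32 field of the MPI3 configuration request, composed from a "form" selector and a device/phy handle and stored little-endian. A hedged sketch of that composition (the mask is an illustrative local, not the real MPI3_*_PGAD_FORM_MASK definition):

    #include <linux/types.h>
    #include <asm/byteorder.h>

    #define PGAD_FORM_MASK 0xf0000000   /* illustrative stand-in for the MPI3 form mask */

    /* Compose the config-page address the firmware expects:
     * form selector bits OR'ed with the target handle, little-endian on the wire. */
    static __le32 cfg_page_address(u32 form, u16 handle)
    {
        u32 page_address = (form & PGAD_FORM_MASK) | handle;

        return cpu_to_le32(page_address);
    }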
/drivers/gpu/drm/
drm_scatter.c 160 tmp = page_address(entry->pagelist[i]); in drm_legacy_sg_alloc()
178 tmp = page_address(entry->pagelist[i]); in drm_legacy_sg_alloc()
/drivers/net/ethernet/sfc/falcon/
rx.c 59 return page_address(buf->page) + buf->page_offset; in ef4_rx_buf_va()
130 state = page_address(page); in ef4_reuse_page()
180 state = page_address(page); in ef4_init_rx_buffers()
183 state = page_address(page); in ef4_init_rx_buffers()
219 struct ef4_rx_page_state *state = page_address(page); in ef4_unmap_rx_buffer()
814 state = page_address(page); in ef4_fini_rx_queue()
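
The sfc/falcon RX path (and the sfc and sfc/siena copies further down) stores a small per-page recycle state structure at the very start of each RX page and reaches it through page_address(). A sketch of that layout, with an illustrative state struct in place of struct ef4_rx_page_state:

    #include <linux/mm.h>
    #include <linux/types.h>

    /* Illustrative per-page bookkeeping kept in the page itself;
     * the real ef4/efx state struct records the page's DMA address. */
    struct rx_page_state {
        dma_addr_t dma_addr;
    };

    static void init_rx_page_state(struct page *page, dma_addr_t dma_addr)
    {
        struct rx_page_state *state = page_address(page);

        state->dma_addr = dma_addr;
        /* Packet buffers then start after this header, i.e.
         * page_address(page) + sizeof(*state) plus any alignment padding. */
    }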
/drivers/md/
raid5-ppl.c 256 pplhdr = page_address(io->header_page); in ppl_new_iounit()
313 pplhdr = page_address(io->header_page); in ppl_log_stripe()
350 page_address(sh->ppl_page), in ppl_log_stripe()
432 struct ppl_header *pplhdr = page_address(io->header_page); in ppl_submit_iounit()
853 memset(page_address(page1), 0, PAGE_SIZE); in ppl_recover_entry()
1007 crc = crc32c_le(crc, page_address(page), s); in ppl_recover()
1055 pplhdr = page_address(page); in ppl_write_empty_header()
1110 pplhdr = page_address(page); in ppl_load_distributed()
raid5-cache.c 720 block = page_address(io->meta_page); in r5l_submit_current_io()
784 block = page_address(io->meta_page); in r5l_new_meta()
832 payload = page_address(io->meta_page) + io->meta_offset; in r5l_append_payload_meta()
890 payload = page_address(io->meta_page) + io->meta_offset; in r5l_append_flush_payload()
1720 memcpy(page_address(page), in r5l_recovery_read_page()
1721 page_address(ctx->ra_pool[(offset - ctx->pool_offset) >> in r5l_recovery_read_page()
1739 mb = page_address(page); in r5l_recovery_read_meta_block()
1768 mb = page_address(page); in r5l_recovery_create_empty_meta_block()
1787 mb = page_address(page); in r5l_log_write_empty_meta_block()
2005 struct r5l_meta_block *mb = page_address(ctx->meta_page); in r5l_recovery_verify_data_checksum_for_mb()
[all …]
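
Both raid5-ppl.c and raid5-cache.c build their on-disk metadata blocks inside ordinary pages: page_address() gives the header pointer, the page is zeroed or filled, and a checksum is computed over the mapped contents. A hedged sketch of the checksum-over-page step (using crc32c() from <linux/crc32c.h>; the hits above use the crc32c_le spelling, and the real seed and layout are the drivers' own):

    #include <linux/mm.h>
    #include <linux/string.h>
    #include <linux/crc32c.h>

    /* Zero a metadata page and return the CRC of its first 'len' bytes,
     * mirroring the memset/crc32c-over-page_address() pattern above. */
    static u32 checksum_meta_page(struct page *page, unsigned int len)
    {
        void *buf = page_address(page);

        memset(buf, 0, PAGE_SIZE);
        /* ... header fields would be filled in here ... */
        return crc32c(~0, buf, len);
    }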
/drivers/net/vmxnet3/
vmxnet3_xdp.c 308 skb = build_skb(page_address(page), PAGE_SIZE); in vmxnet3_build_skb()
342 xdp_prepare_buff(&xdp, page_address(page), rq->page_pool->p.offset, in vmxnet3_process_xdp_small()
389 xdp_prepare_buff(&xdp, page_address(page), rq->page_pool->p.offset, in vmxnet3_process_xdp()
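
vmxnet3 (like the thunderbolt driver below) hands the page's kernel virtual address straight to build_skb(), so the skb head reuses the page memory instead of copying. A minimal sketch of that pattern, assuming the buffer was laid out with headroom and tailroom for a whole-page skb:

    #include <linux/mm.h>
    #include <linux/skbuff.h>

    /* Wrap a received page in an skb without copying:
     * build_skb() takes the page's virtual address and the buffer size. */
    static struct sk_buff *page_to_skb_nocopy(struct page *page,
                                              unsigned int headroom,
                                              unsigned int data_len)
    {
        struct sk_buff *skb = build_skb(page_address(page), PAGE_SIZE);

        if (!skb)
            return NULL;

        skb_reserve(skb, headroom);   /* skip driver/XDP headroom */
        skb_put(skb, data_len);       /* expose the received payload */
        return skb;
    }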
/drivers/net/wireless/intel/iwlwifi/fw/
paging.c 152 memcpy(page_address(fwrt->fw_paging_db[0].fw_paging_block), in iwl_fill_paging_mem()
199 memcpy(page_address(block->fw_paging_block), in iwl_fill_paging_mem()
/drivers/net/thunderbolt/
main.c 842 hdr = page_address(page); in tbnet_poll()
858 skb = build_skb(page_address(page), in tbnet_poll()
992 struct thunderbolt_ip_frame_header *hdr = page_address(frames[0]->page); in tbnet_xmit_csum_and_map()
1008 hdr = page_address(frames[i]->page); in tbnet_xmit_csum_and_map()
1070 hdr = page_address(frames[i]->page); in tbnet_xmit_csum_and_map()
1131 hdr = page_address(frames[frame_index]->page); in tbnet_start_xmit()
1181 hdr = page_address(frames[frame_index]->page); in tbnet_start_xmit()
/drivers/gpu/drm/vmwgfx/
vmwgfx_validation.c 130 ctx->page_address = page_address(page); in vmw_validation_mem_alloc()
134 addr = (void *) (ctx->page_address + (PAGE_SIZE - ctx->mem_size_left)); in vmw_validation_mem_alloc()
vmwgfx_mksstat.h 125 …pstat = vmw_mksstat_get_kern_pstat(page_address(dev_priv->mksstat_kern_pages[_##kern_cntr.slot]));…
vmwgfx_validation.h 70 u8 *page_address; member
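
vmwgfx_validation keeps a cached page_address plus a "bytes left" counter in its context and carves small allocations out of the current page. A hedged sketch of that bump-allocation scheme with an illustrative context struct (size rounding and page lifetime tracking omitted; start from a zeroed context):

    #include <linux/mm.h>
    #include <linux/gfp.h>

    /* Illustrative allocator context; the real fields live in the
     * vmwgfx validation context (page_address / mem_size_left members). */
    struct page_suballoc {
        u8 *page_address;           /* cached page_address() of the current page */
        unsigned int mem_size_left; /* bytes still free in that page */
    };

    /* Hand out 'size' bytes (size <= PAGE_SIZE) from the current page,
     * grabbing a fresh page when the remainder is too small. */
    static void *suballoc(struct page_suballoc *ctx, unsigned int size)
    {
        void *addr;

        if (size > ctx->mem_size_left) {
            struct page *page = alloc_page(GFP_KERNEL);

            if (!page)
                return NULL;

            ctx->page_address = page_address(page);
            ctx->mem_size_left = PAGE_SIZE;
        }

        addr = ctx->page_address + (PAGE_SIZE - ctx->mem_size_left);
        ctx->mem_size_left -= size;
        return addr;
    }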
/drivers/net/
virtio_net.c 520 p = page_address(page) + offset; in page_to_skb()
614 head = page_address(page); in virtnet_rq_unmap()
659 head = page_address(rq->alloc_frag.page); in virtnet_rq_init_one_sg()
682 head = page_address(alloc_frag->page); in virtnet_rq_alloc()
1107 memcpy(page_address(page) + page_off, page_address(p) + offset, *len); in xdp_linearize_page()
1120 off = buf - page_address(p); in xdp_linearize_page()
1130 memcpy(page_address(page) + page_off, in xdp_linearize_page()
1131 page_address(p) + off, buflen); in xdp_linearize_page()
1201 int offset = buf - page_address(page) + header_offset; in receive_small_xdp()
1216 buf = page_address(xdp_page); in receive_small_xdp()
[all …]
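
virtio_net also goes in the other direction: given a buffer pointer somewhere inside a page, it recovers the offset by subtracting page_address(page), and xdp_linearize_page() copies fragments into a fresh page at increasing offsets. A sketch of the offset math and the copy step (helper names are illustrative):

    #include <linux/mm.h>
    #include <linux/string.h>
    #include <linux/errno.h>

    /* Recover the byte offset of 'buf' within its (lowmem) page. */
    static unsigned int buf_offset_in_page(struct page *page, const void *buf)
    {
        return buf - page_address(page);
    }

    /* Append one fragment into a linearization page at '*page_off',
     * following the memcpy(page_address(page) + page_off, ...) pattern above. */
    static int append_frag(struct page *dst_page, unsigned int *page_off,
                           struct page *src_page, unsigned int src_off,
                           unsigned int len)
    {
        if (*page_off + len > PAGE_SIZE)
            return -ENOSPC;

        memcpy(page_address(dst_page) + *page_off,
               page_address(src_page) + src_off, len);
        *page_off += len;
        return 0;
    }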
/drivers/net/wireless/mediatek/mt7601u/
dma.c 62 skb_add_rx_frag(skb, 0, p, data - page_address(p), in mt7601u_rx_skb_from_seg()
137 u8 *data = page_address(e->p); in mt7601u_rx_process_entry()
402 u8 *buf = page_address(e->p); in mt7601u_submit_rx_buf()
/drivers/net/ethernet/sfc/siena/
rx_common.c 61 state = page_address(page); in efx_reuse_page()
167 state = page_address(page); in efx_fini_rx_recycle_ring()
327 struct efx_rx_page_state *state = page_address(page); in efx_unmap_rx_buffer()
399 state = page_address(page); in efx_init_rx_buffers()
402 state = page_address(page); in efx_init_rx_buffers()
rx_common.h 29 return page_address(buf->page) + buf->page_offset; in efx_rx_buf_va()
/drivers/net/ethernet/sfc/
rx_common.c 58 state = page_address(page); in efx_reuse_page()
164 state = page_address(page); in efx_fini_rx_recycle_ring()
327 struct efx_rx_page_state *state = page_address(page); in efx_unmap_rx_buffer()
399 state = page_address(page); in efx_init_rx_buffers()
402 state = page_address(page); in efx_init_rx_buffers()
/drivers/ptp/
ptp_kvm_x86.c 36 clock_pair = page_address(p); in kvm_arch_ptp_init()
/drivers/iommu/
tegra-smmu.c 536 u32 *pd = page_address(as->pd); in tegra_smmu_set_pde()
554 u32 *pt = page_address(pt_page); in tegra_smmu_pte_offset()
571 pd = page_address(as->pd); in tegra_smmu_pte_lookup()
607 u32 *pd = page_address(as->pd); in as_get_pte()
633 u32 *pd = page_address(as->pd); in tegra_smmu_pte_put_use()
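
tegra-smmu keeps its page directory and page tables in ordinary pages and simply casts page_address() to a u32 array to read or update entries. A minimal sketch of that access pattern (attribute bits and cache/TLB flushes omitted):

    #include <linux/mm.h>
    #include <linux/types.h>

    /* Write one 32-bit entry into a page-table page.
     * The real driver also ORs in attribute bits and flushes afterwards. */
    static void set_pt_entry(struct page *pt_page, unsigned int index, u32 value)
    {
        u32 *pt = page_address(pt_page);

        pt[index] = value;
    }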
/drivers/uio/
uio_hv_generic.c 132 void *ring_buffer = page_address(channel->ringbuffer_page); in hv_uio_ring_mmap()
272 ring_buffer = page_address(channel->ringbuffer_page); in hv_uio_probe()
/drivers/net/wireless/intel/iwlwifi/queue/
tx.c 203 *(void **)((u8 *)page_address(ret) + PAGE_SIZE - sizeof(void *)) = *page_ptr; in get_workaround_page()
261 memcpy(page_address(page), virt, len); in iwl_txq_gen2_set_tb_with_wa()
263 phys = dma_map_single(trans->dev, page_address(page), len, in iwl_txq_gen2_set_tb_with_wa()
315 if (p->pos + len < (u8 *)page_address(p->page) + PAGE_SIZE - in get_page_hdr()
326 p->pos = page_address(p->page); in get_page_hdr()
328 *(void **)((u8 *)page_address(p->page) + PAGE_SIZE - sizeof(void *)) = NULL; in get_page_hdr()
984 next = *(void **)((u8 *)page_address(next) + PAGE_SIZE - in iwl_txq_free_tso_page()
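
The iwlwifi TSO/workaround page allocator chains its pages together by storing a "next page" pointer in the last sizeof(void *) bytes of each page, again reached through page_address(). A hedged sketch of that trailer-pointer linked list (helper names are illustrative):

    #include <linux/mm.h>
    #include <linux/gfp.h>

    /* Pointer to the trailer slot that holds the next page in the chain. */
    static void **page_next_slot(struct page *page)
    {
        return (void **)((u8 *)page_address(page) + PAGE_SIZE - sizeof(void *));
    }

    /* Push a freshly allocated page onto the chain headed by *head. */
    static struct page *chain_new_page(struct page **head)
    {
        struct page *page = alloc_page(GFP_KERNEL);

        if (!page)
            return NULL;

        *page_next_slot(page) = *head;  /* remember the old head in the trailer */
        *head = page;
        return page;
    }

    /* Walk the trailer pointers and free the whole chain. */
    static void chain_free(struct page *head)
    {
        while (head) {
            struct page *next = *page_next_slot(head);

            __free_page(head);
            head = next;
        }
    }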
