Searched refs:prot (Results 1 – 25 of 112) sorted by relevance


/drivers/net/wireless/mediatek/mt7601u/
mac.c
205 u32 prot[6]; in mt7601u_mac_set_protection() local
209 prot[0] = MT_PROT_NAV_SHORT | in mt7601u_mac_set_protection()
212 prot[1] = prot[0]; in mt7601u_mac_set_protection()
214 prot[1] |= MT_PROT_CTRL_CTS2SELF; in mt7601u_mac_set_protection()
216 prot[2] = prot[4] = MT_PROT_NAV_SHORT | MT_PROT_TXOP_ALLOW_BW20; in mt7601u_mac_set_protection()
217 prot[3] = prot[5] = MT_PROT_NAV_SHORT | MT_PROT_TXOP_ALLOW_ALL; in mt7601u_mac_set_protection()
220 prot[2] |= MT_PROT_RATE_CCK_11; in mt7601u_mac_set_protection()
221 prot[3] |= MT_PROT_RATE_CCK_11; in mt7601u_mac_set_protection()
222 prot[4] |= MT_PROT_RATE_CCK_11; in mt7601u_mac_set_protection()
223 prot[5] |= MT_PROT_RATE_CCK_11; in mt7601u_mac_set_protection()
[all …]
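
Note: the mac.c hits above fill a six-slot protection table, one entry per PHY mode, and optionally force the HT slots down to an 11 Mbps CCK protection rate. A compilable sketch of that table shape; the PROT_* bit values here are placeholders, not the driver's MT_PROT_* register macros:

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder bits; the driver's real MT_PROT_* macros differ. */
    #define PROT_NAV_SHORT       (1u << 0)
    #define PROT_CTRL_CTS2SELF   (1u << 1)
    #define PROT_TXOP_ALLOW_BW20 (1u << 2)
    #define PROT_TXOP_ALLOW_ALL  (1u << 3)
    #define PROT_RATE_CCK_11     (1u << 4)

    /* Mirrors the shape of mt7601u_mac_set_protection(): slots 0/1
     * cover legacy CCK/OFDM, 2-5 cover the HT modes. */
    static void build_prot_table(uint32_t prot[6], int cts2self,
                                 int use_cck_rate)
    {
            prot[0] = PROT_NAV_SHORT;
            prot[1] = prot[0];
            if (cts2self)
                    prot[1] |= PROT_CTRL_CTS2SELF;

            prot[2] = prot[4] = PROT_NAV_SHORT | PROT_TXOP_ALLOW_BW20;
            prot[3] = prot[5] = PROT_NAV_SHORT | PROT_TXOP_ALLOW_ALL;

            /* Fall back to a CCK protection rate when requested. */
            if (use_cck_rate)
                    for (int i = 2; i < 6; i++)
                            prot[i] |= PROT_RATE_CCK_11;
    }

    int main(void)
    {
            uint32_t prot[6];

            build_prot_table(prot, 1, 1);
            printf("prot[1]=0x%x prot[5]=0x%x\n", prot[1], prot[5]);
            return 0;
    }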
/drivers/net/ethernet/mellanox/mlx4/
mcg.c
158 u32 prot; in new_steering_entry() local
206 prot = be32_to_cpu(mgm->members_count) >> 30; in new_steering_entry()
221 mgm->members_count = cpu_to_be32(members_count | (prot << 30)); in new_steering_entry()
428 u32 prot; in add_promisc_qp() local
474 prot = be32_to_cpu(mgm->members_count) >> 30; in add_promisc_qp()
505 (prot << 30)); in add_promisc_qp()
695 u8 *gid, enum mlx4_protocol prot, in find_entry() argument
704 u8 op_mod = (prot == MLX4_PROT_ETH) ? in find_entry()
739 be32_to_cpu(mgm->members_count) >> 30 == prot) in find_entry()
1105 int block_mcast_loopback, enum mlx4_protocol prot, in mlx4_qp_attach_common() argument
[all …]
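
Note: the mcg.c hits keep the steering protocol in the top two bits of the big-endian members_count word: >> 30 extracts it, | (prot << 30) stores it back. A minimal userspace sketch of the pack/unpack scheme; the helper names and the 30-bit count layout are invented for illustration, only the shift-by-30 trick comes from the driver:

    #include <stdint.h>
    #include <stdio.h>
    #include <arpa/inet.h> /* htonl/ntohl stand in for cpu_to_be32/be32_to_cpu */

    /* Hypothetical layout: protocol in bits 31:30, count below. */
    static uint32_t pack_members(uint32_t count, uint32_t prot)
    {
            return htonl((count & 0x3fffffffu) | (prot << 30));
    }

    static uint32_t unpack_prot(uint32_t members_be)
    {
            return ntohl(members_be) >> 30;
    }

    static uint32_t unpack_count(uint32_t members_be)
    {
            return ntohl(members_be) & 0x3fffffffu;
    }

    int main(void)
    {
            uint32_t word = pack_members(5, 2 /* some protocol id */);

            printf("prot=%u count=%u\n", unpack_prot(word), unpack_count(word));
            return 0;
    }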
/drivers/gpu/drm/ttm/
ttm_bo_vm.c
193 pgprot_t prot, in ttm_bo_vm_fault_reserved() argument
229 prot = ttm_io_prot(bo, bo->resource, prot); in ttm_bo_vm_fault_reserved()
242 prot = pgprot_decrypted(prot); in ttm_bo_vm_fault_reserved()
272 ret = vmf_insert_pfn_prot(vma, address, pfn, prot); in ttm_bo_vm_fault_reserved()
297 vm_fault_t ttm_bo_vm_dummy_page(struct vm_fault *vmf, pgprot_t prot) in ttm_bo_vm_dummy_page() argument
321 ret = vmf_insert_pfn_prot(vma, address, pfn, prot); in ttm_bo_vm_dummy_page()
330 pgprot_t prot; in ttm_bo_vm_fault() local
340 prot = vma->vm_page_prot; in ttm_bo_vm_fault()
342 ret = ttm_bo_vm_fault_reserved(vmf, prot, TTM_BO_VM_NUM_PREFAULT); in ttm_bo_vm_fault()
345 ret = ttm_bo_vm_dummy_page(vmf, prot); in ttm_bo_vm_fault()
ttm_bo_util.c
306 pgprot_t prot; in ttm_bo_kmap_ttm() local
329 prot = ttm_io_prot(bo, mem, PAGE_KERNEL); in ttm_bo_kmap_ttm()
332 0, prot); in ttm_bo_kmap_ttm()
425 pgprot_t prot; in ttm_bo_vmap() local
436 prot = ttm_io_prot(bo, mem, PAGE_KERNEL); in ttm_bo_vmap()
437 vaddr = vmap(ttm->pages, ttm->num_pages, 0, prot); in ttm_bo_vmap()
/drivers/gpu/drm/i915/
i915_mm.c
34 pgprot_t prot; member
45 set_pte_at(r->mm, addr, pte, pte_mkspecial(pfn_pte(r->pfn, r->prot))); in remap_pfn()
70 pte_mkspecial(pfn_pte(sgt_pfn(r), r->prot))); in remap_sg()
103 r.prot = __pgprot((pgprot_val(iomap->prot) & _PAGE_CACHE_MASK) | in remap_io_mapping()
131 .prot = vma->vm_page_prot, in remap_io_sg()
/drivers/iommu/
io-pgtable-arm-v7s.c
335 static arm_v7s_iopte arm_v7s_prot_to_pte(int prot, int lvl, in arm_v7s_prot_to_pte() argument
341 if (!(prot & IOMMU_MMIO)) in arm_v7s_prot_to_pte()
345 if (!(prot & IOMMU_PRIV)) in arm_v7s_prot_to_pte()
347 if (!(prot & IOMMU_WRITE)) in arm_v7s_prot_to_pte()
352 if ((prot & IOMMU_NOEXEC) && ap) in arm_v7s_prot_to_pte()
354 if (prot & IOMMU_MMIO) in arm_v7s_prot_to_pte()
356 else if (prot & IOMMU_CACHE) in arm_v7s_prot_to_pte()
368 int prot = IOMMU_READ; in arm_v7s_pte_to_prot() local
372 prot |= IOMMU_WRITE; in arm_v7s_pte_to_prot()
374 prot |= IOMMU_PRIV; in arm_v7s_pte_to_prot()
[all …]
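
Note: arm_v7s_prot_to_pte() and arm_v7s_pte_to_prot() above translate generic IOMMU_* flags into hardware PTE bits and back. A hedged sketch of that roundtrip; the PTE_* bit positions are invented (the real short-descriptor AP/TEX encodings are far more involved), while the IOMMU_* values follow include/linux/iommu.h:

    #include <stdint.h>
    #include <assert.h>

    #define IOMMU_READ   (1 << 0)
    #define IOMMU_WRITE  (1 << 1)
    #define IOMMU_CACHE  (1 << 2)

    /* Invented PTE bits, for the sketch only. */
    #define PTE_VALID    (1u << 0)
    #define PTE_RDONLY   (1u << 1)
    #define PTE_CACHED   (1u << 2)

    static uint32_t prot_to_pte(int prot)
    {
            uint32_t pte = PTE_VALID;

            if (!(prot & IOMMU_WRITE))
                    pte |= PTE_RDONLY;
            if (prot & IOMMU_CACHE)
                    pte |= PTE_CACHED;
            return pte;
    }

    static int pte_to_prot(uint32_t pte)
    {
            int prot = IOMMU_READ;

            if (!(pte & PTE_RDONLY))
                    prot |= IOMMU_WRITE;
            if (pte & PTE_CACHED)
                    prot |= IOMMU_CACHE;
            return prot;
    }

    int main(void)
    {
            int prot = IOMMU_READ | IOMMU_WRITE | IOMMU_CACHE;

            /* The two helpers are inverses for the bits they model. */
            assert(pte_to_prot(prot_to_pte(prot)) == prot);
            return 0;
    }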
io-pgtable-arm.c
267 phys_addr_t paddr, arm_lpae_iopte prot, in __arm_lpae_init_pte() argument
270 arm_lpae_iopte pte = prot; in __arm_lpae_init_pte()
289 arm_lpae_iopte prot, int lvl, int num_entries, in arm_lpae_init_pte() argument
315 __arm_lpae_init_pte(data, paddr, prot, lvl, num_entries, ptep); in arm_lpae_init_pte()
353 arm_lpae_iopte prot, int lvl, arm_lpae_iopte *ptep, in __arm_lpae_map() argument
370 ret = arm_lpae_init_pte(data, iova, paddr, prot, lvl, num_entries, ptep); in __arm_lpae_map()
404 return __arm_lpae_map(data, iova, paddr, size, pgcount, prot, lvl + 1, in __arm_lpae_map()
409 int prot) in arm_lpae_prot_to_pte() argument
415 if (!(prot & IOMMU_WRITE)) in arm_lpae_prot_to_pte()
417 if (!(prot & IOMMU_READ)) in arm_lpae_prot_to_pte()
[all …]
dma-iommu.c
457 int prot = coherent ? IOMMU_CACHE : 0; in dma_info_to_prot() local
460 prot |= IOMMU_PRIV; in dma_info_to_prot()
462 prot |= IOMMU_SYS_CACHE; in dma_info_to_prot()
464 prot |= IOMMU_SYS_CACHE_NWA; in dma_info_to_prot()
468 return prot | IOMMU_READ | IOMMU_WRITE; in dma_info_to_prot()
470 return prot | IOMMU_READ; in dma_info_to_prot()
472 return prot | IOMMU_WRITE; in dma_info_to_prot()
563 size_t size, int prot, u64 dma_mask) in __iommu_dma_map() argument
581 if (iommu_map_atomic(domain, iova, phys - iova_off, size, prot)) { in __iommu_dma_map()
656 size_t size, struct sg_table *sgt, gfp_t gfp, pgprot_t prot, in __iommu_dma_alloc_noncontiguous() argument
[all …]
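
Note: dma_info_to_prot() above derives IOMMU mapping permissions from the DMA API's view of a transfer: coherent devices get IOMMU_CACHE, and the DMA direction selects read and/or write access. A condensed restatement of that visible logic (function and enum names are local stand-ins):

    #include <stdio.h>

    #define IOMMU_READ   (1 << 0)
    #define IOMMU_WRITE  (1 << 1)
    #define IOMMU_CACHE  (1 << 2)

    /* Mirrors enum dma_data_direction for this sketch. */
    enum dma_dir { DMA_BIDIRECTIONAL, DMA_TO_DEVICE, DMA_FROM_DEVICE };

    static int dma_dir_to_prot(enum dma_dir dir, int coherent)
    {
            int prot = coherent ? IOMMU_CACHE : 0;

            switch (dir) {
            case DMA_BIDIRECTIONAL:
                    return prot | IOMMU_READ | IOMMU_WRITE;
            case DMA_TO_DEVICE:
                    return prot | IOMMU_READ;  /* device reads memory */
            case DMA_FROM_DEVICE:
                    return prot | IOMMU_WRITE; /* device writes memory */
            }
            return prot;
    }

    int main(void)
    {
            printf("to-device, coherent: 0x%x\n",
                   dma_dir_to_prot(DMA_TO_DEVICE, 1));
            return 0;
    }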
exynos-iommu.c
122 #define mk_lv1ent_sect(pa, prot) ((pa >> PG_ENT_SHIFT) | LV1_PROT[prot] | 2) argument
124 #define mk_lv2ent_lpage(pa, prot) ((pa >> PG_ENT_SHIFT) | LV2_PROT[prot] | 1) argument
125 #define mk_lv2ent_spage(pa, prot) ((pa >> PG_ENT_SHIFT) | LV2_PROT[prot] | 2) argument
967 phys_addr_t paddr, int prot, short *pgcnt) in lv1set_section() argument
986 exynos_iommu_set_pte(sent, mk_lv1ent_sect(paddr, prot)); in lv1set_section()
1004 int prot, short *pgcnt) in lv2set_page() argument
1010 exynos_iommu_set_pte(pent, mk_lv2ent_spage(paddr, prot)); in lv2set_page()
1026 *pent = mk_lv2ent_lpage(paddr, prot); in lv2set_page()
1065 int prot, gfp_t gfp) in exynos_iommu_map() argument
1074 prot &= SYSMMU_SUPPORTED_PROT_BITS; in exynos_iommu_map()
[all …]
sun50i-iommu.c
269 static u32 sun50i_mk_pte(phys_addr_t page, int prot) in sun50i_mk_pte() argument
274 if ((prot & (IOMMU_READ | IOMMU_WRITE)) == (IOMMU_READ | IOMMU_WRITE)) in sun50i_mk_pte()
276 else if (prot & IOMMU_READ) in sun50i_mk_pte()
278 else if (prot & IOMMU_WRITE) in sun50i_mk_pte()
522 phys_addr_t paddr, size_t size, int prot, gfp_t gfp) in sun50i_iommu_map() argument
542 &iova, &page_phys, &paddr, prot); in sun50i_iommu_map()
547 *pte_addr = sun50i_mk_pte(paddr, prot); in sun50i_iommu_map()
780 unsigned prot) in sun50i_iommu_report_fault() argument
783 &iova, master, (prot == IOMMU_FAULT_WRITE) ? "wr" : "rd"); in sun50i_iommu_report_fault()
786 report_iommu_fault(iommu->domain, iommu->dev, iova, prot); in sun50i_iommu_report_fault()
iommu.c
453 new->prot, new->type); in iommu_insert_resv_region()
838 entry->prot); in iommu_create_device_direct_mappings()
2481 phys_addr_t paddr, size_t size, int prot, in __iommu_map_pages() argument
2494 ret = ops->map_pages(domain, iova, paddr, pgsize, count, prot, in __iommu_map_pages()
2497 ret = ops->map(domain, iova, paddr, pgsize, prot, gfp); in __iommu_map_pages()
2505 phys_addr_t paddr, size_t size, int prot, gfp_t gfp) in __iommu_map() argument
2540 ret = __iommu_map_pages(domain, iova, paddr, size, prot, gfp, in __iommu_map()
2565 phys_addr_t paddr, size_t size, int prot, gfp_t gfp) in _iommu_map() argument
2570 ret = __iommu_map(domain, iova, paddr, size, prot, gfp); in _iommu_map()
2578 phys_addr_t paddr, size_t size, int prot) in iommu_map() argument
[all …]
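
Note: the core __iommu_map() path splits a request into chunks the hardware can actually map, picking a supported page size for each step. A self-contained sketch of that splitting loop; best_pgsize() and map_range() are stand-in names for the kernel's iommu_pgsize() helper and the loop in __iommu_map():

    #include <stdio.h>

    static size_t best_pgsize(unsigned long pgsizes, unsigned long iova,
                              unsigned long paddr, size_t size)
    {
            size_t best = 0;

            /* Walk set bits low to high; the last fit is the largest. */
            for (unsigned long s = pgsizes; s; s &= s - 1) {
                    size_t pg = s & -s;

                    if (pg <= size && !(iova & (pg - 1)) && !(paddr & (pg - 1)))
                            best = pg;
            }
            return best;
    }

    static int map_range(unsigned long pgsizes, unsigned long iova,
                         unsigned long paddr, size_t size)
    {
            while (size) {
                    size_t pg = best_pgsize(pgsizes, iova, paddr, size);

                    if (!pg)
                            return -1; /* no supported size fits */
                    printf("map iova 0x%lx -> pa 0x%lx, %zu bytes\n",
                           iova, paddr, pg);
                    iova += pg;
                    paddr += pg;
                    size -= pg;
            }
            return 0;
    }

    int main(void)
    {
            /* 4K | 2M | 1G, a typical pgsize_bitmap. */
            unsigned long sizes = (1ul << 12) | (1ul << 21) | (1ul << 30);

            return map_range(sizes, 0x200000, 0x40200000, 0x400000);
    }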
rockchip-iommu.c
100 u32 (*mk_ptentries)(phys_addr_t page, int prot);
266 static u32 rk_mk_pte(phys_addr_t page, int prot) in rk_mk_pte() argument
269 flags |= (prot & IOMMU_READ) ? RK_PTE_PAGE_READABLE : 0; in rk_mk_pte()
270 flags |= (prot & IOMMU_WRITE) ? RK_PTE_PAGE_WRITABLE : 0; in rk_mk_pte()
286 static u32 rk_mk_pte_v2(phys_addr_t page, int prot) in rk_mk_pte_v2() argument
290 flags |= (prot & IOMMU_READ) ? RK_PTE_PAGE_READABLE : 0; in rk_mk_pte_v2()
291 flags |= (prot & IOMMU_WRITE) ? RK_PTE_PAGE_WRITABLE : 0; in rk_mk_pte_v2()
774 phys_addr_t paddr, size_t size, int prot) in rk_iommu_map_iova() argument
788 pte_addr[pte_count] = rk_ops->mk_ptentries(paddr, prot); in rk_iommu_map_iova()
812 &iova, &page_phys, &paddr, prot); in rk_iommu_map_iova()
[all …]
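
Note: the rockchip driver hides its v1/v2 PTE layouts behind the mk_ptentries function pointer seen above; both variants share the same prot-to-flag translation. A sketch of that design with placeholder PTE bits and simplified address handling (the real helpers also shuffle physical address bits):

    #include <stdint.h>
    #include <stdio.h>

    #define IOMMU_READ  (1 << 0)
    #define IOMMU_WRITE (1 << 1)

    /* Placeholders standing in for the RK_PTE_PAGE_* values. */
    #define PTE_VALID    (1u << 0)
    #define PTE_READABLE (1u << 1)
    #define PTE_WRITABLE (1u << 2)

    typedef uint32_t (*mk_ptentries_fn)(uint64_t page, int prot);

    static uint32_t mk_pte_v1(uint64_t page, int prot)
    {
            uint32_t flags = 0;

            flags |= (prot & IOMMU_READ)  ? PTE_READABLE : 0;
            flags |= (prot & IOMMU_WRITE) ? PTE_WRITABLE : 0;
            return (uint32_t)page | flags | PTE_VALID;
    }

    int main(void)
    {
            mk_ptentries_fn mk = mk_pte_v1; /* chosen per HW revision */

            printf("0x%08x\n", mk(0x1000, IOMMU_READ | IOMMU_WRITE));
            return 0;
    }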
/drivers/xen/
xlate_mmu.c
67 pgprot_t prot; member
100 pte_t pte = pte_mkspecial(pfn_pte(page_to_pfn(page), info->prot)); in remap_pte_fn()
146 int *err_ptr, pgprot_t prot, in xen_xlate_remap_gfn_array() argument
160 data.prot = prot; in xen_xlate_remap_gfn_array()
268 pgprot_t prot; member
276 pte_t pte = pte_mkspecial(pfn_pte(page_to_pfn(page), r->prot)); in remap_pfn_fn()
290 .prot = vma->vm_page_prot, in xen_remap_vma_range()
/drivers/net/wireless/mediatek/mt76/
mt76x02_mac.c
951 u32 prot[6]; in mt76x02_mac_set_tx_protection() local
956 for (i = 0; i < ARRAY_SIZE(prot); i++) { in mt76x02_mac_set_tx_protection()
957 prot[i] = mt76_rr(dev, MT_CCK_PROT_CFG + i * 4); in mt76x02_mac_set_tx_protection()
958 prot[i] &= ~MT_PROT_CFG_CTRL; in mt76x02_mac_set_tx_protection()
960 prot[i] &= ~MT_PROT_CFG_RATE; in mt76x02_mac_set_tx_protection()
971 prot[0] |= MT_PROT_CTRL_RTS_CTS; in mt76x02_mac_set_tx_protection()
974 prot[1] |= MT_PROT_CTRL_CTS2SELF; in mt76x02_mac_set_tx_protection()
976 prot[2] |= MT_PROT_RATE_CCK_11; in mt76x02_mac_set_tx_protection()
977 prot[3] |= MT_PROT_RATE_CCK_11; in mt76x02_mac_set_tx_protection()
978 prot[4] |= MT_PROT_RATE_CCK_11; in mt76x02_mac_set_tx_protection()
[all …]
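
Note: mt76x02_mac_set_tx_protection() above does a read-modify-write pass over a block of six protection registers: read each, clear the control (and optionally rate) fields, OR in the new bits. A sketch of that pattern against a fake register file; the field masks are placeholders, not the real MT_PROT_CFG_* values:

    #include <stdint.h>
    #include <stddef.h>

    #define N_PROT_REGS 6

    /* Fake register file; mt76 uses mt76_rr()/mt76_wr() against
     * MT_CCK_PROT_CFG + i * 4 instead. */
    static uint32_t regs[N_PROT_REGS];
    static uint32_t mmio_read(size_t i) { return regs[i]; }
    static void mmio_write(size_t i, uint32_t v) { regs[i] = v; }

    /* Placeholder field masks. */
    #define PROT_CFG_CTRL 0x00030000u
    #define PROT_CFG_RATE 0x0000ffffu

    static void set_tx_protection(const uint32_t new_bits[N_PROT_REGS],
                                  int update_rates)
    {
            for (size_t i = 0; i < N_PROT_REGS; i++) {
                    uint32_t v = mmio_read(i);

                    v &= ~PROT_CFG_CTRL;
                    if (update_rates)
                            v &= ~PROT_CFG_RATE;
                    v |= new_bits[i];
                    mmio_write(i, v);
            }
    }

    int main(void)
    {
            uint32_t bits[N_PROT_REGS] = { 0x10000, 0x20000, 0, 0, 0, 0 };

            set_tx_protection(bits, 1);
            return 0;
    }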
/drivers/vfio/
vfio_iommu_type1.c
87 int prot; /* IOMMU_CACHE */ member
96 int prot; /* IOMMU_READ/WRITE */ member
163 static int put_pfn(unsigned long pfn, int prot);
415 ret = put_pfn(vpfn->pfn, dma->prot); in vfio_iova_put_vfio_pfn()
471 static int put_pfn(unsigned long pfn, int prot) in put_pfn() argument
476 unpin_user_pages_dirty_lock(&page, 1, prot & IOMMU_WRITE); in put_pfn()
509 put_pfn(pfn, dma->prot); in vfio_batch_unpin()
562 long npages, int prot, unsigned long *pfn, in vaddr_get_pfns() argument
569 if (prot & IOMMU_WRITE) in vaddr_get_pfns()
598 ret = follow_fault_pfn(vma, mm, vaddr, pfn, prot & IOMMU_WRITE); in vaddr_get_pfns()
[all …]
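
Note: put_pfn() above unpins a page and, when the mapping was writable (prot & IOMMU_WRITE), marks it dirty, since the device may have written through the IOMMU mapping. A toy illustration of that rule with a stand-in page structure:

    #include <stdbool.h>
    #include <stdio.h>

    #define IOMMU_WRITE (1 << 1)

    /* Toy stand-in for struct page; only the dirty bit matters. */
    struct page { bool dirty; int pincount; };

    static void put_page_prot(struct page *p, int prot)
    {
            if (prot & IOMMU_WRITE)
                    p->dirty = true; /* device could have written */
            p->pincount--;
    }

    int main(void)
    {
            struct page p = { .dirty = false, .pincount = 1 };

            put_page_prot(&p, IOMMU_WRITE);
            printf("dirty=%d pins=%d\n", p.dirty, p.pincount);
            return 0;
    }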
/drivers/staging/android/
ashmem.c
357 static inline vm_flags_t calc_vm_may_flags(unsigned long prot) in calc_vm_may_flags() argument
359 return _calc_vm_trans(prot, PROT_READ, VM_MAYREAD) | in calc_vm_may_flags()
360 _calc_vm_trans(prot, PROT_WRITE, VM_MAYWRITE) | in calc_vm_may_flags()
361 _calc_vm_trans(prot, PROT_EXEC, VM_MAYEXEC); in calc_vm_may_flags()
540 static int set_prot_mask(struct ashmem_area *asma, unsigned long prot) in set_prot_mask() argument
547 if ((asma->prot_mask & prot) != prot) { in set_prot_mask()
553 if ((prot & PROT_READ) && (current->personality & READ_IMPLIES_EXEC)) in set_prot_mask()
554 prot |= PROT_EXEC; in set_prot_mask()
556 asma->prot_mask = prot; in set_prot_mask()
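
Note: set_prot_mask() above only ever narrows the ashmem protection mask: a request that re-adds a dropped bit fails, and on READ_IMPLIES_EXEC personalities PROT_READ drags in PROT_EXEC after validation. A compilable restatement of that visible logic:

    #include <errno.h>
    #include <stdio.h>
    #include <sys/mman.h> /* PROT_READ/PROT_WRITE/PROT_EXEC */

    static int narrow_prot_mask(unsigned long *mask, unsigned long prot,
                                int read_implies_exec)
    {
            /* Reject re-adding bits already dropped from the mask. */
            if ((*mask & prot) != prot)
                    return -EINVAL;

            /* Match the order in the excerpt: the personality widens
             * the request only after validation. */
            if (read_implies_exec && (prot & PROT_READ))
                    prot |= PROT_EXEC;

            *mask = prot;
            return 0;
    }

    int main(void)
    {
            unsigned long mask = PROT_READ | PROT_WRITE | PROT_EXEC;
            int ret;

            narrow_prot_mask(&mask, PROT_READ, 0);       /* ok: narrows */
            ret = narrow_prot_mask(&mask, PROT_WRITE, 0);/* fails: re-adds */
            printf("mask=0x%lx ret=%d\n", mask, ret);
            return 0;
    }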
/drivers/soc/aspeed/
aspeed-p2a-ctrl.c
103 pgprot_t prot; in aspeed_p2a_mmap() local
111 prot = vma->vm_page_prot; in aspeed_p2a_mmap()
117 prot = pgprot_noncached(prot); in aspeed_p2a_mmap()
121 vsize, prot)) in aspeed_p2a_mmap()
aspeed-lpc-ctrl.c
53 pgprot_t prot = vma->vm_page_prot; in aspeed_lpc_ctrl_mmap() local
59 prot = pgprot_noncached(prot); in aspeed_lpc_ctrl_mmap()
63 vsize, prot)) in aspeed_lpc_ctrl_mmap()
/drivers/slimbus/
stream.c
230 rt->prot = SLIM_PROTO_PUSH; in slim_stream_prepare()
232 rt->prot = SLIM_PROTO_PULL; in slim_stream_prepare()
234 rt->prot = SLIM_PROTO_ISO; in slim_stream_prepare()
276 if (stream->prot != SLIM_PROTO_ISO) in slim_define_channel_content()
311 wbuf[2] = (stream->prot << 4) | ((port->ch.seg_dist & 0xF00) >> 8); in slim_define_channel()
312 if (stream->prot == SLIM_PROTO_ISO) in slim_define_channel()
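
Note: slim_define_channel() above packs the 4-bit protocol id and the top four bits of the 12-bit segment distance into a single message byte. A small sketch of that packing (the protocol enum values are placeholders, not the kernel's SLIM_PROTO_* numbering):

    #include <stdint.h>
    #include <stdio.h>

    enum { PROTO_ISO = 0, PROTO_PUSH = 1, PROTO_PULL = 2 };

    static uint8_t pack_channel_byte(unsigned prot, unsigned seg_dist)
    {
            return (uint8_t)((prot << 4) | ((seg_dist & 0xF00) >> 8));
    }

    int main(void)
    {
            /* prot=PUSH, seg_dist=0x234 -> 0x12. */
            printf("0x%02x\n", pack_channel_byte(PROTO_PUSH, 0x234));
            return 0;
    }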
/drivers/infiniband/ulp/iser/
iser_initiator.c
65 struct iser_data_buf *pbuf_in = &iser_task->prot[ISER_DIR_IN]; in iser_prepare_read_cmd()
119 struct iser_data_buf *pbuf_out = &iser_task->prot[ISER_DIR_OUT]; in iser_prepare_write_cmd()
385 prot_buf = &iser_task->prot[ISER_DIR_IN]; in iser_send_command()
388 prot_buf = &iser_task->prot[ISER_DIR_OUT]; in iser_send_command()
756 iser_task->prot[ISER_DIR_IN].data_len = 0; in iser_task_rdma_init()
757 iser_task->prot[ISER_DIR_OUT].data_len = 0; in iser_task_rdma_init()
759 iser_task->prot[ISER_DIR_IN].dma_nents = 0; in iser_task_rdma_init()
760 iser_task->prot[ISER_DIR_OUT].dma_nents = 0; in iser_task_rdma_init()
779 &iser_task->prot[ISER_DIR_IN], in iser_task_rdma_finalize()
790 &iser_task->prot[ISER_DIR_OUT], in iser_task_rdma_finalize()
/drivers/gpu/drm/msm/
msm_gpummu.c
29 struct sg_table *sgt, size_t len, int prot) in msm_gpummu_map() argument
36 if (prot & IOMMU_WRITE) in msm_gpummu_map()
38 if (prot & IOMMU_READ) in msm_gpummu_map()
msm_iommu.c
52 struct sg_table *sgt, size_t len, int prot) in msm_iommu_pagetable_map() argument
67 if (ops->map(ops, addr, phys, 4096, prot, GFP_KERNEL)) { in msm_iommu_pagetable_map()
262 struct sg_table *sgt, size_t len, int prot) in msm_iommu_map() argument
271 ret = iommu_map_sgtable(iommu->domain, iova, sgt, prot); in msm_iommu_map()
/drivers/gpu/drm/i915/selftests/
igt_mmap.c
15 unsigned long prot, in igt_mmap_offset() argument
46 prot, flags, drm_vma_node_offset_addr(node)); in igt_mmap_offset()
/drivers/infiniband/core/
ib_core_uverbs.c
68 unsigned long pfn, unsigned long size, pgprot_t prot, in rdma_user_mmap_io() argument
90 vma->vm_page_prot = prot; in rdma_user_mmap_io()
91 if (io_remap_pfn_range(vma, vma->vm_start, pfn, size, prot)) { in rdma_user_mmap_io()
/drivers/ata/
sata_sil24.c
31 __le16 prot; member
795 u8 prot = qc->tf.protocol; in sil24_qc_defer() local
816 int is_excl = (ata_is_atapi(prot) || in sil24_qc_defer()
851 u16 prot = 0; in sil24_qc_prep() local
854 prot |= PRB_PROT_NCQ; in sil24_qc_prep()
856 prot |= PRB_PROT_WRITE; in sil24_qc_prep()
858 prot |= PRB_PROT_READ; in sil24_qc_prep()
859 prb->prot = cpu_to_le16(prot); in sil24_qc_prep()
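
Note: sil24_qc_prep() above builds the PRB protection field from the ATA command's protocol and direction, then stores it little-endian for the controller. A sketch with placeholder PRB_PROT_* values and a stand-in for cpu_to_le16():

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder bits; the real PRB_PROT_* live in sata_sil24.c. */
    #define PRB_PROT_NCQ   (1 << 0)
    #define PRB_PROT_READ  (1 << 1)
    #define PRB_PROT_WRITE (1 << 2)

    static uint16_t make_prb_prot(int is_ncq, int is_write)
    {
            uint16_t prot = 0;

            if (is_ncq)
                    prot |= PRB_PROT_NCQ;
            prot |= is_write ? PRB_PROT_WRITE : PRB_PROT_READ;

    /* Stand-in for cpu_to_le16(): swap only on big-endian hosts. */
    #if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
            prot = (uint16_t)((prot << 8) | (prot >> 8));
    #endif
            return prot;
    }

    int main(void)
    {
            printf("0x%04x\n", make_prb_prot(1, 1));
            return 0;
    }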
