Searched refs:arena (Results 1 – 8 of 8) sorted by relevance
/arch/alpha/kernel/
D | pci_iommu.c
    62   struct pci_iommu_arena *arena;   in iommu_arena_new_node() local
    76   arena = alloc_bootmem_node(NODE_DATA(nid), sizeof(*arena));   in iommu_arena_new_node()
    77   if (!NODE_DATA(nid) || !arena) {   in iommu_arena_new_node()
    81   arena = alloc_bootmem(sizeof(*arena));   in iommu_arena_new_node()
    84   arena->ptes = __alloc_bootmem_node(NODE_DATA(nid), mem_size, align, 0);   in iommu_arena_new_node()
    85   if (!NODE_DATA(nid) || !arena->ptes) {   in iommu_arena_new_node()
    89   arena->ptes = __alloc_bootmem(mem_size, align, 0);   in iommu_arena_new_node()
    94   arena = alloc_bootmem(sizeof(*arena));   in iommu_arena_new_node()
    95   arena->ptes = __alloc_bootmem(mem_size, align, 0);   in iommu_arena_new_node()
    99   spin_lock_init(&arena->lock);   in iommu_arena_new_node()
    [all …]
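The pci_iommu.c hits show the shape of iommu_arena_new_node(): allocate the arena descriptor and its page-table (ptes) backing store from bootmem, preferring the node-local allocator and falling back to the generic one, then initialize the arena's spinlock. A rough user-space analogue of that shape, with hypothetical names and plain calloc/pthread standing in for the bootmem allocators and the kernel spinlock (the NUMA fallback has no meaningful equivalent here and is omitted), might look like this:

#include <stdlib.h>
#include <pthread.h>

/* Hypothetical, simplified stand-in for the alpha scatter-gather arena. */
typedef unsigned long fake_pte_t;

struct fake_sg_arena {
        fake_pte_t     *ptes;       /* translation entries backing the DMA window */
        unsigned long   dma_base;   /* first bus address covered by the window    */
        unsigned long   size;       /* window size in bytes                       */
        pthread_mutex_t lock;       /* plays the role of arena->lock              */
};

/* Sketch of the iommu_arena_new_node() allocation pattern: first the
 * descriptor, then a pte array sized to cover the window in page steps. */
static struct fake_sg_arena *fake_arena_new(unsigned long dma_base,
                                            unsigned long window_size,
                                            unsigned long page_size)
{
        struct fake_sg_arena *arena = calloc(1, sizeof(*arena));

        if (!arena)
                return NULL;

        arena->ptes = calloc(window_size / page_size, sizeof(*arena->ptes));
        if (!arena->ptes) {
                free(arena);
                return NULL;
        }

        arena->dma_base = dma_base;
        arena->size = window_size;
        pthread_mutex_init(&arena->lock, NULL);
        return arena;
}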
D | core_titan.c
    580   struct pci_iommu_arena *arena;   member
    597   aper->arena = agp->hose->sg_pci;   in titan_agp_setup()
    599   aper->pg_start = iommu_reserve(aper->arena, aper->pg_count,   in titan_agp_setup()
    608   aper->arena->dma_base + aper->pg_start * PAGE_SIZE;   in titan_agp_setup()
    621   status = iommu_release(aper->arena, aper->pg_start, aper->pg_count);   in titan_agp_cleanup()
    625   iommu_unbind(aper->arena, aper->pg_start, aper->pg_count);   in titan_agp_cleanup()
    626   status = iommu_release(aper->arena, aper->pg_start,   in titan_agp_cleanup()
    682   return iommu_bind(aper->arena, aper->pg_start + pg_start,   in titan_agp_bind_memory()
    690   return iommu_unbind(aper->arena, aper->pg_start + pg_start,   in titan_agp_unbind_memory()
    698   unsigned long baddr = addr - aper->arena->dma_base;   in titan_agp_translate()
    [all …]
D | core_marvel.c
    894    struct pci_iommu_arena *arena;   member
    910    aper->arena = agp->hose->sg_pci;   in marvel_agp_setup()
    912    aper->pg_start = iommu_reserve(aper->arena, aper->pg_count,   in marvel_agp_setup()
    922    aper->arena->dma_base + aper->pg_start * PAGE_SIZE;   in marvel_agp_setup()
    935    status = iommu_release(aper->arena, aper->pg_start, aper->pg_count);   in marvel_agp_cleanup()
    939    iommu_unbind(aper->arena, aper->pg_start, aper->pg_count);   in marvel_agp_cleanup()
    940    status = iommu_release(aper->arena, aper->pg_start,   in marvel_agp_cleanup()
    1018   return iommu_bind(aper->arena, aper->pg_start + pg_start,   in marvel_agp_bind_memory()
    1026   return iommu_unbind(aper->arena, aper->pg_start + pg_start,   in marvel_agp_unbind_memory()
    1034   unsigned long baddr = addr - aper->arena->dma_base;   in marvel_agp_translate()
    [all …]
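core_titan.c and core_marvel.c use the arena the same way: *_agp_setup() reserves pg_count entries out of the hose's scatter-gather arena (agp->hose->sg_pci) with iommu_reserve(), bind/unbind map pages into that reserved range, and *_agp_translate() works back from a bus address by subtracting the arena's dma_base. A minimal sketch of just the address arithmetic visible in the hits, with hypothetical types and an assumed 8 KiB Alpha page size, could be:

#define FAKE_PAGE_SIZE 8192UL      /* assumption: 8 KiB pages, as on Alpha */

/* Hypothetical aperture descriptor mirroring the fields used above. */
struct fake_aperture {
        unsigned long arena_dma_base;   /* aper->arena->dma_base           */
        long          pg_start;         /* first arena page reserved       */
        long          pg_count;         /* number of arena pages reserved  */
};

/* Bus address of the aperture start, as computed in *_agp_setup():
 * dma_base + pg_start * PAGE_SIZE. */
static unsigned long fake_aperture_bus_base(const struct fake_aperture *aper)
{
        return aper->arena_dma_base + aper->pg_start * FAKE_PAGE_SIZE;
}

/* Offset of a bus address within the arena window, the first step of
 * *_agp_translate(): baddr = addr - aper->arena->dma_base. */
static unsigned long fake_aperture_offset(const struct fake_aperture *aper,
                                          unsigned long bus_addr)
{
        return bus_addr - aper->arena_dma_base;
}

The reserve/bind/release helpers all take the same arena plus pg_start and pg_count, which is why the aperture itself only has to remember those two values and the arena pointer.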
D | core_cia.c
    352   struct pci_iommu_arena *arena = pci_isa_hose->sg_isa;   in verify_tb_operation() local
    371   addr0 = arena->dma_base;   in verify_tb_operation()
    445   cia_pci_tbi(arena->hose, 0, -1);   in verify_tb_operation()
    460   arena->ptes[4] = pte0;   in verify_tb_operation()
    484   arena->ptes[5] = pte0;   in verify_tb_operation()
    497   arena->align_entry = 4;   in verify_tb_operation()
    520   arena->ptes[4] = 0;   in verify_tb_operation()
    521   arena->ptes[5] = 0;   in verify_tb_operation()
    535   alpha_mv.mv_pci_tbi(arena->hose, 0, -1);   in verify_tb_operation()
/arch/sparc/kernel/
D | iommu.c
    102   struct iommu_arena *arena = &iommu->arena;   in iommu_range_alloc() local
    117   start = arena->hint;   in iommu_range_alloc()
    119   limit = arena->limit;   in iommu_range_alloc()
    139   n = iommu_area_alloc(arena->map, limit, start, npages,   in iommu_range_alloc()
    158   arena->hint = end;   in iommu_range_alloc()
    169   struct iommu_arena *arena = &iommu->arena;   in iommu_range_free() local
    174   bitmap_clear(arena->map, entry, npages);   in iommu_range_free()
    195   iommu->arena.map = kmalloc_node(sz, GFP_KERNEL, numa_node);   in iommu_table_init()
    196   if (!iommu->arena.map) {   in iommu_table_init()
    200   memset(iommu->arena.map, 0, sz);   in iommu_table_init()
    [all …]
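On sparc the iommu_arena is a plain bitmap allocator: iommu_range_alloc() searches arena->map for a run of free entries starting at arena->hint (via iommu_area_alloc()), bounded by arena->limit, and stores where it stopped back into the hint; iommu_range_free() just clears the bits with bitmap_clear(). A self-contained sketch of that strategy in standard C, using a byte-per-entry map and hypothetical names instead of the kernel's packed bitmap helpers, might be:

/* Hypothetical mirror of struct iommu_arena as it appears in the hits:
 * a used/free map, a search hint, and the number of valid entries.
 * The kernel packs the map one bit per entry; a byte per entry keeps
 * this sketch short. */
struct fake_bitmap_arena {
        unsigned char *map;
        unsigned long  hint;
        unsigned long  limit;
};

/* Find npages consecutive free entries, starting the search at the hint
 * and falling back to one pass from 0, then mark them used and advance
 * the hint, in the spirit of iommu_range_alloc(). Returns -1 if full. */
static long fake_range_alloc(struct fake_bitmap_arena *arena,
                             unsigned long npages)
{
        unsigned long start = arena->hint;
        int pass;

        for (pass = 0; pass < 2; pass++) {
                unsigned long i, run = 0;

                for (i = start; i < arena->limit; i++) {
                        run = arena->map[i] ? 0 : run + 1;
                        if (run == npages) {
                                unsigned long first = i - npages + 1, j;

                                for (j = first; j <= i; j++)
                                        arena->map[j] = 1;
                                arena->hint = i + 1;
                                return (long)first;
                        }
                }
                start = 0;      /* retry once from the bottom of the map */
        }
        return -1;
}

/* Release a range again, as iommu_range_free() does with bitmap_clear(). */
static void fake_range_free(struct fake_bitmap_arena *arena,
                            unsigned long entry, unsigned long npages)
{
        unsigned long i;

        for (i = 0; i < npages; i++)
                arena->map[entry + i] = 0;
}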
D | ldc.c
    103    struct iommu_arena arena;   member
    1019   iommu->arena.map = kzalloc(sz, GFP_KERNEL);   in ldc_iommu_init()
    1020   if (!iommu->arena.map) {   in ldc_iommu_init()
    1025   iommu->arena.limit = num_tsb_entries;   in ldc_iommu_init()
    1055   kfree(iommu->arena.map);   in ldc_iommu_init()
    1056   iommu->arena.map = NULL;   in ldc_iommu_init()
    1068   num_tsb_entries = iommu->arena.limit;   in ldc_iommu_release()
    1075   kfree(iommu->arena.map);   in ldc_iommu_release()
    1076   iommu->arena.map = NULL;   in ldc_iommu_release()
    1883   struct iommu_arena *arena = &iommu->arena;   in arena_alloc() local
    [all …]
D | pci_sun4v.c
    555   struct iommu_arena *arena = &iommu->arena;   in probe_existing_entries() local
    560   for (i = 0; i < arena->limit; i++) {   in probe_existing_entries()
    572   __set_bit(i, arena->map);   in probe_existing_entries()
    612   iommu->arena.map = kzalloc(sz, GFP_KERNEL);   in pci_sun4v_iommu_init()
    613   if (!iommu->arena.map) {   in pci_sun4v_iommu_init()
    617   iommu->arena.limit = num_tsb_entries;   in pci_sun4v_iommu_init()
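pci_sun4v.c seeds that same bitmap before handing it to the allocator: probe_existing_entries() walks all arena->limit TSB slots and sets the bit (__set_bit(i, arena->map)) for every slot that already carries a valid mapping, so those entries are never allocated again. Reusing the fake_bitmap_arena sketch above, and with fake_is_slot_mapped() as a purely hypothetical stand-in for the lookup of an existing mapping, the pre-seeding step could look like:

/* Hypothetical stand-in for the lookup that reports whether TSB slot i
 * already carries a valid mapping; just a stub here. */
static int fake_is_slot_mapped(unsigned long i)
{
        (void)i;
        return 0;
}

/* Mark every already-mapped slot as used, mirroring the loop in
 * probe_existing_entries(); returns how many slots were pre-seeded. */
static unsigned long fake_probe_existing(struct fake_bitmap_arena *arena)
{
        unsigned long i, cnt = 0;

        for (i = 0; i < arena->limit; i++) {
                if (fake_is_slot_mapped(i)) {
                        arena->map[i] = 1;   /* __set_bit(i, arena->map) in the kernel */
                        cnt++;
                }
        }
        return cnt;
}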
/arch/sparc/include/asm/
D | iommu_64.h
    28   struct iommu_arena arena;   member