Searched refs:area (Results 1 – 8 of 8) sorted by relevance
/kernel/
kcov.c
     61  void *area;    (member)
    136  struct kcov_remote_area *area;    (local in kcov_remote_area_get())
    140  area = list_entry(pos, struct kcov_remote_area, list);    (in kcov_remote_area_get())
    141  if (area->size == size) {    (in kcov_remote_area_get())
    142  list_del(&area->list);    (in kcov_remote_area_get())
    143  return area;    (in kcov_remote_area_get())
    150  static void kcov_remote_area_put(struct kcov_remote_area *area,    (argument in kcov_remote_area_put())
    153  INIT_LIST_HEAD(&area->list);    (in kcov_remote_area_put())
    154  area->size = size;    (in kcov_remote_area_put())
    155  list_add(&area->list, &kcov_remote_areas);    (in kcov_remote_area_put())
    [all …]
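The kcov.c hits show a simple size-keyed free list: kcov_remote_area_get() scans kcov_remote_areas for an entry whose size matches, unlinks it with list_del(), and returns it, while kcov_remote_area_put() re-initializes the node and pushes it back with list_add(). Below is a minimal userspace sketch of that recycling pattern, assuming a hypothetical remote_area type and a hand-rolled singly linked list in place of the kernel's list_head helpers (the kernel also holds kcov_remote_lock around both operations, which is omitted here).

#include <stddef.h>

/* Hypothetical stand-in for struct kcov_remote_area: a cached buffer
 * descriptor keyed by its size, kept on a singly linked free list. */
struct remote_area {
	struct remote_area *next;
	unsigned int size;
};

static struct remote_area *free_areas;	/* analogue of kcov_remote_areas */

/* Pop the first cached area whose size matches exactly, or NULL. */
static struct remote_area *remote_area_get(unsigned int size)
{
	struct remote_area **pp;

	for (pp = &free_areas; *pp; pp = &(*pp)->next) {
		if ((*pp)->size == size) {
			struct remote_area *area = *pp;
			*pp = area->next;	/* unlink, like list_del() */
			return area;
		}
	}
	return NULL;
}

/* Return an area to the cache, like kcov_remote_area_put(). */
static void remote_area_put(struct remote_area *area, unsigned int size)
{
	area->size = size;
	area->next = free_areas;	/* push front, like list_add() */
	free_areas = area;
}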
/kernel/events/
uprobes.c
   1440  static int xol_add_vma(struct mm_struct *mm, struct xol_area *area)    (argument in xol_add_vma())
   1453  if (!area->vaddr) {    (in xol_add_vma())
   1455  area->vaddr = get_unmapped_area(NULL, TASK_SIZE - PAGE_SIZE,    (in xol_add_vma())
   1457  if (IS_ERR_VALUE(area->vaddr)) {    (in xol_add_vma())
   1458  ret = area->vaddr;    (in xol_add_vma())
   1463  vma = _install_special_mapping(mm, area->vaddr, PAGE_SIZE,    (in xol_add_vma())
   1465  &area->xol_mapping);    (in xol_add_vma())
   1473  smp_store_release(&mm->uprobes_state.xol_area, area); /* ^^^ */    (in xol_add_vma())
   1484  struct xol_area *area;    (local in __create_xol_area())
   1486  area = kmalloc(sizeof(*area), GFP_KERNEL);    (in __create_xol_area())
    [all …]
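The uprobes.c hits are the lazy-init-then-publish idiom: xol_add_vma() reserves an address for the XOL page only if area->vaddr is not set yet, installs the special mapping, and only then publishes the pointer with smp_store_release() so that readers pairing with an acquire load see a fully initialized xol_area. A hedged userspace sketch of that release/acquire publication, using C11 atomics and mmap() as stand-ins (the xol_like_area type, publish_area() and lookup_area() are hypothetical names, not kernel APIs):

#include <stdatomic.h>
#include <stddef.h>
#include <sys/mman.h>

struct xol_like_area {
	void *vaddr;		/* page that will hold out-of-line slots */
};

/* analogue of mm->uprobes_state.xol_area */
static _Atomic(struct xol_like_area *) published_area;

static int publish_area(struct xol_like_area *area)
{
	if (!area->vaddr) {
		/* lazily obtain the page, loosely mirroring get_unmapped_area()
		 * followed by _install_special_mapping() in the kernel */
		area->vaddr = mmap(NULL, 4096, PROT_READ | PROT_WRITE,
				   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
		if (area->vaddr == MAP_FAILED)
			return -1;
	}

	/* release store: everything written to *area above becomes visible to
	 * any thread that acquire-loads published_area, mirroring
	 * smp_store_release(&mm->uprobes_state.xol_area, area) */
	atomic_store_explicit(&published_area, area, memory_order_release);
	return 0;
}

static struct xol_like_area *lookup_area(void)
{
	return atomic_load_explicit(&published_area, memory_order_acquire);
}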
/kernel/dma/
remap.c
     11  struct vm_struct *area = find_vm_area(cpu_addr);    (local in dma_common_find_pages())
     13  if (!area || area->flags != VM_DMA_COHERENT)    (in dma_common_find_pages())
     15  return area->pages;    (in dma_common_find_pages())
     62  struct vm_struct *area = find_vm_area(cpu_addr);    (local in dma_common_free_remap())
     64  if (!area || area->flags != VM_DMA_COHERENT) {    (in dma_common_free_remap())
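Both dma_common_find_pages() and dma_common_free_remap() follow the same guard: look up the vmalloc metadata for cpu_addr with find_vm_area() and bail out unless the area is tagged VM_DMA_COHERENT before touching area->pages. A small sketch of that look-up-then-validate-the-tag pattern with a hypothetical userspace registry (region, find_region() and region_find_pages() are illustrative names only):

#include <stddef.h>

enum region_kind { REGION_NONE, REGION_DMA_COHERENT, REGION_OTHER };

/* Hypothetical per-mapping descriptor, loosely mirroring struct vm_struct. */
struct region {
	void *addr;
	enum region_kind kind;
	void **pages;		/* backing pages, only valid for DMA regions */
};

static struct region regions[16];

static struct region *find_region(void *addr)
{
	for (size_t i = 0; i < sizeof(regions) / sizeof(regions[0]); i++)
		if (regions[i].addr == addr)
			return &regions[i];
	return NULL;
}

/* analogue of dma_common_find_pages(): NULL unless the tag matches */
static void **region_find_pages(void *cpu_addr)
{
	struct region *area = find_region(cpu_addr);

	if (!area || area->kind != REGION_DMA_COHERENT)
		return NULL;
	return area->pages;
}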
swiotlb.c
    652  struct io_tlb_area *area = mem->areas + area_index;    (local in swiotlb_do_find_slots())
    679  spin_lock_irqsave(&area->lock, flags);    (in swiotlb_do_find_slots())
    680  if (unlikely(nslots > mem->area_nslabs - area->used))    (in swiotlb_do_find_slots())
    684  index = wrap = wrap_area_index(mem, ALIGN(area->index, stride));    (in swiotlb_do_find_slots())
    711  spin_unlock_irqrestore(&area->lock, flags);    (in swiotlb_do_find_slots())
    729  area->index = index + nslots;    (in swiotlb_do_find_slots())
    731  area->index = 0;    (in swiotlb_do_find_slots())
    732  area->used += nslots;    (in swiotlb_do_find_slots())
    733  spin_unlock_irqrestore(&area->lock, flags);    (in swiotlb_do_find_slots())
    829  struct io_tlb_area *area = &mem->areas[aindex];    (local in swiotlb_release_slots())
    [all …]
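The swiotlb.c hits show per-area slot accounting done under a per-area lock: swiotlb_do_find_slots() gives up early if the area does not have nslots free slabs, scans from a wrapped area->index, and on success advances area->index and area->used before unlocking; swiotlb_release_slots() undoes that. A hedged userspace sketch of the same bookkeeping, with a pthread mutex instead of spin_lock_irqsave() and a plain busy[] array instead of the real slot metadata (struct tlb_area and the helpers are made-up names; the kernel's alignment/stride rules are left out):

#include <pthread.h>
#include <stdbool.h>

#define AREA_NSLABS 64

/* Hypothetical analogue of struct io_tlb_area: per-area lock, rotating
 * search index, used-slot count, plus a simple free map for the sketch. */
struct tlb_area {
	pthread_mutex_t lock;
	unsigned int index;		/* where the next search starts */
	unsigned int used;		/* slots currently allocated */
	bool busy[AREA_NSLABS];
};

/* Find nslots consecutive free slots, starting the scan at area->index and
 * wrapping once around, all under the per-area lock.  Returns the first
 * slot index or -1. */
static int area_alloc_slots(struct tlb_area *area, unsigned int nslots)
{
	int found = -1;

	pthread_mutex_lock(&area->lock);
	/* cheap early exit, like "nslots > mem->area_nslabs - area->used" */
	if (nslots > AREA_NSLABS - area->used)
		goto out;

	for (unsigned int n = 0; n < AREA_NSLABS; n++) {
		unsigned int start = (area->index + n) % AREA_NSLABS;
		unsigned int i;

		if (start + nslots > AREA_NSLABS)
			continue;	/* sketch: no wrap-around runs */
		for (i = 0; i < nslots; i++)
			if (area->busy[start + i])
				break;
		if (i == nslots) {
			for (i = 0; i < nslots; i++)
				area->busy[start + i] = true;
			area->index = (start + nslots) % AREA_NSLABS;
			area->used += nslots;
			found = (int)start;
			break;
		}
	}
out:
	pthread_mutex_unlock(&area->lock);
	return found;
}

/* Inverse bookkeeping, like swiotlb_release_slots(). */
static void area_release_slots(struct tlb_area *area, unsigned int start,
			       unsigned int nslots)
{
	pthread_mutex_lock(&area->lock);
	for (unsigned int i = 0; i < nslots; i++)
		area->busy[start + i] = false;
	area->used -= nslots;
	pthread_mutex_unlock(&area->lock);
}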
Kconfig
    144  comment "Default contiguous memory area size:"
    152  Defines the size (in MiB) of the default memory area for Contiguous
    163  Defines the size of the default memory area for Contiguous Memory
/kernel/irq/
matrix.c
    113  unsigned int area, start = m->alloc_start;    (local in matrix_alloc_area())
    118  area = bitmap_find_next_zero_area(m->scratch_map, end, start, num, 0);    (in matrix_alloc_area())
    119  if (area >= end)    (in matrix_alloc_area())
    120  return area;    (in matrix_alloc_area())
    122  bitmap_set(cm->managed_map, area, num);    (in matrix_alloc_area())
    124  bitmap_set(cm->alloc_map, area, num);    (in matrix_alloc_area())
    125  return area;    (in matrix_alloc_area())
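matrix_alloc_area() is a thin wrapper around the bitmap API: bitmap_find_next_zero_area() returns the first run of num clear bits in [start, end) (or a value >= end on failure), and the winner is then claimed with bitmap_set() in either the managed or the regular allocation map. A sketch of that find-a-zero-run-then-set-it operation with hand-rolled byte-array helpers instead of the kernel bitmap API (find_zero_area(), set_bit_range() and alloc_area() are illustrative, not kernel functions):

#include <stdbool.h>

static bool test_bit(const unsigned char *map, unsigned int bit)
{
	return map[bit / 8] & (1u << (bit % 8));
}

static void set_bit_range(unsigned char *map, unsigned int start, unsigned int num)
{
	for (unsigned int i = 0; i < num; i++)
		map[(start + i) / 8] |= 1u << ((start + i) % 8);
}

/* Rough analogue of bitmap_find_next_zero_area(): first index in
 * [start, end - num] where num consecutive bits are clear, else end. */
static unsigned int find_zero_area(const unsigned char *map, unsigned int end,
				   unsigned int start, unsigned int num)
{
	for (unsigned int i = start; i + num <= end; i++) {
		unsigned int j;

		for (j = 0; j < num; j++)
			if (test_bit(map, i + j))
				break;
		if (j == num)
			return i;
		i += j;	/* skip past the set bit we just hit */
	}
	return end;
}

/* Mirrors the shape of matrix_alloc_area(): search, then claim the area. */
static unsigned int alloc_area(unsigned char *map, unsigned int end,
			       unsigned int start, unsigned int num)
{
	unsigned int area = find_zero_area(map, end, start, num);

	if (area >= end)
		return area;	/* caller treats >= end as failure */
	set_bit_range(map, area, num);
	return area;
}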
/kernel/bpf/
syscall.c
    317  void *area;    (local in __bpf_map_area_alloc())
    328  area = kmalloc_node(size, gfp | GFP_USER | __GFP_NORETRY,    (in __bpf_map_area_alloc())
    330  if (area != NULL)    (in __bpf_map_area_alloc())
    331  return area;    (in __bpf_map_area_alloc())
    349  void bpf_map_area_free(void *area)    (argument in bpf_map_area_free())
    351  kvfree(area);    (in bpf_map_area_free())
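__bpf_map_area_alloc() tries the cheap path first, kmalloc_node() with __GFP_NORETRY so a large or fragmented request fails fast, and then falls back to a vmalloc-backed allocation; bpf_map_area_free() frees either kind through kvfree(). A userspace sketch of that try-cheap-then-fall-back shape, with malloc() and mmap() standing in for kmalloc and vmalloc; the size/mapped header is purely an artifact of the sketch (kvfree() needs no such header because it can tell vmalloc addresses apart):

#include <stdlib.h>
#include <sys/mman.h>

#define SMALL_LIMIT (256 * 1024)	/* sketch threshold for the fast path */

struct area_hdr {
	size_t size;
	int mapped;		/* 1 if mmap-backed, 0 if malloc-backed */
};

/* Try the cheap allocator first, fall back to page mappings for big or
 * failed requests, loosely mirroring kmalloc-then-vmalloc. */
static void *map_area_alloc(size_t size)
{
	size_t total = sizeof(struct area_hdr) + size;
	struct area_hdr *hdr = NULL;

	if (size <= SMALL_LIMIT)
		hdr = malloc(total);
	if (hdr) {
		hdr->mapped = 0;
	} else {
		hdr = mmap(NULL, total, PROT_READ | PROT_WRITE,
			   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
		if (hdr == MAP_FAILED)
			return NULL;
		hdr->mapped = 1;
	}
	hdr->size = total;
	return hdr + 1;
}

/* Single free helper regardless of how the area was obtained, like
 * bpf_map_area_free()/kvfree(). */
static void map_area_free(void *area)
{
	struct area_hdr *hdr;

	if (!area)
		return;
	hdr = (struct area_hdr *)area - 1;
	if (hdr->mapped)
		munmap(hdr, hdr->size);
	else
		free(hdr);
}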
core.c
    833  void bpf_jit_fill_hole_with_zero(void *area, unsigned int size)    (argument in bpf_jit_fill_hole_with_zero())
    835  memset(area, 0, size);    (in bpf_jit_fill_hole_with_zero())