Home
last modified time | relevance | path

Searched refs:new_pgd (Results 1 – 4 of 4) sorted by relevance

/arch/unicore32/mm/
pgd.c:29 pgd_t *new_pgd, *init_pgd; in get_pgd_slow() local
33 new_pgd = (pgd_t *)__get_free_pages(GFP_KERNEL, 0); in get_pgd_slow()
34 if (!new_pgd) in get_pgd_slow()
37 memset(new_pgd, 0, FIRST_KERNEL_PGD_NR * sizeof(pgd_t)); in get_pgd_slow()
43 memcpy(new_pgd + FIRST_KERNEL_PGD_NR, init_pgd + FIRST_KERNEL_PGD_NR, in get_pgd_slow()
46 clean_dcache_area(new_pgd, PTRS_PER_PGD * sizeof(pgd_t)); in get_pgd_slow()
53 new_pmd = pmd_alloc(mm, (pud_t *)new_pgd, 0); in get_pgd_slow()
68 return new_pgd; in get_pgd_slow()
74 free_pages((unsigned long)new_pgd, 0); in get_pgd_slow()
/arch/arm/mm/
pgd.c:35 pgd_t *new_pgd, *init_pgd; in pgd_alloc() local
40 new_pgd = __pgd_alloc(); in pgd_alloc()
41 if (!new_pgd) in pgd_alloc()
44 memset(new_pgd, 0, USER_PTRS_PER_PGD * sizeof(pgd_t)); in pgd_alloc()
50 memcpy(new_pgd + USER_PTRS_PER_PGD, init_pgd + USER_PTRS_PER_PGD, in pgd_alloc()
53 clean_dcache_area(new_pgd, PTRS_PER_PGD * sizeof(pgd_t)); in pgd_alloc()
59 new_pud = pud_alloc(mm, new_pgd + pgd_index(MODULES_VADDR), in pgd_alloc()
75 new_pud = pud_alloc(mm, new_pgd, 0); in pgd_alloc()
106 return new_pgd; in pgd_alloc()
114 __pgd_free(new_pgd); in pgd_alloc()
/arch/m68k/include/asm/
mcf_pgalloc.h:97 pgd_t *new_pgd; in pgd_alloc() local
99 new_pgd = (pgd_t *)__get_free_page(GFP_DMA | __GFP_NOWARN); in pgd_alloc()
100 if (!new_pgd) in pgd_alloc()
102 memcpy(new_pgd, swapper_pg_dir, PAGE_SIZE); in pgd_alloc()
103 memset(new_pgd, 0, PAGE_OFFSET >> PGDIR_SHIFT); in pgd_alloc()
104 return new_pgd; in pgd_alloc()
sun3_pgalloc.h:92 pgd_t *new_pgd; in pgd_alloc() local
94 new_pgd = (pgd_t *)get_zeroed_page(GFP_KERNEL); in pgd_alloc()
95 memcpy(new_pgd, swapper_pg_dir, PAGE_SIZE); in pgd_alloc()
96 memset(new_pgd, 0, (PAGE_OFFSET >> PGDIR_SHIFT)); in pgd_alloc()
97 return new_pgd; in pgd_alloc()