Home
last modified time | relevance | path

Searched refs:SECTION_SIZE (Results 1 – 15 of 15) sorted by relevance

/arch/arm/mach-omap2/
omap-secure.c:65  size = ALIGN(size, SECTION_SIZE); in omap_secure_ram_reserve_memblock()
66 omap_secure_memblock_base = arm_memblock_steal(size, SECTION_SIZE); in omap_secure_ram_reserve_memblock()
/arch/arm/mach-tegra/
iomap.h:134  #define IO_PPSB_SIZE SECTION_SIZE
138 #define IO_APB_SIZE SECTION_SIZE
/arch/arm/include/asm/
pgtable-2level.h:95  #define SECTION_SIZE (1UL << SECTION_SHIFT) macro
96 #define SECTION_MASK (~(SECTION_SIZE-1))
pgtable-3level.h:59  #define SECTION_SIZE (1UL << SECTION_SHIFT) macro
/arch/arm/mm/
init.c:689  if (addr & SECTION_SIZE) in section_update()
695 local_flush_tlb_kernel_range(addr, addr + SECTION_SIZE); in section_update()
715 if (!IS_ALIGNED(perms[i].start, SECTION_SIZE) || \
716 !IS_ALIGNED(perms[i].end, SECTION_SIZE)) { \
719 SECTION_SIZE); \
725 addr += SECTION_SIZE) \
mmu.c:770  if (addr & SECTION_SIZE) in __map_init_section()
775 phys += SECTION_SIZE; in __map_init_section()
776 } while (pmd++, addr += SECTION_SIZE, addr != end); in __map_init_section()
1013 vm_reserve_area_early(addr, SECTION_SIZE, pmd_empty_section_gap); in pmd_empty_section_gap()
1034 if ((addr & ~PMD_MASK) == SECTION_SIZE) { in fill_pmd_gaps()
1046 if ((addr & ~PMD_MASK) == SECTION_SIZE) { in fill_pmd_gaps()
1388 unsigned long kernel_x_start = round_down(__pa(_stext), SECTION_SIZE); in map_lowmem()
1389 unsigned long kernel_x_end = round_up(__pa(__init_end), SECTION_SIZE); in map_lowmem()
idmap.c:59  addr += SECTION_SIZE; in idmap_add_pmd()
dump.c:285  if (SECTION_SIZE < PMD_SIZE && pmd_large(pmd[1])) in walk_pmd()
286 note_page(st, addr + SECTION_SIZE, 3, pmd_val(pmd[1])); in walk_pmd()
/arch/arm64/include/asm/
kernel-pgtable.h:56  #define SWAPPER_BLOCK_SIZE SECTION_SIZE
pgtable-hwdef.h:54  #define SECTION_SIZE (_AC(1, UL) << SECTION_SHIFT) macro
55 #define SECTION_MASK (~(SECTION_SIZE-1))
/arch/arm64/mm/
mmu.c:308  unsigned long kernel_x_start = round_down(__pa(_stext), SECTION_SIZE); in __map_memblock()
309 unsigned long kernel_x_end = round_up(__pa(__init_end), SECTION_SIZE); in __map_memblock()
396 if (!IS_ALIGNED((unsigned long)_stext, SECTION_SIZE)) { in fixup_executable()
398 SECTION_SIZE); in fixup_executable()
405 if (!IS_ALIGNED((unsigned long)__init_end, SECTION_SIZE)) { in fixup_executable()
407 SECTION_SIZE); in fixup_executable()
/arch/unicore32/include/asm/
pgtable.h:73  #define SECTION_SIZE (1UL << SECTION_SHIFT) macro
74 #define SECTION_MASK (~(SECTION_SIZE-1))
/arch/arm/mach-davinci/
board-dm365-evm.c:643  if (request_mem_region(DM365_ASYNC_EMIF_DATA_CE1_BASE, SECTION_SIZE, in evm_init_cpld()
646 cpld = ioremap(DM365_ASYNC_EMIF_DATA_CE1_BASE, SECTION_SIZE); in evm_init_cpld()
649 SECTION_SIZE); in evm_init_cpld()
/arch/unicore32/mm/
mmu.c:189  phys += SECTION_SIZE; in alloc_init_section()
190 } while (pmd++, addr += SECTION_SIZE, addr != end); in alloc_init_section()
/arch/arm64/
Kconfig.debug:81  bool "Align linker sections up to SECTION_SIZE"