Searched refs:aligned_start (Results 1 – 4 of 4) sorted by relevance
/arch/sh/mm/
flush-sh4.c
    15  reg_size_t aligned_start, v, cnt, end;    in sh4__flush_wback_region() local
    17  aligned_start = register_align(start);    in sh4__flush_wback_region()
    18  v = aligned_start & ~(L1_CACHE_BYTES-1);    in sh4__flush_wback_region()
    19  end = (aligned_start + size + L1_CACHE_BYTES-1)    in sh4__flush_wback_region()
    49  reg_size_t aligned_start, v, cnt, end;    in sh4__flush_purge_region() local
    51  aligned_start = register_align(start);    in sh4__flush_purge_region()
    52  v = aligned_start & ~(L1_CACHE_BYTES-1);    in sh4__flush_purge_region()
    53  end = (aligned_start + size + L1_CACHE_BYTES-1)    in sh4__flush_purge_region()
    79  reg_size_t aligned_start, v, cnt, end;    in sh4__flush_invalidate_region() local
    81  aligned_start = register_align(start);    in sh4__flush_invalidate_region()
    [all …]
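All three SH-4 flush routines above share one alignment idiom: round the start down to an L1 line boundary, round the end up, then walk the region one line at a time. Below is a minimal standalone sketch of that arithmetic, assuming a 32-byte line; flush_line() is a hypothetical stand-in for the per-line cache operation (ocbwb/ocbp/ocbi on real SH-4 hardware).

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define L1_CACHE_BYTES 32UL             /* assumed SH-4 line size */

    /* Hypothetical stand-in for the per-line ocbwb/ocbp/ocbi operation. */
    static void flush_line(uintptr_t addr)
    {
            printf("flush line at %#lx\n", (unsigned long)addr);
    }

    static void flush_region(uintptr_t start, size_t size)
    {
            /* Round the start down to a line boundary... */
            uintptr_t v = start & ~(L1_CACHE_BYTES - 1);
            /* ...and the end up, so partially covered lines are included. */
            uintptr_t end = (start + size + L1_CACHE_BYTES - 1)
                            & ~(L1_CACHE_BYTES - 1);

            while (v < end) {
                    flush_line(v);
                    v += L1_CACHE_BYTES;
            }
    }

    int main(void)
    {
            flush_region(0x1005, 0x40);  /* covers lines 0x1000, 0x1020, 0x1040 */
            return 0;
    }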
cache-sh5.c
    76  unsigned long long ullend, addr, aligned_start;    in sh64_icache_inv_kernel_range() local
    77  aligned_start = (unsigned long long)(signed long long)(signed long) start;    in sh64_icache_inv_kernel_range()
    78  addr = L1_CACHE_ALIGN(aligned_start);    in sh64_icache_inv_kernel_range()
    158 unsigned long aligned_start;    in sh64_icache_inv_user_page_range() local
    173 aligned_start = start & PAGE_MASK;    in sh64_icache_inv_user_page_range()
    176 while (aligned_start < after_last_page_start) {    in sh64_icache_inv_user_page_range()
    179 vma = find_vma(mm, aligned_start);    in sh64_icache_inv_user_page_range()
    180 if (!vma || (aligned_start <= vma->vm_end)) {    in sh64_icache_inv_user_page_range()
    182 aligned_start += PAGE_SIZE;    in sh64_icache_inv_user_page_range()
    188 eaddr = aligned_start;    in sh64_icache_inv_user_page_range()
    [all …]
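cache-sh5.c shows two distinct idioms: a double cast that sign-extends bit 31 of a 32-bit effective address into the 64-bit SH-5 address space, and a PAGE_MASK-aligned page-by-page walk over a user range. A sketch of both under assumed 4 KiB pages follows; inv_page() is hypothetical, and the per-page find_vma() check in the real code is omitted. The cast goes through int32_t here so it behaves the same on LP64 hosts (on SH, long itself is 32-bit).

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096UL                /* assumed page size */
    #define PAGE_MASK (~(PAGE_SIZE - 1))

    /* Hypothetical stand-in for the per-page icache invalidate. */
    static void inv_page(unsigned long eaddr)
    {
            printf("invalidate page at %#lx\n", eaddr);
    }

    /* The kernel's (unsigned long long)(signed long long)(signed long)
     * cast relies on long being 32-bit on SH; int32_t reproduces that
     * sign extension portably. */
    static unsigned long long sign_extend_ea(uint32_t start)
    {
            return (unsigned long long)(long long)(int32_t)start;
    }

    /* Page walk as in sh64_icache_inv_user_page_range(): align the
     * start down to a page boundary, then step one page at a time. */
    static void inv_user_range(unsigned long start, unsigned long end)
    {
            unsigned long aligned_start = start & PAGE_MASK;

            while (aligned_start < end) {
                    inv_page(aligned_start);
                    aligned_start += PAGE_SIZE;
            }
    }

    int main(void)
    {
            printf("sign-extended: %#llx\n", sign_extend_ea(0x80001000u));
            inv_user_range(0x8123, 0x8123 + 2 * PAGE_SIZE);
            return 0;
    }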
/arch/arm/kernel/
setup.c
    673 u64 aligned_start;    in arm_add_memory() local
    680 aligned_start = PAGE_ALIGN(start);    in arm_add_memory()
    683 if (aligned_start > ULONG_MAX) {    in arm_add_memory()
    689 if (aligned_start + size > ULONG_MAX) {    in arm_add_memory()
    697 size = ULONG_MAX - aligned_start;    in arm_add_memory()
    701 if (aligned_start < PHYS_OFFSET) {    in arm_add_memory()
    702 if (aligned_start + size <= PHYS_OFFSET) {    in arm_add_memory()
    704 aligned_start, aligned_start + size);    in arm_add_memory()
    709 aligned_start, (u64)PHYS_OFFSET);    in arm_add_memory()
    711 size -= PHYS_OFFSET - aligned_start;    in arm_add_memory()
    [all …]
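arm_add_memory() sanitises a caller-supplied memory bank: page-align the start, shrink the size by what alignment consumed, clamp the top of the range at ULONG_MAX, and trim or reject anything below PHYS_OFFSET. A hedged standalone model of that flow, with phys_offset and ulong_max passed as explicit parameters rather than read from kernel constants:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096ULL
    #define PAGE_ALIGN(x) (((x) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))

    /* Model of arm_add_memory(); returning -1 models rejecting the bank. */
    static int add_memory_bank(uint64_t start, uint64_t size,
                               uint64_t phys_offset, uint64_t ulong_max)
    {
            uint64_t aligned_start = PAGE_ALIGN(start);

            /* Rounding the start up eats into the usable size. */
            if (aligned_start > start + size)
                    size = 0;
            else
                    size -= aligned_start - start;

            if (aligned_start > ulong_max)
                    return -1;              /* bank is out of reach */

            if (aligned_start + size > ulong_max)
                    size = ulong_max - aligned_start;   /* clamp the top */

            if (aligned_start < phys_offset) {
                    if (aligned_start + size <= phys_offset)
                            return -1;      /* fully below the base */
                    /* Trim the slice below the kernel's physical base. */
                    size -= phys_offset - aligned_start;
                    aligned_start = phys_offset;
            }

            printf("adding bank [%#llx, %#llx)\n",
                   (unsigned long long)aligned_start,
                   (unsigned long long)(aligned_start + size));
            return 0;
    }

    int main(void)
    {
            /* A bank straddling a 2 GiB phys_offset on a 32-bit machine. */
            return add_memory_bank(0x7fe00123ULL, 0x400000ULL,
                                   0x80000000ULL, 0xffffffffULL);
    }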
/arch/arm64/mm/
mmu.c
    397 unsigned long aligned_start = round_down(__pa(_stext),    in fixup_executable() local
    400 create_mapping(aligned_start, __phys_to_virt(aligned_start),    in fixup_executable()
    401 __pa(_stext) - aligned_start,    in fixup_executable()
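fixup_executable() rounds __pa(_stext) down to a section boundary and remaps just the gap from the section base up to _stext. A sketch assuming 2 MiB sections; create_mapping() is a stub modelling the kernel helper, and va_offset stands in for __phys_to_virt():

    #include <stdint.h>
    #include <stdio.h>

    #define SECTION_SIZE (2ULL << 20)   /* 2 MiB, assumed */
    #define round_down(x, a) ((x) & ~((a) - 1))

    /* Stub modelling the kernel's create_mapping(phys, virt, size). */
    static void create_mapping(uint64_t phys, uint64_t virt, uint64_t size)
    {
            printf("map phys %#llx -> virt %#llx, %#llx bytes\n",
                   (unsigned long long)phys, (unsigned long long)virt,
                   (unsigned long long)size);
    }

    /* pa_stext models __pa(_stext); adding va_offset models
     * __phys_to_virt() under a fixed linear offset. */
    static void fixup_head(uint64_t pa_stext, uint64_t va_offset)
    {
            uint64_t aligned_start = round_down(pa_stext, SECTION_SIZE);

            /* Map only the head gap between the section base and _stext. */
            create_mapping(aligned_start, aligned_start + va_offset,
                           pa_stext - aligned_start);
    }

    int main(void)
    {
            fixup_head(0x40281000ULL,
                       0xffffff8000000000ULL - 0x40000000ULL);
            return 0;
    }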