Matches for L1_CACHE_BYTES under arch/ (excerpted search results; "…" marks elided lines, "[all …]" a truncated match list):

/arch/sh/mm/flush-sh4.c, in sh4__flush_wback_region():

    v = aligned_start & ~(L1_CACHE_BYTES-1);
    end = (aligned_start + size + L1_CACHE_BYTES-1)
          & ~(L1_CACHE_BYTES-1);
    cnt = (end - v) / L1_CACHE_BYTES;
    …
    __ocbwb(v); v += L1_CACHE_BYTES;
    __ocbwb(v); v += L1_CACHE_BYTES;
    __ocbwb(v); v += L1_CACHE_BYTES;
    __ocbwb(v); v += L1_CACHE_BYTES;
    __ocbwb(v); v += L1_CACHE_BYTES;
    __ocbwb(v); v += L1_CACHE_BYTES;
    [all …]

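The sh4 routine above is the canonical round-down/round-up idiom: mask the start address down to a line boundary, round the end up, and convert the difference into a whole number of lines. A minimal C sketch of the same arithmetic, assuming a 32-byte line and with cacheline_writeback() as a hypothetical stand-in for the __ocbwb() intrinsic:

    #include <stddef.h>

    #define L1_CACHE_BYTES 32UL  /* assumed line size; the masks need a power of two */

    /* Hypothetical stand-in for a per-line write-back op such as __ocbwb(). */
    extern void cacheline_writeback(unsigned long line);

    static void flush_wback_region(void *start, size_t size)
    {
        /* Round start down and end up to whole cache lines. */
        unsigned long v   = (unsigned long)start & ~(L1_CACHE_BYTES - 1);
        unsigned long end = ((unsigned long)start + size + L1_CACHE_BYTES - 1)
                                & ~(L1_CACHE_BYTES - 1);
        unsigned long cnt = (end - v) / L1_CACHE_BYTES;

        while (cnt--) {                 /* the kernel version unrolls this x8 */
            cacheline_writeback(v);
            v += L1_CACHE_BYTES;
        }
    }

Precomputing cnt is what lets the real code unroll the loop by eight lines per iteration without re-testing the address bound.
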
/arch/sh/mm/cache-sh2.c

sh2__flush_wback_region():
    begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
    end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
          & ~(L1_CACHE_BYTES-1);
    for (v = begin; v < end; v+=L1_CACHE_BYTES) {

sh2__flush_purge_region():
    begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
    end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
          & ~(L1_CACHE_BYTES-1);
    …
    for (v = begin; v < end; v+=L1_CACHE_BYTES)

sh2__flush_invalidate_region():
    begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
    end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
    [all …]

/arch/sh/mm/cache-sh2a.c

sh2a__flush_wback_region():
    begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
    end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
          & ~(L1_CACHE_BYTES-1);
    …
    for (v = begin; v < end; v += L1_CACHE_BYTES) {
    …
    for (v = begin; v < end; v += L1_CACHE_BYTES)

sh2a__flush_purge_region():
    begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
    end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
          & ~(L1_CACHE_BYTES-1);
    …
    for (v = begin; v < end; v+=L1_CACHE_BYTES) {

sh2a__flush_invalidate_region():
    begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
    [all …]

/arch/sh/mm/cache-sh3.c

sh3__flush_wback_region():
    begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
    end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
          & ~(L1_CACHE_BYTES-1);
    …
    for (v = begin; v < end; v+=L1_CACHE_BYTES) {

sh3__flush_purge_region():
    begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
    end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
          & ~(L1_CACHE_BYTES-1);
    …
    for (v = begin; v < end; v+=L1_CACHE_BYTES) {

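cache-sh2.c, cache-sh2a.c and cache-sh3.c each build the same family of region operations on the aligned begin/end loop shown above; only the per-line action differs. A sketch of the usual contract, with the per-line primitives left hypothetical (the real code pokes memory-mapped cache address arrays rather than calling functions):

    #define L1_CACHE_BYTES 16UL  /* assumed line size */

    /* Hypothetical per-line primitives. */
    extern void line_writeback(unsigned long v);  /* copy dirty data out, keep line valid */
    extern void line_purge(unsigned long v);      /* copy dirty data out, then invalidate */
    extern void line_invalidate(unsigned long v); /* drop the line; dirty data is lost   */

    /*
     * wback:      before a device reads the buffer (DMA out).
     * purge:      before the buffer is recycled for unrelated data.
     * invalidate: after a device wrote the buffer (DMA in), so the CPU
     *             never sees a stale cached copy.
     */
    static void apply(void (*op)(unsigned long),
                      unsigned long begin, unsigned long end)
    {
        for (unsigned long v = begin; v < end; v += L1_CACHE_BYTES)
            op(v);
    }
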
/arch/mn10300/include/asm/cache.h

    #define L1_CACHE_DISPARITY  (L1_CACHE_NENTRIES * L1_CACHE_BYTES)
    …
    #define L1_CACHE_DISPARITY  L1_CACHE_NENTRIES * L1_CACHE_BYTES
    …
    #define ARCH_DMA_MINALIGN   L1_CACHE_BYTES
    …
        (ENTRY) * L1_CACHE_BYTES, u32)
    …
    __SYSREG(0xc8400000 + 0 * L1_CACHE_WAYDISP + (ENTRY) * L1_CACHE_BYTES, u32)
    __SYSREG(0xc8400000 + 1 * L1_CACHE_WAYDISP + (ENTRY) * L1_CACHE_BYTES, u32)
    __SYSREG(0xc8400000 + 2 * L1_CACHE_WAYDISP + (ENTRY) * L1_CACHE_BYTES, u32)
    __SYSREG(0xc8400000 + 3 * L1_CACHE_WAYDISP + (ENTRY) * L1_CACHE_BYTES, u32)
    (one accessor per way; the macro-name lines between them are elided
    in the search output)
    …
        (ENTRY) * L1_CACHE_BYTES + (OFF) * 4, u32)
    …
        (ENTRY) * L1_CACHE_BYTES, u32)
    [all …]

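On mn10300, L1_CACHE_BYTES doubles as the stride of the cache-control registers: the register for way w, entry e sits at base + w * L1_CACHE_WAYDISP + e * L1_CACHE_BYTES, which is exactly what the four __SYSREG(0xc8400000 + N * L1_CACHE_WAYDISP + …) lines spell out, one per way. A sketch of that address computation; the geometry constants here are assumptions, only the 0xc8400000 base comes from the excerpt:

    #include <stdint.h>

    #define L1_CACHE_BYTES     16U          /* assumed per-entry stride */
    #define L1_CACHE_NENTRIES  256U         /* assumed entries per way */
    #define L1_CACHE_WAYDISP   (L1_CACHE_NENTRIES * L1_CACHE_BYTES)
    #define CACHE_TAG_BASE     0xc8400000U  /* base from the header above */

    /* Address of the tag register for a given (way, entry) pair. */
    static inline volatile uint32_t *cache_tag_reg(unsigned way, unsigned entry)
    {
        return (volatile uint32_t *)(uintptr_t)(CACHE_TAG_BASE
                + way * L1_CACHE_WAYDISP
                + entry * L1_CACHE_BYTES);
    }
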
/arch/frv/lib/cache.S

    andi    gr8,~(L1_CACHE_BYTES-1),gr8
    …
    addi    gr8,#L1_CACHE_BYTES,gr8

(the same andi/addi pair recurs in each of the file's four cache routines,
at four separate match sites in the search output)

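The cache.S pattern is the region loop in FRV assembly: andi masks the address in gr8 down to a line boundary, and each pass ends with addi stepping gr8 one line forward. The same control flow in C, with the per-line cache operation left abstract:

    #define L1_CACHE_BYTES 64UL  /* assumed line size */

    /* Hypothetical per-line op (a flush or invalidate instruction in the real file). */
    extern void cache_line_op(unsigned long addr);

    static void frv_region_loop(unsigned long start, unsigned long end)
    {
        start &= ~(L1_CACHE_BYTES - 1);   /* andi gr8,~(L1_CACHE_BYTES-1),gr8 */
        while (start < end) {
            cache_line_op(start);
            start += L1_CACHE_BYTES;      /* addi gr8,#L1_CACHE_BYTES,gr8 */
        }
    }
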
/arch/blackfin/include/asm/cache.h

    #define L1_CACHE_BYTES      (1 << L1_CACHE_SHIFT)
    #define SMP_CACHE_BYTES     L1_CACHE_BYTES
    …
    #define ARCH_DMA_MINALIGN   L1_CACHE_BYTES
    …
    __attribute__((__aligned__(L1_CACHE_BYTES), \

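Blackfin derives L1_CACHE_BYTES as 1 << L1_CACHE_SHIFT, as do most headers below. The shift guarantees a power of two, which is what makes every ~(L1_CACHE_BYTES - 1) mask in this listing correct. A compile-time statement of that invariant, plus the two helpers it enables (macro names here are illustrative, not kernel API):

    #define L1_CACHE_SHIFT 5
    #define L1_CACHE_BYTES (1UL << L1_CACHE_SHIFT)

    /* A power of two has exactly one bit set, so x & (x - 1) == 0. */
    _Static_assert((L1_CACHE_BYTES & (L1_CACHE_BYTES - 1)) == 0,
                   "line masks require a power-of-two line size");

    #define CACHE_ROUND_DOWN(x) ((x) & ~(L1_CACHE_BYTES - 1))
    #define CACHE_ROUND_UP(x)   CACHE_ROUND_DOWN((x) + L1_CACHE_BYTES - 1)
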
/arch/score/mm/cache.c

flush_data_cache_page():
    for (i = 0; i < (PAGE_SIZE / L1_CACHE_BYTES); i += L1_CACHE_BYTES) {
    …
        addr += L1_CACHE_BYTES;

flush_dcache_range():
    start = start & ~(L1_CACHE_BYTES - 1);
    end = end & ~(L1_CACHE_BYTES - 1);
    …
    for (i = 0; i < size; i += L1_CACHE_BYTES) {
    …
        start += L1_CACHE_BYTES;

flush_icache_range():
    start = start & ~(L1_CACHE_BYTES - 1);
    end = end & ~(L1_CACHE_BYTES - 1);
    …
    for (i = 0; i < size; i += L1_CACHE_BYTES) {
    …
        start += L1_CACHE_BYTES;

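A concrete walk-through of the masking in flush_dcache_range(), assuming 32-byte lines so the mask clears the low five bits:

    #include <stdio.h>

    int main(void)
    {
        /* L1_CACHE_BYTES == 32 (assumed); ~(32 - 1) == ~31 clears bits 0-4. */
        unsigned long start = 0x1234;   /* mid-line address */
        unsigned long end   = 0x1301;

        start &= ~(32UL - 1);   /* 0x1234 -> 0x1220 */
        end   &= ~(32UL - 1);   /* 0x1301 -> 0x1300 */

        printf("%#lx %#lx\n", start, end);  /* prints 0x1220 0x1300 */
        return 0;
    }

Note that score masks end down, unlike the SH routines above, which round end up; whether a region ending mid-line gets its last line flushed therefore hinges on the size computation feeding the loop.
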
/arch/parisc/include/asm/cache.h

    #define L1_CACHE_BYTES 64
    …
    #define L1_CACHE_BYTES 32
    …
    #define SMP_CACHE_BYTES L1_CACHE_BYTES
    …
    #define ARCH_DMA_MINALIGN   L1_CACHE_BYTES

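parisc selects a 64- or 32-byte line by CPU family and then, like the mn10300, blackfin and arc headers above, sets ARCH_DMA_MINALIGN to L1_CACHE_BYTES. The point is non-coherent DMA: if a DMA buffer shared a cache line with unrelated data, invalidating that line around a transfer would also destroy the neighbour's bytes. A sketch of a DMA-safe buffer declaration (the struct and sizes are invented for illustration):

    #define ARCH_DMA_MINALIGN 64  /* assumed: the largest line size in play */

    struct rx_desc {
        /* Aligned to a line boundary AND sized as a whole number of
         * lines, so no other field can share a line with the buffer. */
        unsigned char buf[512] __attribute__((aligned(ARCH_DMA_MINALIGN)));
        unsigned int  len;      /* lands in the line after buf */
    };
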
/arch/alpha/include/asm/cache.h

    # define L1_CACHE_BYTES     64
    …
    # define L1_CACHE_BYTES     32
    …
    #define SMP_CACHE_BYTES    L1_CACHE_BYTES

/arch/arm/lib/copy_page.S

    #define COPY_COUNT (PAGE_SZ / (2 * L1_CACHE_BYTES) PLD( -1 ))
    …
        PLD(    pld     [r1, #L1_CACHE_BYTES]           )
    …
    1:  PLD(    pld     [r1, #2 * L1_CACHE_BYTES])
        PLD(    pld     [r1, #3 * L1_CACHE_BYTES])
    …
    .rept   (2 * L1_CACHE_BYTES / 16 - 1)

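copy_page.S interleaves PLD preloads with the copy: COPY_COUNT sizes the loop in two-line chunks, and the pld [r1, #N * L1_CACHE_BYTES] hints keep fetching a few lines ahead of the reads so the loads at the top of the loop hit. A C rendering of the prefetch-ahead idea, with the distance and sizes assumed:

    #include <string.h>

    #define PAGE_SZ        4096UL
    #define L1_CACHE_BYTES 32UL   /* assumed line size */
    #define PLD_AHEAD      3      /* lines of lead, like the #3 * L1_CACHE_BYTES pld */

    static void copy_page_prefetched(void *dst, const void *src)
    {
        const char *s = src;
        char *d = dst;

        for (unsigned long off = 0; off < PAGE_SZ; off += L1_CACHE_BYTES) {
            /* Request a line well ahead of the one being copied; ARM's
             * pld is a hint that never faults, so running a few lines
             * past the end of the page is harmless. */
            __builtin_prefetch(s + off + PLD_AHEAD * L1_CACHE_BYTES);
            memcpy(d + off, s + off, L1_CACHE_BYTES);
        }
    }
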
/arch/hexagon/include/asm/cache.h

    #define L1_CACHE_BYTES      (1 << L1_CACHE_SHIFT)
    …
    #define __cacheline_aligned     __aligned(L1_CACHE_BYTES)
    #define ____cacheline_aligned   __aligned(L1_CACHE_BYTES)

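Hexagon defines __cacheline_aligned and ____cacheline_aligned as plain alignment attributes. Their typical use is keeping independently written hot data out of a shared line: two CPUs updating neighbours within one line would bounce it between their caches (false sharing). An illustrative use, with the counters invented:

    #define L1_CACHE_BYTES 32  /* assumed */
    #define ____cacheline_aligned __attribute__((aligned(L1_CACHE_BYTES)))

    /* Counters updated by different CPUs: the alignment gives each its
     * own line, so one CPU's increments don't invalidate the other's. */
    struct stats {
        unsigned long rx_packets ____cacheline_aligned;
        unsigned long tx_packets ____cacheline_aligned;
    };
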
/arch/arc/include/asm/cache.h

    #define L1_CACHE_BYTES      (1 << L1_CACHE_SHIFT)
    …
    #define ARC_ICACHE_LINE_LEN L1_CACHE_BYTES
    #define ARC_DCACHE_LINE_LEN L1_CACHE_BYTES
    …
    #define ARCH_DMA_MINALIGN   L1_CACHE_BYTES

/arch/frv/include/asm/cache.h

    #define L1_CACHE_BYTES      (1 << L1_CACHE_SHIFT)
    …
    #define __cacheline_aligned     __attribute__((aligned(L1_CACHE_BYTES)))
    #define ____cacheline_aligned   __attribute__((aligned(L1_CACHE_BYTES)))

/arch/mn10300/proc-mn2ws0050/include/proc/cache.h

    #define L1_CACHE_BYTES      32  /* bytes per entry */
    …
        +(L1_CACHE_NWAYS * L1_CACHE_NENTRIES * L1_CACHE_BYTES)
    …
        +(L1_CACHE_NWAYS * L1_CACHE_NENTRIES * L1_CACHE_BYTES)

/arch/mn10300/proc-mn103e010/include/proc/cache.h

    #define L1_CACHE_BYTES      16  /* bytes per entry */
    …
        +(L1_CACHE_NWAYS * L1_CACHE_NENTRIES * L1_CACHE_BYTES)
    …
        +(L1_CACHE_NWAYS * L1_CACHE_NENTRIES * L1_CACHE_BYTES)

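The two mn10300 processor headers differ only in geometry: 32-byte lines on mn2ws0050 versus 16 on mn103e010, and both bound an address range with L1_CACHE_NWAYS * L1_CACHE_NENTRIES * L1_CACHE_BYTES, i.e. the total data capacity a loop over every line must walk. A worked instance; the way and entry counts are assumed, only the 16-byte line size comes from the header above:

    #define L1_CACHE_NWAYS    4    /* assumed */
    #define L1_CACHE_NENTRIES 256  /* assumed */
    #define L1_CACHE_BYTES    16   /* from proc-mn103e010 above */

    /* 4 ways x 256 entries x 16 bytes = 16384 bytes of cache data. */
    enum { L1_CACHE_SIZE = L1_CACHE_NWAYS * L1_CACHE_NENTRIES * L1_CACHE_BYTES };

    _Static_assert(L1_CACHE_SIZE == 16 * 1024, "16 KiB with these parameters");
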
/arch/xtensa/include/asm/cache.h

    #define L1_CACHE_BYTES      XCHAL_DCACHE_LINESIZE
    #define SMP_CACHE_BYTES     L1_CACHE_BYTES
    …
    #define ARCH_DMA_MINALIGN   L1_CACHE_BYTES

/arch/arc/kernel/vmlinux.lds.S

    INIT_TEXT_SECTION(L1_CACHE_BYTES)
    …
    INIT_SETUP(L1_CACHE_BYTES)
    …
    PERCPU_SECTION(L1_CACHE_BYTES)
    …
    EXCEPTION_TABLE(L1_CACHE_BYTES)
    …
    RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)

/arch/unicore32/kernel/vmlinux.lds.S

    PERCPU_SECTION(L1_CACHE_BYTES)
    …
    RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
    …
    EXCEPTION_TABLE(L1_CACHE_BYTES)

/arch/microblaze/include/asm/cache.h

    #define L1_CACHE_BYTES      (1 << L1_CACHE_SHIFT)
    …
    #define SMP_CACHE_BYTES     L1_CACHE_BYTES

/arch/m68k/include/asm/cache.h

    #define L1_CACHE_BYTES      (1 << L1_CACHE_SHIFT)
    …
    #define ARCH_DMA_MINALIGN   L1_CACHE_BYTES

/arch/unicore32/include/asm/cache.h

    #define L1_CACHE_BYTES      (1 << L1_CACHE_SHIFT)
    …
    #define ARCH_DMA_MINALIGN   L1_CACHE_BYTES

/arch/arm64/include/asm/cache.h

    #define L1_CACHE_BYTES      (1 << L1_CACHE_SHIFT)
    …
    #define ARCH_DMA_MINALIGN   L1_CACHE_BYTES

/arch/frv/kernel/vmlinux.lds.S

    PERCPU_SECTION(L1_CACHE_BYTES)
    …
    CACHELINE_ALIGNED_DATA(L1_CACHE_BYTES)
    …
    . = ALIGN(L1_CACHE_BYTES);
    …
    . = ALIGN(L1_CACHE_BYTES);

/arch/mips/include/asm/cache.h

    #define L1_CACHE_BYTES      (1 << L1_CACHE_SHIFT)
    …
    #define SMP_CACHE_BYTES     L1_CACHE_BYTES