/arch/sh/kernel/cpu/sh3/
    probe.c
        53  boot_cpu_data.dcache.ways = 4;  in cpu_probe()
        54  boot_cpu_data.dcache.entry_shift = 4;  in cpu_probe()
        55  boot_cpu_data.dcache.linesz = L1_CACHE_BYTES;  in cpu_probe()
        56  boot_cpu_data.dcache.flags = 0;  in cpu_probe()
        63  boot_cpu_data.dcache.way_incr = (1 << 11);  in cpu_probe()
        64  boot_cpu_data.dcache.entry_mask = 0x7f0;  in cpu_probe()
        65  boot_cpu_data.dcache.sets = 128;  in cpu_probe()
        70  boot_cpu_data.dcache.way_incr = (1 << 12);  in cpu_probe()
        71  boot_cpu_data.dcache.entry_mask = 0xff0;  in cpu_probe()
        72  boot_cpu_data.dcache.sets = 256;  in cpu_probe()
        [all …]
/arch/mips/mm/
    c-octeon.c
        204  c->dcache.linesz = 128;  in probe_octeon()
        206  c->dcache.sets = 2; /* CN5XXX has two Dcache sets */  in probe_octeon()
        208  c->dcache.sets = 1; /* CN3XXX has one Dcache set */  in probe_octeon()
        209  c->dcache.ways = 64;  in probe_octeon()
        211  c->dcache.sets * c->dcache.ways * c->dcache.linesz;  in probe_octeon()
        212  c->dcache.waybit = ffs(dcache_size / c->dcache.ways) - 1;  in probe_octeon()
        223  c->dcache.linesz = 128;  in probe_octeon()
        224  c->dcache.ways = 32;  in probe_octeon()
        225  c->dcache.sets = 8;  in probe_octeon()
        226  dcache_size = c->dcache.sets * c->dcache.ways * c->dcache.linesz;  in probe_octeon()
        [all …]
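The probe_octeon() lines above derive the total D-cache size and the per-way stride from the three geometry fields. A minimal standalone sketch of that arithmetic, using the CN5XXX values from the listing (the struct here is an illustrative stand-in, not the kernel's cache descriptor):

    #include <stdio.h>
    #include <strings.h>            /* ffs() */

    /* Illustrative mirror of the dcache fields used in probe_octeon(). */
    struct dcache_geom {
        unsigned int linesz;        /* bytes per cache line */
        unsigned int sets;          /* sets per way */
        unsigned int ways;          /* associativity */
        unsigned int waybit;        /* log2 of the per-way stride */
    };

    int main(void)
    {
        /* CN5XXX branch above: 128-byte lines, 2 sets, 64 ways. */
        struct dcache_geom d = { .linesz = 128, .sets = 2, .ways = 64 };
        unsigned long dcache_size = d.sets * d.ways * d.linesz;    /* 16384 bytes */

        d.waybit = ffs((int)(dcache_size / d.ways)) - 1;           /* ffs(256) - 1 = 8 */
        printf("dcache: %lu bytes, waybit %u\n", dcache_size, d.waybit);
        return 0;
    }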
    c-r4k.c
        1043  c->dcache.linesz = 16 << ((config & CONF_DB) >> 4);  in probe_pcache()
        1044  c->dcache.ways = 2;  in probe_pcache()
        1045  c->dcache.waybit= __ffs(dcache_size/2);  in probe_pcache()
        1058  c->dcache.linesz = 16 << ((config & CONF_DB) >> 4);  in probe_pcache()
        1059  c->dcache.ways = 2;  in probe_pcache()
        1060  c->dcache.waybit = 0;  in probe_pcache()
        1072  c->dcache.linesz = 16 << ((config & CONF_DB) >> 4);  in probe_pcache()
        1073  c->dcache.ways = 4;  in probe_pcache()
        1074  c->dcache.waybit = 0;  in probe_pcache()
        1093  c->dcache.linesz = 16 << ((config & CONF_DB) >> 4);  in probe_pcache()
        [all …]
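In these probe_pcache() branches the line size is decoded from the DB bit of the CP0 Config register; the shift by 4 in the expression tells us CONF_DB is bit 4, so 16 << ((config & CONF_DB) >> 4) yields 16 bytes when the bit is clear and 32 bytes when it is set. A small self-contained check of that decode (CONF_DB is restated locally for the example):

    #include <assert.h>

    #define CONF_DB (1u << 4)   /* D-cache block-size bit, as implied by the >> 4 above */

    /* Same expression as the probe_pcache() hits above. */
    static unsigned int dcache_linesz(unsigned int config)
    {
        return 16 << ((config & CONF_DB) >> 4);
    }

    int main(void)
    {
        assert(dcache_linesz(0) == 16);        /* DB = 0 -> 16-byte lines */
        assert(dcache_linesz(CONF_DB) == 32);  /* DB = 1 -> 32-byte lines */
        return 0;
    }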
    c-tx39.c
        295  unsigned long dc_lsize = current_cpu_data.dcache.linesz;  in tx39_flush_cache_sigtramp()
        326  current_cpu_data.dcache.ways = 1;  in tx39_probe_cache()
        327  current_cpu_data.dcache.linesz = 4;  in tx39_probe_cache()
        332  current_cpu_data.dcache.ways = 2;  in tx39_probe_cache()
        333  current_cpu_data.dcache.linesz = 16;  in tx39_probe_cache()
        339  current_cpu_data.dcache.ways = 1;  in tx39_probe_cache()
        340  current_cpu_data.dcache.linesz = 16;  in tx39_probe_cache()
        408  (dcache_size / current_cpu_data.dcache.ways) - 1,  in tx39_cache_init()
        415  current_cpu_data.dcache.waysize = dcache_size / current_cpu_data.dcache.ways;  in tx39_cache_init()
        419  current_cpu_data.dcache.sets =  in tx39_cache_init()
        [all …]
/arch/sh/kernel/cpu/sh2/
    probe.c
        20  boot_cpu_data.dcache.ways = 4;  in cpu_probe()
        21  boot_cpu_data.dcache.way_incr = (1<<12);  in cpu_probe()
        22  boot_cpu_data.dcache.sets = 256;  in cpu_probe()
        23  boot_cpu_data.dcache.entry_shift = 4;  in cpu_probe()
        24  boot_cpu_data.dcache.linesz = L1_CACHE_BYTES;  in cpu_probe()
        25  boot_cpu_data.dcache.flags = 0;  in cpu_probe()
        30  boot_cpu_data.dcache.flags |= SH_CACHE_COMBINED;  in cpu_probe()
        31  boot_cpu_data.icache = boot_cpu_data.dcache;  in cpu_probe()
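The last two hits above handle the unified-cache configuration: the probe tags the D-cache descriptor with SH_CACHE_COMBINED and then copies it wholesale into the I-cache slot, so both views report the same geometry. A reduced sketch of that pattern (the struct layout and the flag value are illustrative stand-ins, not the kernel's definitions):

    #define SH_CACHE_COMBINED (1u << 2)   /* illustrative value only */

    struct cache_info_sketch {
        unsigned int ways, sets, entry_shift, linesz;
        unsigned long way_incr, flags;
    };

    struct cpuinfo_sketch {
        struct cache_info_sketch icache, dcache;
    };

    static struct cpuinfo_sketch boot_cpu;

    void cpu_probe_sketch(void)
    {
        boot_cpu.dcache.ways        = 4;
        boot_cpu.dcache.way_incr    = 1UL << 12;
        boot_cpu.dcache.sets        = 256;
        boot_cpu.dcache.entry_shift = 4;
        boot_cpu.dcache.linesz      = 16;   /* stands in for L1_CACHE_BYTES */
        boot_cpu.dcache.flags       = 0;

        /* Unified cache: flag it, then mirror the descriptor into icache. */
        boot_cpu.dcache.flags |= SH_CACHE_COMBINED;
        boot_cpu.icache = boot_cpu.dcache;
    }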
/arch/sh/kernel/cpu/
    init.c
        131  waysize = current_cpu_data.dcache.sets;  in cache_init()
        142  waysize <<= current_cpu_data.dcache.entry_shift;  in cache_init()
        150  ways = current_cpu_data.dcache.ways;  in cache_init()
        158  addr += current_cpu_data.dcache.linesz)  in cache_init()
        161  addrstart += current_cpu_data.dcache.way_incr;  in cache_init()
        173  if (current_cpu_data.dcache.ways > 1)  in cache_init()
        207  l1d_cache_shape = CACHE_DESC_SHAPE(current_cpu_data.dcache);  in detect_cache_shape()
        209  if (current_cpu_data.dcache.flags & SH_CACHE_COMBINED)  in detect_cache_shape()
        316  current_cpu_data.dcache.entry_mask = current_cpu_data.dcache.way_incr -  in cpu_init()
        317  current_cpu_data.dcache.linesz;  in cpu_init()
        [all …]
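Taken together, the cache_init() and cpu_init() hits above describe how the SH code walks its cache address array: one way spans sets << entry_shift bytes, each way is scanned linesz bytes at a time, the next way is reached by adding way_incr, and entry_mask is later derived as way_incr - linesz. A condensed sketch of that walk, with the MMIO store to the address array replaced by a hypothetical stub:

    struct sh_dcache_sketch {
        unsigned int ways, sets, entry_shift, linesz;
        unsigned long way_incr, entry_mask;
    };

    /* Stand-in for the store that invalidates one address-array entry. */
    static void touch_cache_entry(unsigned long addr)
    {
        (void)addr;
    }

    void cache_init_sketch(struct sh_dcache_sketch *dc, unsigned long array_base)
    {
        unsigned long waysize = (unsigned long)dc->sets << dc->entry_shift;
        unsigned long addrstart = array_base;   /* base of the cache address array */
        unsigned int way;

        for (way = 0; way < dc->ways; way++) {
            unsigned long addr;

            for (addr = addrstart; addr < addrstart + waysize; addr += dc->linesz)
                touch_cache_entry(addr);

            addrstart += dc->way_incr;          /* step to the next way's slice */
        }

        /* Relation used by the cpu_init() hits above: */
        dc->entry_mask = dc->way_incr - dc->linesz;
    }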
/arch/sh/kernel/cpu/sh2a/
    probe.c
        46  boot_cpu_data.dcache.ways = 4;  in cpu_probe()
        47  boot_cpu_data.dcache.way_incr = (1 << 11);  in cpu_probe()
        48  boot_cpu_data.dcache.sets = 128;  in cpu_probe()
        49  boot_cpu_data.dcache.entry_shift = 4;  in cpu_probe()
        50  boot_cpu_data.dcache.linesz = L1_CACHE_BYTES;  in cpu_probe()
        51  boot_cpu_data.dcache.flags = 0;  in cpu_probe()
        59  boot_cpu_data.icache = boot_cpu_data.dcache;  in cpu_probe()
/arch/sh/mm/
    cache-sh7705.c
        36  ways = current_cpu_data.dcache.ways;  in cache_wback_all()
        37  waysize = current_cpu_data.dcache.sets;  in cache_wback_all()
        38  waysize <<= current_cpu_data.dcache.entry_shift;  in cache_wback_all()
        47  addr += current_cpu_data.dcache.linesz) {  in cache_wback_all()
        58  addrstart += current_cpu_data.dcache.way_incr;  in cache_wback_all()
        104  ways = current_cpu_data.dcache.ways;  in __flush_dcache_page()
        105  waysize = current_cpu_data.dcache.sets;  in __flush_dcache_page()
        106  waysize <<= current_cpu_data.dcache.entry_shift;  in __flush_dcache_page()
        115  addr += current_cpu_data.dcache.linesz) {  in __flush_dcache_page()
        125  addrstart += current_cpu_data.dcache.way_incr;  in __flush_dcache_page()
    cache.c
        62  if (boot_cpu_data.dcache.n_aliases && page_mapped(page) &&  in copy_to_user_page()
        69  if (boot_cpu_data.dcache.n_aliases)  in copy_to_user_page()
        81  if (boot_cpu_data.dcache.n_aliases && page_mapped(page) &&  in copy_from_user_page()
        88  if (boot_cpu_data.dcache.n_aliases)  in copy_from_user_page()
        100  if (boot_cpu_data.dcache.n_aliases && page_mapped(from) &&  in copy_user_highpage()
        140  if (!boot_cpu_data.dcache.n_aliases)  in __update_cache()
        156  if (boot_cpu_data.dcache.n_aliases && page_mapped(page) &&  in __flush_anon_page()
        177  if (boot_cpu_data.dcache.n_aliases == 0)  in flush_cache_mm()
        185  if (boot_cpu_data.dcache.n_aliases == 0)  in flush_cache_dup_mm()
        262  boot_cpu_data.dcache.ways,  in emit_cache_params()
        [all …]
    cache-sh4.c
        154  (current_cpu_data.dcache.sets <<  in flush_dcache_all()
        155  current_cpu_data.dcache.entry_shift) *  in flush_dcache_all()
        156  current_cpu_data.dcache.ways;  in flush_dcache_all()
        158  entry_offset = 1 << current_cpu_data.dcache.entry_shift;  in flush_dcache_all()
        242  map_coherent = (current_cpu_data.dcache.n_aliases &&  in sh4_flush_cache_page()
        293  if (boot_cpu_data.dcache.n_aliases == 0)  in sh4_flush_cache_range()
        321  struct cache_info *dcache;  in __flush_cache_one()  local
        326  dcache = &boot_cpu_data.dcache;  in __flush_cache_one()
        328  way_count = dcache->ways;  in __flush_cache_one()
        329  way_incr = dcache->way_incr;  in __flush_cache_one()
    cache-sh3.c
        47  for (j = 0; j < current_cpu_data.dcache.ways; j++) {  in sh3__flush_wback_region()
        51  addr = addrstart | (v & current_cpu_data.dcache.entry_mask);  in sh3__flush_wback_region()
        63  addrstart += current_cpu_data.dcache.way_incr;  in sh3__flush_wback_region()
        88  (v & current_cpu_data.dcache.entry_mask) | SH_CACHE_ASSOC;  in sh3__flush_purge_region()
    cache-sh5.c
        249  cpu_data->dcache.entry_mask) >>  in sh64_dcache_purge_sets()
        250  cpu_data->dcache.entry_shift;  in sh64_dcache_purge_sets()
        254  set_offset &= (cpu_data->dcache.sets - 1);  in sh64_dcache_purge_sets()
        256  (set_offset << cpu_data->dcache.entry_shift);  in sh64_dcache_purge_sets()
        265  eaddr1 = eaddr0 + cpu_data->dcache.way_size *  in sh64_dcache_purge_sets()
        266  cpu_data->dcache.ways;  in sh64_dcache_purge_sets()
        269  eaddr += cpu_data->dcache.way_size) {  in sh64_dcache_purge_sets()
        274  eaddr1 = eaddr0 + cpu_data->dcache.way_size *  in sh64_dcache_purge_sets()
        275  cpu_data->dcache.ways;  in sh64_dcache_purge_sets()
        278  eaddr += cpu_data->dcache.way_size) {  in sh64_dcache_purge_sets()
        [all …]
    cache-shx3.c
        27  if (boot_cpu_data.dcache.n_aliases || boot_cpu_data.icache.n_aliases) {  in shx3_cache_init()
        31  boot_cpu_data.dcache.n_aliases = 0;  in shx3_cache_init()
    cache-sh2a.c
        61  nr_ways = current_cpu_data.dcache.ways;  in sh2a__flush_wback_region()
        69  end = begin + (nr_ways * current_cpu_data.dcache.way_size);  in sh2a__flush_wback_region()
        108  int nr_ways = current_cpu_data.dcache.ways;  in sh2a__flush_purge_region()
/arch/sh/kernel/cpu/sh4/
    probe.c
        47  boot_cpu_data.dcache.way_incr = (1 << 14);  in cpu_probe()
        48  boot_cpu_data.dcache.entry_shift = 5;  in cpu_probe()
        49  boot_cpu_data.dcache.sets = 512;  in cpu_probe()
        50  boot_cpu_data.dcache.ways = 1;  in cpu_probe()
        51  boot_cpu_data.dcache.linesz = L1_CACHE_BYTES;  in cpu_probe()
        71  boot_cpu_data.dcache.ways = 4;  in cpu_probe()
        175  boot_cpu_data.dcache.ways = 2;  in cpu_probe()
        180  boot_cpu_data.dcache.ways = 2;  in cpu_probe()
        196  boot_cpu_data.dcache.ways = 2;  in cpu_probe()
        213  if (boot_cpu_data.dcache.ways > 1) {  in cpu_probe()
        [all …]
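The SH-4 defaults above are internally consistent: entry_shift = 5 means 32-byte entries, so 512 sets span 512 << 5 = 16384 bytes per way, which is exactly the way_incr of 1 << 14; the later hits only adjust the ways count for the 1-, 2- and 4-way variants. A one-line check of that relation:

    #include <assert.h>

    int main(void)
    {
        unsigned int sets = 512, entry_shift = 5;   /* values from cpu_probe() above */
        unsigned long way_incr = 1UL << 14;

        assert(((unsigned long)sets << entry_shift) == way_incr);   /* 16 KiB per way */
        return 0;
    }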
/arch/sh/kernel/cpu/sh5/
    probe.c
        62  boot_cpu_data.dcache = boot_cpu_data.icache;  in cpu_probe()
        68  set_bit(SH_CACHE_MODE_WT, &(boot_cpu_data.dcache.flags));  in cpu_probe()
        70  set_bit(SH_CACHE_MODE_WB, &(boot_cpu_data.dcache.flags));  in cpu_probe()
/arch/mips/include/asm/
    r4kcache.h
        615  __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 16, )
        618  __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 32, )
        622  __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 64, )
        625  __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 128, )
        629  __BUILD_BLAST_CACHE(inv_d, dcache, Index_Writeback_Inv_D, Hit_Invalidate_D, 16, )
        630  __BUILD_BLAST_CACHE(inv_d, dcache, Index_Writeback_Inv_D, Hit_Invalidate_D, 32, )
        652  __BUILD_BLAST_USER_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D,
        655  __BUILD_BLAST_USER_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D,
        658  __BUILD_BLAST_USER_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D,
        685  __BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, protected_, )
        [all …]
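Each __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, <lsize>, ) line above stamps out a blast_dcache<lsize>() helper specialised for one line size. A simplified, self-contained approximation of what the 32-byte index variant does (the real macro unrolls the inner loop and issues MIPS CACHE instructions; the struct and the cache_op() stub below are illustrative stand-ins, not the kernel's definitions):

    #define INDEX_WRITEBACK_INV_D 0x01    /* illustrative opcode value */

    struct mips_dcache_sketch {
        unsigned long waysize;   /* bytes covered by one way */
        unsigned int ways;       /* associativity */
        unsigned int waybit;     /* log2 of the way-select stride */
    };

    /* Stand-in for the wrapper around the MIPS CACHE instruction. */
    static void cache_op(unsigned int op, unsigned long addr)
    {
        (void)op; (void)addr;
    }

    void blast_dcache32_sketch(const struct mips_dcache_sketch *dc, unsigned long index_base)
    {
        unsigned long end    = index_base + dc->waysize;
        unsigned long ws_inc = 1UL << dc->waybit;
        unsigned long ws_end = (unsigned long)dc->ways << dc->waybit;
        unsigned long ws, addr;

        for (ws = 0; ws < ws_end; ws += ws_inc)              /* each way */
            for (addr = index_base; addr < end; addr += 32)  /* each 32-byte line */
                cache_op(INDEX_WRITEBACK_INV_D, addr | ws);
    }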
/arch/avr32/mm/
    cache.c
        28  linesz = boot_cpu_data.dcache.linesz;  in invalidate_dcache_region()
        56  linesz = boot_cpu_data.dcache.linesz;  in clean_dcache_region()
        69  linesz = boot_cpu_data.dcache.linesz;  in flush_dcache_region()
        94  linesz = boot_cpu_data.dcache.linesz;  in __flush_icache_range()
        110  linesz = boot_cpu_data.dcache.linesz;  in flush_icache_range()
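All five routines above begin by loading the probed D-cache line size and then walk the region one line at a time. A generic sketch of that shape (flush_one_line() is a hypothetical stand-in; the real avr32 helpers issue a cache instruction per line, and an invalidate-only path has to be more careful with partial lines at the region edges):

    #include <stddef.h>

    static void flush_one_line(unsigned long addr)
    {
        (void)addr;   /* stand-in for the per-line cache operation */
    }

    void flush_dcache_region_sketch(void *start, size_t size, unsigned int linesz)
    {
        unsigned long begin = (unsigned long)start & ~(unsigned long)(linesz - 1);
        unsigned long end   = (unsigned long)start + size;
        unsigned long addr;

        for (addr = begin; addr < end; addr += linesz)
            flush_one_line(addr);
    }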
/arch/mn10300/mm/
    Kconfig.cache
        26  The dcache operates in delayed write-back mode. It must be manually
        33  The dcache operates in immediate write-through mode. Writes are
        44  The icache and dcache are disabled.
        95  Set if we need the dcache flushing before the icache is invalidated.
        100  Set if we need the icache to be invalidated, even if the dcache is in
        112  Set if the debugger needs to flush the dcache and invalidate the
        121  Set if the debugger needs to flush the dcache and invalidate the
        146  Set if the debugger does not need to flush the dcache and/or
    cache-dbg-flush-by-tag.S
        34  # firstly flush the dcache
        98  # determine the dcache purge control reg address
    cache-inv-by-tag.S
        83  # Invalidate the entire dcache
        107  # Invalidate a range of addresses on a page in the dcache
        159  add d0,a0 # starting dcache tag RAM
        171  # disable the dcache
    cache-inv-by-reg.S
        75  # Invalidate the entire dcache
        99  # Invalidate a range of addresses on a page in the dcache
/arch/avr32/kernel/
    cpu.c
        289  boot_cpu_data.dcache.ways = 1 << SYSREG_BFEXT(DASS, config1);  in setup_processor()
        290  boot_cpu_data.dcache.sets = 1 << SYSREG_BFEXT(DSET, config1);  in setup_processor()
        291  boot_cpu_data.dcache.linesz = 1 << (tmp + 1);  in setup_processor()
        344  dcache_size = boot_cpu_data.dcache.ways *  in c_show()
        345  boot_cpu_data.dcache.sets *  in c_show()
        346  boot_cpu_data.dcache.linesz;  in c_show()
        372  boot_cpu_data.dcache.ways,  in c_show()
        373  boot_cpu_data.dcache.sets,  in c_show()
        374  boot_cpu_data.dcache.linesz);  in c_show()
/arch/frv/lib/
    cache.S
        20  # Write back a range of dcache
        40  # Invalidate a range of dcache and icache
        81  # Write back and invalidate a range of dcache and icache
/arch/powerpc/kernel/
    cacheinfo.c
        366  struct cache *dcache, *icache;  in cache_do_one_devnode_split()  local
        371  dcache = new_cache(CACHE_TYPE_DATA, level, node);  in cache_do_one_devnode_split()
        374  if (!dcache || !icache)  in cache_do_one_devnode_split()
        377  dcache->next_local = icache;  in cache_do_one_devnode_split()
        379  return dcache;  in cache_do_one_devnode_split()
        381  release_cache(dcache);  in cache_do_one_devnode_split()
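cache_do_one_devnode_split() above handles a device-tree node that describes both a data and an instruction cache at the same level: it allocates one cache object per type, links them through next_local, and returns the data cache as the head of the chain, with an error path that releases the allocations. A reduced sketch of that shape (the kernel's new_cache() also takes the device-tree node; the simplified allocator and types here are stand-ins):

    #include <stdlib.h>

    enum cache_type { CACHE_TYPE_DATA, CACHE_TYPE_INSTRUCTION };

    struct cache {
        enum cache_type type;
        int level;
        struct cache *next_local;   /* next cache described by the same node */
    };

    static struct cache *new_cache(enum cache_type type, int level)
    {
        struct cache *c = calloc(1, sizeof(*c));

        if (c) {
            c->type = type;
            c->level = level;
        }
        return c;
    }

    static void release_cache(struct cache *c)
    {
        free(c);   /* free(NULL) is a no-op, so the error path below is safe */
    }

    /* Split node: data cache heads the chain, instruction cache follows. */
    struct cache *devnode_split_sketch(int level)
    {
        struct cache *dcache = new_cache(CACHE_TYPE_DATA, level);
        struct cache *icache = new_cache(CACHE_TYPE_INSTRUCTION, level);

        if (!dcache || !icache)
            goto err;

        dcache->next_local = icache;
        return dcache;

    err:
        release_cache(dcache);
        release_cache(icache);
        return NULL;
    }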