#include <config.h>

#define P15_CTRL_RR     (1 << 14)   /* round-robin cache replacement bit */
#define P15_CTRL_IC     (1 << 12)   /* I-cache enable bit */
#define P15_CTRL_DC     (1 << 2)    /* D-cache enable bit */
#define P15_CTRL_MMU    (1 << 0)    /* MMU enable bit */

/* AP = 0b11 (full access) | domain 0 | bit 4 set | section entry (0b10) */
#define MMU_SEC_DESC    ((3 << 10) | (0 << 5) | (1 << 4) | (2 << 0))
#define MMU_SEC_CB      ((1 << 3) | (1 << 2))   /* cached, write-back */
#define MMU_SEC_NCB     ~((1 << 3) | (1 << 2))  /* mask: not cached, not buffered */

#define VM_EXEC         0x00000004

/* r0: page directory address, must align to 16KB */
.global mmu_pagedir_init
mmu_pagedir_init:

    /*
     * fill the page directory with 1:1 section mappings
     */
    add     r3, r0, #SZ_16K
    ldr     r2, =MMU_SEC_DESC           /* section descriptor template */
1:
    str     r2, [r0], #4                /* 1:1 section mapping */
    add     r2, r2, #SZ_1M              /* next section base address */
    teq     r0, r3
    bne     1b

    mov     pc, lr

/*
 * r0: page directory address, must align to 16KB
 * r1: start of cached MEM, must align to 1MB
 * r2: size of cached MEM, must align to 1MB
 */
.global mmu_pagedir_cached_range
mmu_pagedir_cached_range:

    /*
     * mark the page directory entries of the range cached, write-buffered
     */
    add     r1, r0, r1, lsr #18         /* r1 = r0 + (r1 >> 20) * 4: first entry of the range */
    add     r2, r1, r2, lsr #18         /* end of the range (exclusive) */

1:
    ldr     r0, [r1]
    orr     r0, r0, #MMU_SEC_CB
    str     r0, [r1], #4                /* store and advance entry pointer */

    cmp     r1, r2
    blt     1b

    mov     pc, lr

/*
 * r0: page directory address, must align to 16KB
 * r1: start of non-cached MEM, must align to 1MB
 * r2: size of non-cached MEM, must align to 1MB
 */
.global mmu_pagedir_nocached_range
mmu_pagedir_nocached_range:

    /*
     * mark the page directory entries of the range not cached, not buffered
     */
clean_loop:
    mrc     p15, 0, r15, c7, c14, 3     /* test, clean and invalidate D-cache */
    bne     clean_loop

    mov     r3, #0
    mcr     p15, 0, r3, c8, c7, 0       /* flush TLBs */

    add     r1, r0, r1, lsr #18         /* first entry of the range */
    add     r2, r1, r2, lsr #18         /* end of the range (exclusive) */

1:
    ldr     r0, [r1]
    and     r0, r0, #MMU_SEC_NCB        /* the section is not cached, not buffered */
    str     r0, [r1], #4

    cmp     r1, r2
    blt     1b

    mov     pc, lr

.global flush_cache_off
flush_cache_off:

    /*
     * invalidate (flush) the TLBs; the control register is read back
     * and rewritten unchanged
     */
    mrc     p15, 0, r0, c1, c0, 0       /* read control reg into r0 */
    mcr     p15, 0, r0, c1, c0, 0       /* write r0 back to control reg */

    mov     r0, #0
    mcr     p15, 0, r0, c8, c7, 0       /* flush TLBs */

    mov     pc, lr

/* r0: page directory address, must align to 16KB */
.global mmu_startup
mmu_startup:

    /*
     * enable MMU and D-cache
     */
    stmdb   sp!, {r0, lr}               /* save r0, lr across the call */
    bl      flush_cache_off
    ldmia   sp!, {r0, lr}

    mrc     p15, 0, r3, c1, c0, 0       /* read control reg */
    bic     r3, r3, #P15_CTRL_RR        /* clear round-robin replacement bit */
    orr     r3, r3, #P15_CTRL_MMU       /* MMU enable bit */
    orr     r3, r3, #P15_CTRL_DC        /* D-cache enable bit */

    mov     r2, #0
    mov     r1, #-1

    mcr     p15, 0, r0, c2, c0, 0       /* write page table base to TTBR */
    mcr     p15, 0, r1, c3, c0, 0       /* domain access control: all domains manager */
    mcr     p15, 0, r3, c1, c0, 0       /* enable MMU and D-cache */
    mcr     p15, 0, r2, c8, c7, 0       /* flush TLBs */
#if __ARM_ARCH__ >= 7
    isb
#endif

    mov     pc, lr

.global mmu_turnoff
mmu_turnoff:

    /*
     * disable D-cache and MMU
     */
    mrc     p15, 0, r3, c1, c0, 0       /* read control reg */
    bic     r3, r3, #P15_CTRL_DC        /* clear D-cache enable bit */
    bic     r3, r3, #P15_CTRL_MMU       /* clear MMU enable bit */
    mcr     p15, 0, r3, c1, c0, 0       /* write back control register */

#if __ARM_ARCH__ >= 7
    isb
#endif

    mov     pc, lr
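
/*
 * The MMU routines above are meant to be driven from C during board
 * bring-up.  A minimal usage sketch, assuming a hypothetical 16KB-aligned
 * page directory and DDR window (the symbol names and addresses below are
 * illustrative only, not taken from this file):
 *
 *   extern void mmu_pagedir_init(unsigned long *pagedir);
 *   extern void mmu_pagedir_cached_range(unsigned long *pagedir,
 *                                        unsigned long start,
 *                                        unsigned long size);
 *   extern void mmu_startup(unsigned long *pagedir);
 *   extern void mmu_turnoff(void);
 *
 *   static unsigned long pagedir[4096]
 *           __attribute__((aligned(16 * 1024)));  // 4096 section entries
 *
 *   void board_mmu_enable(void)
 *   {
 *       mmu_pagedir_init(pagedir);                // 1:1 map, all non-cached
 *       mmu_pagedir_cached_range(pagedir,
 *                                0x80000000,      // assumed DDR base, 1MB aligned
 *                                0x10000000);     // assumed DDR size, 1MB aligned
 *       mmu_startup(pagedir);                     // TTBR = pagedir, MMU + D-cache on
 *   }
 */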

.global dcache_stop
dcache_stop:

    mrc     p15, 0, r0, c1, c0, 0
    bic     r0, r0, #P15_CTRL_DC
    mcr     p15, 0, r0, c1, c0, 0

    mov     pc, lr

.global dcache_start
dcache_start:

    mrc     p15, 0, r0, c1, c0, 0
    orr     r0, r0, #P15_CTRL_DC
    mcr     p15, 0, r0, c1, c0, 0

    mov     pc, lr

.global dcache_stop_noflush
dcache_stop_noflush:

    mrc     p15, 0, r0, c1, c0, 0
    bic     r0, r0, #P15_CTRL_DC
    mcr     p15, 0, r0, c1, c0, 0

    mov     pc, lr

#if !defined(CONFIG_HI3516A) \
    && !defined(CONFIG_HI3536) \
    && !defined(CONFIG_HI3521A) \
    && !defined(CONFIG_HI3519) \
    && !defined(CONFIG_HI3519V101) \
    && !defined(CONFIG_HI3559) \
    && !defined(CONFIG_HI3559AV100ES)
.global dcache_flush_all
dcache_flush_all:
#if __ARM_ARCH__ >= 7
    mov     r0, #0                      /* set up for MCR */
    mcr     p15, 0, r0, c8, c7, 0       /* invalidate TLBs */
    mcr     p15, 0, r0, c7, c5, 0       /* invalidate I-cache */

    /* clean and invalidate L1 D-cache by set/way */
    mcr     p15, 2, r0, c0, c0, 0       /* select L1 data cache */

    /* read Cache Size Identification Register */
    mrc     p15, 1, r3, c0, c0, 0
    ldr     r1, =0x1ff
    and     r3, r1, r3, LSR #13         /* r3 = number of sets - 1 */
    mov     r0, #0
way_loop:
    mov     r1, #0                      /* r1 = set counter */
line_loop:
    mov     r2, r0, LSL #30
    orr     r2, r2, r1, LSL #5          /* r2 = set/way cache-op format */

    /* clean and invalidate line described by r2 */
    mcr     p15, 0, r2, c7, c14, 2
    add     r1, r1, #1                  /* increment set counter */
    cmp     r1, r3                      /* last set reached? */
    ble     line_loop                   /* if not, continue the set loop */
    add     r0, r0, #1                  /* else increment way counter */
    cmp     r0, #4                      /* last way reached? */
    blt     way_loop                    /* if not, continue the way loop */

    mov     pc, lr
#else
    stmfd   r13!, {r2, ip, lr}
    mov     r2, #VM_EXEC
    mov     ip, #0
1:  mrc     p15, 0, r15, c7, c14, 3     @ test, clean, invalidate D-cache
    bne     1b

    tst     r2, #VM_EXEC
    mcrne   p15, 0, ip, c7, c5, 0       @ invalidate I-cache
    mcrne   p15, 0, ip, c7, c10, 4      @ drain write buffer
    ldmfd   r13!, {r2, ip, pc}
#endif

.global dcache_inv_all
dcache_inv_all:
#if __ARM_ARCH__ >= 7
    mov     r0, #0                      /* set up for MCR */
    mcr     p15, 0, r0, c8, c7, 0       /* invalidate TLBs */
    mcr     p15, 0, r0, c7, c5, 0       /* invalidate I-cache */

    /* invalidate L1 D-cache by set/way */
    mcr     p15, 2, r0, c0, c0, 0       /* select L1 data cache */

    /* read Cache Size Identification Register */
    mrc     p15, 1, r3, c0, c0, 0
    ldr     r1, =0x1ff
    and     r3, r1, r3, LSR #13         /* r3 = number of sets - 1 */
    mov     r0, #0
way_lp:
    mov     r1, #0                      /* r1 = set counter */
line_lp:
    mov     r2, r0, LSL #30
    orr     r2, r2, r1, LSL #5          /* r2 = set/way cache-op format */
    mcr     p15, 0, r2, c7, c6, 2       /* invalidate line described by r2 */
    add     r1, r1, #1                  /* increment set counter */
    cmp     r1, r3                      /* last set reached? */
    ble     line_lp                     /* if not, continue the set loop */
    add     r0, r0, #1                  /* else increment way counter */
    cmp     r0, #4                      /* last way reached? */
    blt     way_lp                      /* if not, continue the way loop */

    mov     pc, lr
#else
    mov     r0, #0
    mcr     p15, 0, r0, c7, c7, 0       /* invalidate I-cache and D-cache */
    mov     pc, lr
#endif
#endif
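
/*
 * dcache_flush_all/dcache_inv_all (ARMv7 path) walk the L1 D-cache by
 * set/way.  The MCR operand is built as (way << 30) | (set << 5), which
 * assumes a 4-way cache with 32-byte lines; the set count comes from the
 * CCSIDR NumSets field read above.  A C sketch of the same loop structure,
 * where cp15_dccisw() is an illustrative stand-in for the
 * "mcr p15, 0, r2, c7, c14, 2" clean-and-invalidate operation:
 *
 *   void l1_dcache_clean_inv_all(unsigned int num_sets,
 *                                void (*cp15_dccisw)(unsigned int))
 *   {
 *       for (unsigned int way = 0; way < 4; way++)           // 4 ways assumed
 *           for (unsigned int set = 0; set < num_sets; set++)
 *               cp15_dccisw((way << 30) | (set << 5));       // set/way format
 *   }
 */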