/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * Cache-handling routines for MIPS CPUs
 *
 * Copyright (c) 2003 Wolfgang Denk <wd@denx.de>
 */

#include <asm-offsets.h>
#include <config.h>
#include <asm/asm.h>
#include <asm/regdef.h>
#include <asm/mipsregs.h>
#include <asm/addrspace.h>
#include <asm/cacheops.h>
#include <asm/cm.h>

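/*
 * f_fill64 - store \val to the 64 bytes of memory beginning at
 * (\offset)(\dst), using LONG_S so the same code serves both 32-bit and
 * 64-bit configurations.
 */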
	.macro f_fill64 dst, offset, val
	LONG_S	\val, (\offset + 0 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 1 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 2 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 3 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 4 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 5 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 6 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 7 * LONGSIZE)(\dst)
#if LONGSIZE == 4
	LONG_S	\val, (\offset + 8 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 9 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 10 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 11 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 12 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 13 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 14 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 15 * LONGSIZE)(\dst)
#endif
	.endm

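/*
 * cache_loop - perform cache operation \op on every line of the region
 * [\curr, \end), stepping by \line_sz bytes.
 */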
	.macro cache_loop	curr, end, line_sz, op
10:	cache		\op, 0(\curr)
	PTR_ADDU	\curr, \curr, \line_sz
	bne		\curr, \end, 10b
	.endm

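/*
 * l1_info - probe Config1 and return a primary cache's total size in
 * bytes (\sz) and its line size in bytes (\line_sz). \off selects the
 * cache: MIPS_CONF1_IA_SHF for the I-cache, MIPS_CONF1_DA_SHF for the
 * D-cache. \sz is left at zero if the cache is not present.
 */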
	.macro l1_info	sz, line_sz, off
	.set	push
	.set	noat

	mfc0	$1, CP0_CONFIG, 1

	/* detect line size */
	srl	\line_sz, $1, \off + MIPS_CONF1_DL_SHF - MIPS_CONF1_DA_SHF
	andi	\line_sz, \line_sz, (MIPS_CONF1_DL >> MIPS_CONF1_DL_SHF)
	move	\sz, zero
	beqz	\line_sz, 10f
	li	\sz, 2
	sllv	\line_sz, \sz, \line_sz

	/* detect associativity */
	srl	\sz, $1, \off + MIPS_CONF1_DA_SHF - MIPS_CONF1_DA_SHF
	andi	\sz, \sz, (MIPS_CONF1_DA >> MIPS_CONF1_DA_SHF)
	addiu	\sz, \sz, 1

	/* sz *= line_sz */
	mul	\sz, \sz, \line_sz

	/* detect log32(sets) */
	srl	$1, $1, \off + MIPS_CONF1_DS_SHF - MIPS_CONF1_DA_SHF
	andi	$1, $1, (MIPS_CONF1_DS >> MIPS_CONF1_DS_SHF)
	addiu	$1, $1, 1
	andi	$1, $1, 0x7

	/* sz <<= log32(sets) */
	sllv	\sz, \sz, $1

	/* sz *= 32 */
	li	$1, 32
	mul	\sz, \sz, $1
10:
	.set	pop
	.endm

/*
 * mips_cache_reset - low-level initialisation of the primary caches
 *
 * This routine initialises the primary caches to ensure that they have good
 * parity. It must be called by the ROM before any cached locations are used
 * to prevent the possibility of data with bad parity being written to memory.
 *
 * To initialise the instruction cache it is essential that a source of data
 * with good parity is available. This routine will initialise an area of
 * memory starting at the configured cache index base to be used as a source
 * of parity.
 *
 * Note that this function does not follow the standard calling convention &
 * may clobber typically callee-saved registers.
 *
 * RETURNS: N/A
 *
 */
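/*
 * Register assignments used to carry the probed cache configuration
 * through the routine, including across the calls to change_k0_cca.
 */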
#define R_RETURN	s0
#define R_IC_SIZE	s1
#define R_IC_LINE	s2
#define R_DC_SIZE	s3
#define R_DC_LINE	s4
#define R_L2_SIZE	s5
#define R_L2_LINE	s6
#define R_L2_BYPASSED	s7
#define R_L2_L2C	t8
LEAF(mips_cache_reset)
	move	R_RETURN, ra

#ifdef CONFIG_MIPS_L2_CACHE
	/*
	 * For an L2 cache to be present, Config2 must be present. If it
	 * isn't, we proceed knowing there's no L2 cache.
	 */
	move	R_L2_SIZE, zero
	move	R_L2_LINE, zero
	move	R_L2_BYPASSED, zero
	move	R_L2_L2C, zero
	mfc0	t0, CP0_CONFIG, 1
	bgez	t0, l2_probe_done

	/*
	 * From MIPSr6 onwards the L2 cache configuration might not be reported
	 * by Config2. The Config5.L2C bit indicates whether this is the case,
	 * and if it is then we need knowledge of where else to look. For cores
	 * from Imagination Technologies this is a CM GCR.
	 */
# if __mips_isa_rev >= 6
	/* Check that Config5 exists */
	mfc0	t0, CP0_CONFIG, 2
	bgez	t0, l2_probe_cop0
	mfc0	t0, CP0_CONFIG, 3
	bgez	t0, l2_probe_cop0
	mfc0	t0, CP0_CONFIG, 4
	bgez	t0, l2_probe_cop0

	/* Check Config5.L2C is set */
	mfc0	t0, CP0_CONFIG, 5
	and	R_L2_L2C, t0, MIPS_CONF5_L2C
	beqz	R_L2_L2C, l2_probe_cop0

	/* Config5.L2C is set */
# ifdef CONFIG_MIPS_CM
	/* The CM will provide L2 configuration */
	PTR_LI	t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
	lw	t1, GCR_L2_CONFIG(t0)
	bgez	t1, l2_probe_done

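	/*
	 * Derive the L2 geometry from GCR_L2_CONFIG: the line size is
	 * 2 << LINESZ bytes and the total size is
	 * line size * (ASSOC + 1) * (64 << SETSZ) bytes. A LINESZ field of
	 * zero indicates that no L2 cache is present.
	 */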
	ext	R_L2_LINE, t1, \
		GCR_L2_CONFIG_LINESZ_SHIFT, GCR_L2_CONFIG_LINESZ_BITS
	beqz	R_L2_LINE, l2_probe_done
	li	t2, 2
	sllv	R_L2_LINE, t2, R_L2_LINE

	ext	t2, t1, GCR_L2_CONFIG_ASSOC_SHIFT, GCR_L2_CONFIG_ASSOC_BITS
	addiu	t2, t2, 1
	mul	R_L2_SIZE, R_L2_LINE, t2

	ext	t2, t1, GCR_L2_CONFIG_SETSZ_SHIFT, GCR_L2_CONFIG_SETSZ_BITS
	sllv	R_L2_SIZE, R_L2_SIZE, t2
	li	t2, 64
	mul	R_L2_SIZE, R_L2_SIZE, t2

	/* Bypass the L2 cache so that we can init the L1s early */
	or	t1, t1, GCR_L2_CONFIG_BYPASS
	sw	t1, GCR_L2_CONFIG(t0)
	sync
	li	R_L2_BYPASSED, 1

	/* Zero the L2 tag registers */
	sw	zero, GCR_L2_TAG_ADDR(t0)
	sw	zero, GCR_L2_TAG_ADDR_UPPER(t0)
	sw	zero, GCR_L2_TAG_STATE(t0)
	sw	zero, GCR_L2_TAG_STATE_UPPER(t0)
	sw	zero, GCR_L2_DATA(t0)
	sw	zero, GCR_L2_DATA_UPPER(t0)
	sync
# else
	/* We don't know how to retrieve L2 configuration on this system */
# endif
	b	l2_probe_done
# endif

	/*
	 * For pre-r6 systems, or r6 systems with Config5.L2C==0, probe the L2
	 * cache configuration from the cop0 Config2 register.
	 */
l2_probe_cop0:
	mfc0	t0, CP0_CONFIG, 2

	srl	R_L2_LINE, t0, MIPS_CONF2_SL_SHF
	andi	R_L2_LINE, R_L2_LINE, MIPS_CONF2_SL >> MIPS_CONF2_SL_SHF
	beqz	R_L2_LINE, l2_probe_done
	li	t1, 2
	sllv	R_L2_LINE, t1, R_L2_LINE

	srl	t1, t0, MIPS_CONF2_SA_SHF
	andi	t1, t1, MIPS_CONF2_SA >> MIPS_CONF2_SA_SHF
	addiu	t1, t1, 1
	mul	R_L2_SIZE, R_L2_LINE, t1

	srl	t1, t0, MIPS_CONF2_SS_SHF
	andi	t1, t1, MIPS_CONF2_SS >> MIPS_CONF2_SS_SHF
	sllv	R_L2_SIZE, R_L2_SIZE, t1
	li	t1, 64
	mul	R_L2_SIZE, R_L2_SIZE, t1

	/* Attempt to bypass the L2 so that we can init the L1s early */
	or	t0, t0, MIPS_CONF2_L2B
	mtc0	t0, CP0_CONFIG, 2
	ehb
	mfc0	t0, CP0_CONFIG, 2
	and	R_L2_BYPASSED, t0, MIPS_CONF2_L2B

	/* Zero the L2 tag registers */
	mtc0	zero, CP0_TAGLO, 4
	ehb
l2_probe_done:
#endif

#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
	li	R_IC_SIZE, CONFIG_SYS_ICACHE_SIZE
	li	R_IC_LINE, CONFIG_SYS_ICACHE_LINE_SIZE
#else
	l1_info	R_IC_SIZE, R_IC_LINE, MIPS_CONF1_IA_SHF
#endif

#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
	li	R_DC_SIZE, CONFIG_SYS_DCACHE_SIZE
	li	R_DC_LINE, CONFIG_SYS_DCACHE_LINE_SIZE
#else
	l1_info	R_DC_SIZE, R_DC_LINE, MIPS_CONF1_DA_SHF
#endif

#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD

	/* Determine the largest L1 cache size */
#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
#if CONFIG_SYS_ICACHE_SIZE > CONFIG_SYS_DCACHE_SIZE
	li	v0, CONFIG_SYS_ICACHE_SIZE
#else
	li	v0, CONFIG_SYS_DCACHE_SIZE
#endif
#else
	move	v0, R_IC_SIZE
	sltu	t1, R_IC_SIZE, R_DC_SIZE
	movn	v0, R_DC_SIZE, t1
#endif
	/*
	 * Now clear that much memory, starting from the configured cache
	 * index base, to serve as a source of data with good parity.
	 */
	PTR_LI	a0, CKSEG1ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	PTR_ADDU a1, a0, v0
2:	PTR_ADDIU a0, 64
	f_fill64 a0, -64, zero
	bne	a0, a1, 2b

#endif /* CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD */

#ifdef CONFIG_MIPS_L2_CACHE
	/*
	 * If the L2 is bypassed, init the L1 first so that we can execute the
	 * rest of the cache initialisation using the L1 instruction cache.
	 */
	bnez	R_L2_BYPASSED, l1_init

l2_init:
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	PTR_ADDU t1, t0, R_L2_SIZE
1:	cache	INDEX_STORE_TAG_SD, 0(t0)
	PTR_ADDU t0, t0, R_L2_LINE
	bne	t0, t1, 1b

	/*
	 * If the L2 was bypassed then we already initialised the L1s before
	 * the L2, so we are now done.
	 */
	bnez	R_L2_BYPASSED, l2_unbypass
#endif

	/*
	 * The TagLo registers used depend upon the CPU implementation, but the
	 * architecture requires that it is safe for software to write to both
	 * TagLo selects 0 & 2 covering supported cases.
	 */
l1_init:
	mtc0	zero, CP0_TAGLO
	mtc0	zero, CP0_TAGLO, 2
	ehb

	/*
	 * The caches are probably in an indeterminate state, so we force good
	 * parity into them by doing an invalidate for each line. If
	 * CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD is set then we'll proceed to
	 * perform a load/fill & a further invalidate for each line, assuming
	 * that the bottom of RAM (having just been cleared) will generate good
	 * parity for the cache.
	 */

	/*
	 * Initialize the I-cache first,
	 */
	blez	R_IC_SIZE, 1f
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	PTR_ADDU t1, t0, R_IC_SIZE
	/* clear tag to invalidate */
	cache_loop	t0, t1, R_IC_LINE, INDEX_STORE_TAG_I
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
	/* fill once, so data field parity is correct */
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	cache_loop	t0, t1, R_IC_LINE, FILL
	/* invalidate again - prudent but not strictly necessary */
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	cache_loop	t0, t1, R_IC_LINE, INDEX_STORE_TAG_I
#endif
	sync

	/*
	 * Enable use of the I-cache by setting Config.K0. The code for this
	 * must be executed from KSEG1. Jump from KSEG0 to KSEG1 to do this.
	 * Jump back to KSEG0 after caches are enabled and insert an
	 * instruction hazard barrier.
	 */
	PTR_LA	t0, change_k0_cca
	li	t1, CPHYSADDR(~0)
	and	t0, t0, t1
	PTR_LI	t1, CKSEG1
	or	t0, t0, t1
	li	a0, CONF_CM_CACHABLE_NONCOHERENT
	jalr.hb	t0

	/*
	 * then initialize D-cache.
	 */
1:	blez	R_DC_SIZE, 3f
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	PTR_ADDU t1, t0, R_DC_SIZE
	/* clear all tags */
	cache_loop	t0, t1, R_DC_LINE, INDEX_STORE_TAG_D
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
	/* load from each line (in cached space) */
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
2:	LONG_L	zero, 0(t0)
	PTR_ADDU t0, R_DC_LINE
	bne	t0, t1, 2b
	/* clear all tags */
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	cache_loop	t0, t1, R_DC_LINE, INDEX_STORE_TAG_D
#endif
3:

#ifdef CONFIG_MIPS_L2_CACHE
	/* If the L2 isn't bypassed then we're done */
	beqz	R_L2_BYPASSED, return

	/* The L2 is bypassed - go initialise it */
	b	l2_init

l2_unbypass:
# if __mips_isa_rev >= 6
	beqz	R_L2_L2C, 1f

	li	t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
	lw	t1, GCR_L2_CONFIG(t0)
	xor	t1, t1, GCR_L2_CONFIG_BYPASS
	sw	t1, GCR_L2_CONFIG(t0)
	sync
	ehb
	b	2f
# endif
1:	mfc0	t0, CP0_CONFIG, 2
	xor	t0, t0, MIPS_CONF2_L2B
	mtc0	t0, CP0_CONFIG, 2
	ehb

2:
# ifdef CONFIG_MIPS_CM
	/* Config3 must exist for a CM to be present */
	mfc0	t0, CP0_CONFIG, 1
	bgez	t0, 2f
	mfc0	t0, CP0_CONFIG, 2
	bgez	t0, 2f

	/* Check Config3.CMGCR to determine CM presence */
	mfc0	t0, CP0_CONFIG, 3
	and	t0, t0, MIPS_CONF3_CMGCR
	beqz	t0, 2f

	/* Change Config.K0 to a coherent CCA */
	PTR_LA	t0, change_k0_cca
	li	a0, CONF_CM_CACHABLE_COW
	jalr	t0

	/*
	 * Join the coherent domain such that the caches of this core are kept
	 * coherent with those of other cores.
	 */
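	/*
	 * CM 3 and later expose a single coherence enable bit; older CMs
	 * instead require the individual coherence domains to be enabled.
	 */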
	PTR_LI	t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
	lw	t1, GCR_REV(t0)
	li	t2, GCR_REV_CM3
	li	t3, GCR_Cx_COHERENCE_EN
	bge	t1, t2, 1f
	li	t3, GCR_Cx_COHERENCE_DOM_EN
1:	sw	t3, GCR_Cx_COHERENCE(t0)
	ehb
2:
# endif
#endif

return:
	/* Ensure all cache operations complete before returning */
	sync
	jr	R_RETURN
END(mips_cache_reset)

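/*
 * change_k0_cca - write the cache coherency algorithm (CCA) passed in a0
 * to the Config.K0 field and return via jr.hb, so that the change has
 * taken effect before execution continues in the caller.
 */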
LEAF(change_k0_cca)
	mfc0	t0, CP0_CONFIG
#if __mips_isa_rev >= 2
	ins	t0, a0, 0, 3
#else
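	/*
	 * Without the ins instruction, merge the new CCA into the Config
	 * value read above using the xor/and/xor bitfield-insertion idiom,
	 * so that only the K0 field (the low 3 bits) is changed.
	 */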
	xor	a0, a0, t0
	andi	a0, a0, CONF_CM_CMASK
	xor	t0, a0, t0
#endif
	mtc0	t0, CP0_CONFIG

	jr.hb	ra
END(change_k0_cca)