cache.S

#include <config.h>
#include <mpc86xx.h>
#include <ppc_asm.tmpl>
#include <ppc_defs.h>
#include <asm/cache.h>
#include <asm/mmu.h>

#ifndef CACHE_LINE_SIZE
# define CACHE_LINE_SIZE L1_CACHE_BYTES
#endif

#if CACHE_LINE_SIZE == 128
#define LG_CACHE_LINE_SIZE 7
#elif CACHE_LINE_SIZE == 32
#define LG_CACHE_LINE_SIZE 5
#elif CACHE_LINE_SIZE == 16
#define LG_CACHE_LINE_SIZE 4
#elif CACHE_LINE_SIZE == 8
#define LG_CACHE_LINE_SIZE 3
#else
# error "Invalid cache line size!"
#endif
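
/*
 * LG_CACHE_LINE_SIZE is log2(CACHE_LINE_SIZE); the routines below shift
 * a byte count right by it to get a cache-line count (e.g. with 32-byte
 * lines, a 4096-byte page is 4096 >> 5 = 128 lines).
 */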

/*
 * Most of this code is taken from 74xx_7xx/cache.S
 * and then cleaned up a bit
 */

/*
 * Invalidate L1 instruction cache.
 */
_GLOBAL(invalidate_l1_instruction_cache)
	/* use invalidate-all bit in HID0 */
	mfspr	r3,HID0
	ori	r3,r3,HID0_ICFI
	mtspr	HID0,r3
	isync
	blr

/*
 * Invalidate L1 data cache.
 */
_GLOBAL(invalidate_l1_data_cache)
	mfspr	r3,HID0
	ori	r3,r3,HID0_DCFI
	mtspr	HID0,r3
	isync
	blr

/*
 * Flush data cache.
 */
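/*
 * Rather than walking the cache with dcbf, this routine reads a word
 * every 4 bytes from address 0 up to CACHE_LINE_SIZE << 16 (2 MiB with
 * 32-byte lines), so that any dirty L1 data lines are displaced by the
 * incoming data and written back.  It relies on that low region being
 * ordinary cacheable RAM at least as large as the L1 data cache.
 */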
_GLOBAL(flush_dcache)
	lis	r3,0			/* r3 = current address, start at 0 */
	lis	r5,CACHE_LINE_SIZE	/* r5 = end address (CACHE_LINE_SIZE << 16) */
flush:
	cmp	0,1,r3,r5
	bge	done
	lwz	r5,0(r3)		/* touch a word; clobbers r5 */
	lis	r5,CACHE_LINE_SIZE	/* reload the end address */
	addi	r3,r3,0x4
	b	flush
done:
	blr

/*
 * Write any modified data cache blocks out to memory
 * and invalidate the corresponding instruction cache blocks.
 * This is a no-op on the 601.
 *
 * flush_icache_range(unsigned long start, unsigned long stop)
 */
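/*
 * Rough C equivalent of the code below (illustrative sketch only, not
 * part of the build; dcbst()/icbi()/sync()/isync() are hypothetical
 * stand-ins for the corresponding instructions):
 *
 *	unsigned long addr, first = start & ~(CACHE_LINE_SIZE - 1);
 *
 *	for (addr = first; addr < stop; addr += CACHE_LINE_SIZE)
 *		dcbst(addr);		// push modified data to memory
 *	sync();				// wait for the writes to complete
 *	for (addr = first; addr < stop; addr += CACHE_LINE_SIZE)
 *		icbi(addr);		// invalidate stale instructions
 *	sync();
 *	isync();			// refetch from the updated image
 */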
_GLOBAL(flush_icache_range)
	li	r5,CACHE_LINE_SIZE-1
	andc	r3,r3,r5		/* align start down to a cache line */
	subf	r4,r3,r4		/* r4 = stop - aligned start */
	add	r4,r4,r5		/* round up to a whole line */
	srwi.	r4,r4,LG_CACHE_LINE_SIZE /* r4 = number of cache lines */
	beqlr				/* nothing to do */
	mtctr	r4
	mr	r6,r3			/* keep the start address for icbi */
1:	dcbst	0,r3
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	mtctr	r4
2:	icbi	0,r6
	addi	r6,r6,CACHE_LINE_SIZE
	bdnz	2b
	sync				/* additional sync needed on g4 */
	isync
	blr

/*
 * Write any modified data cache blocks out to memory.
 * Does not invalidate the corresponding cache lines (especially for
 * any corresponding instruction cache).
 *
 * clean_dcache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(clean_dcache_range)
	li	r5,CACHE_LINE_SIZE-1
	andc	r3,r3,r5		/* align r3 down to cache line */
	subf	r4,r3,r4		/* r4 = offset of stop from start of cache line */
	add	r4,r4,r5		/* r4 += cache_line_size-1 */
	srwi.	r4,r4,LG_CACHE_LINE_SIZE /* r4 = number of cache lines to flush */
	beqlr				/* if r4 == 0 return */
	mtctr	r4			/* ctr = r4 */
	sync
1:	dcbst	0,r3
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	blr

/*
 * Flush a particular page from the data cache to RAM.
 * Note: this is necessary because the instruction cache does *not*
 * snoop from the data cache.
 *
 * void __flush_page_to_ram(void *page)
 */
_GLOBAL(__flush_page_to_ram)
	rlwinm	r3,r3,0,0,19		/* Get page base address */
	li	r4,4096/CACHE_LINE_SIZE	/* Number of lines in a page */
	mtctr	r4
	mr	r6,r3
0:	dcbst	0,r3			/* Write line to ram */
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	0b
	sync
	mtctr	r4
1:	icbi	0,r6
	addi	r6,r6,CACHE_LINE_SIZE
	bdnz	1b
	sync
	isync
	blr

/*
 * Flush a particular page from the instruction cache.
 * Note: this is necessary because the instruction cache does *not*
 * snoop from the data cache.
 *
 * void __flush_icache_page(void *page)
 */
_GLOBAL(__flush_icache_page)
	li	r4,4096/CACHE_LINE_SIZE	/* Number of lines in a page */
	mtctr	r4
1:	icbi	0,r3
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	sync
	isync
	blr

/*
 * Clear a page using the dcbz instruction, which doesn't cause any
 * memory traffic (except to write out any cache lines which get
 * displaced). This only works on cacheable memory.
 */
_GLOBAL(clear_page)
	li	r0,4096/CACHE_LINE_SIZE
	mtctr	r0
1:	dcbz	0,r3
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	blr

/*
 * Enable L1 Instruction cache
 */
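/*
 * Enabling is done by writing HID0 twice: first with both ICE and ICFI
 * set, so the cache is flash-invalidated as it is switched on, then
 * with ICE alone so ICFI is cleared again.  Any leftover ILOCK is
 * cleared first.
 */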
_GLOBAL(icache_enable)
	mfspr	r3, HID0
	li	r5, HID0_ICFI|HID0_ILOCK
	andc	r3, r3, r5
	ori	r3, r3, HID0_ICE
	ori	r5, r3, HID0_ICFI
	mtspr	HID0, r5
	mtspr	HID0, r3
	isync
	blr

/*
 * Disable L1 Instruction cache
 */
_GLOBAL(icache_disable)
	mflr	r4
	bl	invalidate_l1_instruction_cache	/* uses r3 */
	sync
	mtlr	r4
	mfspr	r3, HID0
	li	r5, 0
	ori	r5, r5, HID0_ICE
	andc	r3, r3, r5
	mtspr	HID0, r3
	isync
	blr

/*
 * Is instruction cache enabled?
 */
_GLOBAL(icache_status)
	mfspr	r3, HID0
	andi.	r3, r3, HID0_ICE
	blr

_GLOBAL(l1dcache_enable)
	mfspr	r3, HID0
	li	r5, HID0_DCFI|HID0_DLOCK
	andc	r3, r3, r5
	mtspr	HID0, r3		/* no invalidate, unlock */
	ori	r3, r3, HID0_DCE
	ori	r5, r3, HID0_DCFI
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable */
	sync
	blr

/*
 * Enable data cache(s) - L1 and optionally L2
 * Calls l2cache_enable. LR saved in r5
 */
_GLOBAL(dcache_enable)
	mfspr	r3, HID0
	li	r5, HID0_DCFI|HID0_DLOCK
	andc	r3, r3, r5
	mtspr	HID0, r3		/* no invalidate, unlock */
	ori	r3, r3, HID0_DCE
	ori	r5, r3, HID0_DCFI
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable */
	sync
#ifdef CONFIG_SYS_L2
	mflr	r5
	bl	l2cache_enable		/* uses r3 and r4 */
	sync
	mtlr	r5
#endif
	blr

/*
 * Disable data cache(s) - L1 and optionally L2
 * Calls flush_dcache and l2cache_disable_no_flush.
 * LR saved in r4
 */
_GLOBAL(dcache_disable)
	mflr	r4			/* save link register */
	bl	flush_dcache		/* uses r3 and r5 */
	sync
	mfspr	r3, HID0
	li	r5, HID0_DCFI|HID0_DLOCK
	andc	r3, r3, r5
	mtspr	HID0, r3		/* no invalidate, unlock */
	li	r5, HID0_DCE|HID0_DCFI
	andc	r3, r3, r5		/* no enable, no invalidate */
	mtspr	HID0, r3
	sync
#ifdef CONFIG_SYS_L2
	bl	l2cache_disable_no_flush /* uses r3 */
#endif
	mtlr	r4			/* restore link register */
	blr

/*
 * Is data cache enabled?
 */
_GLOBAL(dcache_status)
	mfspr	r3, HID0
	andi.	r3, r3, HID0_DCE
	blr

/*
 * Invalidate L2 cache using L2I.  If the L2 is currently enabled it is
 * disabled first.
 */
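/*
 * The first block checks L2CR[L2E] (bit 0); if the L2 is on it is
 * turned off (preceded by a dssall when AltiVec data streams may be in
 * flight).  L2I is then set to start the hardware flash invalidate,
 * and the final loop polls L2CR until the hardware clears L2I again,
 * signalling that the invalidate is complete.
 */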
_GLOBAL(l2cache_invalidate)
	mfspr	r3, l2cr
	rlwinm.	r3, r3, 0, 0, 0		/* isolate L2E (bit 0) */
	beq	1f			/* L2 already disabled */
	mfspr	r3, l2cr
	rlwinm	r3, r3, 0, 1, 31	/* clear L2E */
#ifdef CONFIG_ALTIVEC
	dssall
#endif
	sync
	mtspr	l2cr, r3		/* disable L2 */
	sync
1:	mfspr	r3, l2cr
	oris	r3, r3, L2CR_L2I@h	/* set L2I to start the invalidate */
	mtspr	l2cr, r3
invl2:
	mfspr	r3, l2cr
	andis.	r3, r3, L2CR_L2I@h
	bne	invl2			/* wait until hardware clears L2I */
	blr

/*
 * Enable L2 cache
 * Calls l2cache_invalidate. LR is saved in r4
 */
_GLOBAL(l2cache_enable)
	mflr	r4			/* save link register */
	bl	l2cache_invalidate	/* uses r3 */
	sync
	lis	r3, L2_ENABLE@h
	ori	r3, r3, L2_ENABLE@l
	mtspr	l2cr, r3
	isync
	mtlr	r4			/* restore link register */
	blr

/*
 * Disable L2 cache
 * Calls flush_dcache. LR is saved in r4
 */
_GLOBAL(l2cache_disable)
	mflr	r4			/* save link register */
	bl	flush_dcache		/* uses r3 and r5 */
	sync
	mtlr	r4			/* restore link register */
l2cache_disable_no_flush:		/* provide way to disable L2 w/o flushing */
	lis	r3, L2_INIT@h
	ori	r3, r3, L2_INIT@l
	mtspr	l2cr, r3
	isync
	blr
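
/*
 * Note: L2_ENABLE and L2_INIT come from the board configuration.
 * L2_ENABLE is normally L2_INIT with L2CR_L2E set, so writing L2_INIT
 * here configures the L2 but leaves it disabled.
 */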