cache_v7_asm.S

/* SPDX-License-Identifier: GPL-2.0+ */

#include <config.h>
#include <linux/linkage.h>
#include <linux/sizes.h>
#include <asm/system.h>

#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
#define ARM(x...)
#define THUMB(x...)	x
#else
#define ARM(x...)	x
#define THUMB(x...)
#endif

/*
 *	v7_flush_dcache_all()
 *
 *	Flush the whole D-cache.
 *
 *	Corrupted registers: r0-r7, r9-r11 (r6 only in Thumb mode)
 *
 *	Note: copied from arch/arm/mm/cache-v7.S of Linux 4.4
 */
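/*
 * Note on the set/way operand built below (explanatory sketch based on the
 * ARMv7 DCCISW/DCISW operand encoding; register names refer to the code
 * that follows): r11 combines the cache level from r10 in bits [3:1], the
 * way index (r4) shifted into the top bits by r5 = CLZ(max way index), and
 * the set index (r9) shifted left by r2 = log2(line length in bytes).
 */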
ENTRY(__v7_flush_dcache_all)
	dmb				@ ensure ordering with previous memory accesses
	mrc	p15, 1, r0, c0, c0, 1	@ read clidr
	mov	r3, r0, lsr #23		@ move LoC into position
	ands	r3, r3, #7 << 1		@ extract LoC*2 from clidr
	beq	finished		@ if loc is 0, then no need to clean
start_flush_levels:
	mov	r10, #0			@ start clean at cache level 0
flush_levels:
	add	r2, r10, r10, lsr #1	@ work out 3x current cache level
	mov	r1, r0, lsr r2		@ extract cache type bits from clidr
	and	r1, r1, #7		@ mask off the bits for current cache only
	cmp	r1, #2			@ see what cache we have at this level
	blt	skip			@ skip if no cache, or just i-cache
	mcr	p15, 2, r10, c0, c0, 0	@ select current cache level in cssr
	isb				@ isb to sync the new cssr & csidr
	mrc	p15, 1, r1, c0, c0, 0	@ read the new csidr
	and	r2, r1, #7		@ extract the length of the cache lines
	add	r2, r2, #4		@ add 4 (line length offset)
	movw	r4, #0x3ff
	ands	r4, r4, r1, lsr #3	@ find maximum number of the way size
	clz	r5, r4			@ find bit position of way size increment
	movw	r7, #0x7fff
	ands	r7, r7, r1, lsr #13	@ extract max number of the index size
loop1:
	mov	r9, r7			@ create working copy of max index
loop2:
	ARM(	orr	r11, r10, r4, lsl r5	)	@ factor way and cache number into r11
	THUMB(	lsl	r6, r4, r5		)
	THUMB(	orr	r11, r10, r6		)	@ factor way and cache number into r11
	ARM(	orr	r11, r11, r9, lsl r2	)	@ factor index number into r11
	THUMB(	lsl	r6, r9, r2		)
	THUMB(	orr	r11, r11, r6		)	@ factor index number into r11
	mcr	p15, 0, r11, c7, c14, 2	@ clean & invalidate by set/way
	subs	r9, r9, #1		@ decrement the index
	bge	loop2
	subs	r4, r4, #1		@ decrement the way
	bge	loop1
skip:
	add	r10, r10, #2		@ increment cache number
	cmp	r3, r10
	bgt	flush_levels
finished:
	mov	r10, #0			@ switch back to cache level 0
	mcr	p15, 2, r10, c0, c0, 0	@ select current cache level in cssr
	dsb	st
	isb
	bx	lr
ENDPROC(__v7_flush_dcache_all)

ENTRY(v7_flush_dcache_all)
	ARM(	stmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	stmfd	sp!, {r4-r7, r9-r11, lr}	)
	bl	__v7_flush_dcache_all
	ARM(	ldmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	ldmfd	sp!, {r4-r7, r9-r11, lr}	)
	bx	lr
ENDPROC(v7_flush_dcache_all)

/*
 *	v7_invalidate_dcache_all()
 *
 *	Invalidate the whole D-cache.
 *
 *	Corrupted registers: r0-r7, r9-r11 (r6 only in Thumb mode)
 *
 *	Note: copied from __v7_flush_dcache_all above with
 *		mcr	p15, 0, r11, c7, c14, 2
 *	replaced with:
 *		mcr	p15, 0, r11, c7, c6, 2
 */
ENTRY(__v7_invalidate_dcache_all)
	dmb				@ ensure ordering with previous memory accesses
	mrc	p15, 1, r0, c0, c0, 1	@ read clidr
	mov	r3, r0, lsr #23		@ move LoC into position
	ands	r3, r3, #7 << 1		@ extract LoC*2 from clidr
	beq	inval_finished		@ if loc is 0, then no need to invalidate
	mov	r10, #0			@ start invalidating at cache level 0
inval_levels:
	add	r2, r10, r10, lsr #1	@ work out 3x current cache level
	mov	r1, r0, lsr r2		@ extract cache type bits from clidr
	and	r1, r1, #7		@ mask off the bits for current cache only
	cmp	r1, #2			@ see what cache we have at this level
	blt	inval_skip		@ skip if no cache, or just i-cache
	mcr	p15, 2, r10, c0, c0, 0	@ select current cache level in cssr
	isb				@ isb to sync the new cssr & csidr
	mrc	p15, 1, r1, c0, c0, 0	@ read the new csidr
	and	r2, r1, #7		@ extract the length of the cache lines
	add	r2, r2, #4		@ add 4 (line length offset)
	movw	r4, #0x3ff
	ands	r4, r4, r1, lsr #3	@ find maximum number of the way size
	clz	r5, r4			@ find bit position of way size increment
	movw	r7, #0x7fff
	ands	r7, r7, r1, lsr #13	@ extract max number of the index size
inval_loop1:
	mov	r9, r7			@ create working copy of max index
inval_loop2:
	ARM(	orr	r11, r10, r4, lsl r5	)	@ factor way and cache number into r11
	THUMB(	lsl	r6, r4, r5		)
	THUMB(	orr	r11, r10, r6		)	@ factor way and cache number into r11
	ARM(	orr	r11, r11, r9, lsl r2	)	@ factor index number into r11
	THUMB(	lsl	r6, r9, r2		)
	THUMB(	orr	r11, r11, r6		)	@ factor index number into r11
	mcr	p15, 0, r11, c7, c6, 2	@ invalidate by set/way
	subs	r9, r9, #1		@ decrement the index
	bge	inval_loop2
	subs	r4, r4, #1		@ decrement the way
	bge	inval_loop1
inval_skip:
	add	r10, r10, #2		@ increment cache number
	cmp	r3, r10
	bgt	inval_levels
inval_finished:
	mov	r10, #0			@ switch back to cache level 0
	mcr	p15, 2, r10, c0, c0, 0	@ select current cache level in cssr
	dsb	st
	isb
	bx	lr
ENDPROC(__v7_invalidate_dcache_all)

ENTRY(v7_invalidate_dcache_all)
	ARM(	stmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	stmfd	sp!, {r4-r7, r9-r11, lr}	)
	bl	__v7_invalidate_dcache_all
	ARM(	ldmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	ldmfd	sp!, {r4-r7, r9-r11, lr}	)
	bx	lr
ENDPROC(v7_invalidate_dcache_all)
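
/*
 * Usage note (illustrative; the header that actually declares these symbols
 * is not part of this file): from C the outer wrappers are expected to be
 * called as argument-less functions, e.g.
 *
 *	void v7_flush_dcache_all(void);
 *	void v7_invalidate_dcache_all(void);
 *
 * The wrappers save and restore the callee-saved registers that the inner
 * __v7_* routines corrupt, so they preserve the AAPCS callee-saved state
 * and are safe to call directly from C.
 */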