#------------------------------------------------------------------------------
#
# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
# Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
#------------------------------------------------------------------------------
.text
.align 2

GCC_ASM_EXPORT (ArmInvalidateInstructionCache)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmDrainWriteBuffer)
GCC_ASM_EXPORT (ArmEnableMmu)
GCC_ASM_EXPORT (ArmDisableMmu)
GCC_ASM_EXPORT (ArmDisableCachesAndMmu)
GCC_ASM_EXPORT (ArmMmuEnabled)
GCC_ASM_EXPORT (ArmEnableDataCache)
GCC_ASM_EXPORT (ArmDisableDataCache)
GCC_ASM_EXPORT (ArmEnableInstructionCache)
GCC_ASM_EXPORT (ArmDisableInstructionCache)
GCC_ASM_EXPORT (ArmEnableSWPInstruction)
GCC_ASM_EXPORT (ArmEnableBranchPrediction)
GCC_ASM_EXPORT (ArmDisableBranchPrediction)
GCC_ASM_EXPORT (ArmSetLowVectors)
GCC_ASM_EXPORT (ArmSetHighVectors)
GCC_ASM_EXPORT (ArmV7AllDataCachesOperation)
GCC_ASM_EXPORT (ArmV7PerformPoUDataCacheOperation)
GCC_ASM_EXPORT (ArmDataMemoryBarrier)
GCC_ASM_EXPORT (ArmDataSyncronizationBarrier)
GCC_ASM_EXPORT (ArmInstructionSynchronizationBarrier)
GCC_ASM_EXPORT (ArmReadVBar)
GCC_ASM_EXPORT (ArmWriteVBar)
GCC_ASM_EXPORT (ArmEnableVFP)
GCC_ASM_EXPORT (ArmCallWFI)
GCC_ASM_EXPORT (ArmReadCbar)
GCC_ASM_EXPORT (ArmInvalidateInstructionAndDataTlb)
GCC_ASM_EXPORT (ArmReadMpidr)
GCC_ASM_EXPORT (ArmReadTpidrurw)
GCC_ASM_EXPORT (ArmWriteTpidrurw)
GCC_ASM_EXPORT (ArmIsArchTimerImplemented)
GCC_ASM_EXPORT (ArmReadIdPfr1)

.set DC_ON,      (0x1 << 2)
.set IC_ON,      (0x1 << 12)
.set CTRL_M_BIT, (1 << 0)
.set CTRL_C_BIT, (1 << 2)
.set CTRL_B_BIT, (1 << 7)
.set CTRL_I_BIT, (1 << 12)
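
# The constants above correspond to SCTLR bit positions: M (bit 0, MMU enable),
# C (bit 2, data cache enable), B (bit 7, legacy endianness) and I (bit 12,
# instruction cache enable); DC_ON and IC_ON are the same C and I bits. The
# routines below are AAPCS leaf functions, so the single argument (where one
# exists) is taken from r0 and the result is returned in r0.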

ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c6, 1   @ invalidate single data cache line
  dsb
  isb
  bx      lr

ASM_PFX(ArmCleanDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c10, 1  @ clean single data cache line
  dsb
  isb
  bx      lr

ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c14, 1  @ clean and invalidate single data cache line
  dsb
  isb
  bx      lr

ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c6, 2   @ Invalidate this line
  dsb
  isb
  bx      lr

ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c14, 2  @ Clean and Invalidate this line
  dsb
  isb
  bx      lr

ASM_PFX(ArmCleanDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c10, 2  @ Clean this line
  dsb
  isb
  bx      lr

ASM_PFX(ArmInvalidateInstructionCache):
  mcr     p15, 0, R0, c7, c5, 0   @ Invalidate entire instruction cache
  dsb
  isb
  bx      LR

ASM_PFX(ArmEnableMmu):
  mrc     p15, 0, R0, c1, c0, 0
  orr     R0, R0, #1
  mcr     p15, 0, R0, c1, c0, 0
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableMmu):
  mrc     p15, 0, R0, c1, c0, 0
  bic     R0, R0, #1
  mcr     p15, 0, R0, c1, c0, 0   @ Disable MMU
  mcr     p15, 0, R0, c8, c7, 0   @ Invalidate TLB
  mcr     p15, 0, R0, c7, c5, 6   @ Invalidate Branch predictor array
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableCachesAndMmu):
  mrc     p15, 0, r0, c1, c0, 0   @ Get control register
  bic     r0, r0, #CTRL_M_BIT     @ Disable MMU
  bic     r0, r0, #CTRL_C_BIT     @ Disable D Cache
  bic     r0, r0, #CTRL_I_BIT     @ Disable I Cache
  mcr     p15, 0, r0, c1, c0, 0   @ Write control register
  dsb
  isb
  bx      LR

ASM_PFX(ArmMmuEnabled):
  mrc     p15, 0, R0, c1, c0, 0
  and     R0, R0, #1
  bx      LR

ASM_PFX(ArmEnableDataCache):
  ldr     R1, =DC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  orr     R0, R0, R1              @ Set C bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableDataCache):
  ldr     R1, =DC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  bic     R0, R0, R1              @ Clear C bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmEnableInstructionCache):
  ldr     R1, =IC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  orr     R0, R0, R1              @ Set I bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableInstructionCache):
  ldr     R1, =IC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  bic     R0, R0, R1              @ Clear I bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmEnableSWPInstruction):
  mrc     p15, 0, r0, c1, c0, 0
  orr     r0, r0, #0x00000400
  mcr     p15, 0, r0, c1, c0, 0
  isb
  bx      LR

ASM_PFX(ArmEnableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0
  orr     r0, r0, #0x00000800
  mcr     p15, 0, r0, c1, c0, 0
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0
  bic     r0, r0, #0x00000800
  mcr     p15, 0, r0, c1, c0, 0
  dsb
  isb
  bx      LR

ASM_PFX(ArmSetLowVectors):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0 (Read control register configuration data)
  bic     r0, r0, #0x00002000     @ Clear V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      LR

ASM_PFX(ArmSetHighVectors):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0 (Read control register configuration data)
  orr     r0, r0, #0x00002000     @ Set V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      LR
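
#------------------------------------------------------------------------------
# The two routines below implement the standard ARMv7 set/way walk: they read
# CLIDR, iterate over every data/unified cache level up to the Level of
# Coherency (or Level of Unification for the PoU variant), select each level in
# CSSELR, read its geometry from CCSIDR, and invoke the operation passed in r0
# for every set/way combination of that level. A rough, illustrative C sketch
# of the loop follows; helper and field names (CacheTypeAtLevel, NumSets,
# NumWays, SetShift, WayShift) are descriptive placeholders, not symbols
# defined by this library:
#
#   for (Level = 0; Level < Limit; Level++) {
#     if (CacheTypeAtLevel (Clidr, Level) >= 2) {       // data or unified cache
#       // write CSSELR = Level << 1, isb, then read CCSIDR for the geometry
#       for (Set = NumSets - 1; Set >= 0; Set--) {
#         for (Way = NumWays - 1; Way >= 0; Way--) {
#           Operation ((Way << WayShift) | (Set << SetShift) | (Level << 1));
#         }
#       }
#     }
#   }
#------------------------------------------------------------------------------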

ASM_PFX(ArmV7AllDataCachesOperation):
  stmfd   SP!, {r4-r12, LR}
  mov     R1, R0                  @ Save Function call in R1
  mrc     p15, 1, R6, c0, c0, 1   @ Read CLIDR
  ands    R3, R6, #0x7000000      @ Mask out all but Level of Coherency (LoC)
  mov     R3, R3, LSR #23         @ Cache level value (naturally aligned)
  beq     L_Finished
  mov     R10, #0

Loop1:
  add     R2, R10, R10, LSR #1    @ Work out 3 x cache level
  mov     R12, R6, LSR R2         @ bottom 3 bits are the Cache type for this level
  and     R12, R12, #7            @ get those 3 bits alone
  cmp     R12, #2
  blt     L_Skip                  @ no cache or only instruction cache at this level
  mcr     p15, 2, R10, c0, c0, 0  @ write the Cache Size selection register (CSSELR) // OR in 1 for Instruction
  isb                             @ isb to sync the change to the CacheSizeID reg
  mrc     p15, 1, R12, c0, c0, 0  @ read the current Cache Size ID register (CCSIDR)
  and     R2, R12, #0x7           @ extract the line length field
  add     R2, R2, #4              @ add 4 for the line length offset (log2 16 bytes)
@ ldr     R4, =0x3FF
  mov     R4, #0x400
  sub     R4, R4, #1
  ands    R4, R4, R12, LSR #3     @ R4 is the max number of the way size (right aligned)
  clz     R5, R4                  @ R5 is the bit position of the way size increment
@ ldr     R7, =0x00007FFF
  mov     R7, #0x00008000
  sub     R7, R7, #1
  ands    R7, R7, R12, LSR #13    @ R7 is the max number of the index size (right aligned)

Loop2:
  mov     R9, R4                  @ R9 working copy of the max way size (right aligned)

Loop3:
  orr     R0, R10, R9, LSL R5     @ factor in the way number and cache level into R0
  orr     R0, R0, R7, LSL R2      @ factor in the index number
  blx     R1
  subs    R9, R9, #1              @ decrement the way number
  bge     Loop3
  subs    R7, R7, #1              @ decrement the index
  bge     Loop2

L_Skip:
  add     R10, R10, #2            @ increment the cache number
  cmp     R3, R10
  bgt     Loop1

L_Finished:
  dsb
  ldmfd   SP!, {r4-r12, lr}
  bx      LR
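
# Typical usage from the C side of the library is to pass one of the set/way
# entry points above as the callback, e.g. (assuming the callback prototype
# takes the single set/way/level word built in R0):
#
#   ArmV7AllDataCachesOperation (ArmCleanInvalidateDataCacheEntryBySetWay);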

ASM_PFX(ArmV7PerformPoUDataCacheOperation):
  stmfd   SP!, {r4-r12, LR}
  mov     R1, R0                  @ Save Function call in R1
  mrc     p15, 1, R6, c0, c0, 1   @ Read CLIDR
  ands    R3, R6, #0x38000000     @ Mask out all but Level of Unification (LoU)
  mov     R3, R3, LSR #26         @ Cache level value (naturally aligned)
  beq     Finished2
  mov     R10, #0

Loop4:
  add     R2, R10, R10, LSR #1    @ Work out 3 x cache level
  mov     R12, R6, LSR R2         @ bottom 3 bits are the Cache type for this level
  and     R12, R12, #7            @ get those 3 bits alone
  cmp     R12, #2
  blt     Skip2                   @ no cache or only instruction cache at this level
  mcr     p15, 2, R10, c0, c0, 0  @ write the Cache Size selection register (CSSELR) // OR in 1 for Instruction
  isb                             @ isb to sync the change to the CacheSizeID reg
  mrc     p15, 1, R12, c0, c0, 0  @ read the current Cache Size ID register (CCSIDR)
  and     R2, R12, #0x7           @ extract the line length field
  add     R2, R2, #4              @ add 4 for the line length offset (log2 16 bytes)
  ldr     R4, =0x3FF
  ands    R4, R4, R12, LSR #3     @ R4 is the max number of the way size (right aligned)
  clz     R5, R4                  @ R5 is the bit position of the way size increment
  ldr     R7, =0x00007FFF
  ands    R7, R7, R12, LSR #13    @ R7 is the max number of the index size (right aligned)

Loop5:
  mov     R9, R4                  @ R9 working copy of the max way size (right aligned)

Loop6:
  orr     R0, R10, R9, LSL R5     @ factor in the way number and cache level into R0
  orr     R0, R0, R7, LSL R2      @ factor in the index number
  blx     R1
  subs    R9, R9, #1              @ decrement the way number
  bge     Loop6
  subs    R7, R7, #1              @ decrement the index
  bge     Loop5

Skip2:
  add     R10, R10, #2            @ increment the cache number
  cmp     R3, R10
  bgt     Loop4

Finished2:
  dsb
  ldmfd   SP!, {r4-r12, lr}
  bx      LR

ASM_PFX(ArmDataMemoryBarrier):
  dmb
  bx      LR

ASM_PFX(ArmDataSyncronizationBarrier):
ASM_PFX(ArmDrainWriteBuffer):
  dsb
  bx      LR

ASM_PFX(ArmInstructionSynchronizationBarrier):
  isb
  bx      LR

ASM_PFX(ArmReadVBar):
  # Read the Address of the Vector Table from the VBAR register
  mrc     p15, 0, r0, c12, c0, 0
  bx      lr

ASM_PFX(ArmWriteVBar):
  # Set the Address of the Vector Table in the VBAR register
  mcr     p15, 0, r0, c12, c0, 0
  # Ensure the SCTLR.V bit is clear
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0 (Read control register configuration data)
  bic     r0, r0, #0x00002000     @ Clear V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      lr

ASM_PFX(ArmEnableVFP):
  # Read CPACR (Coprocessor Access Control Register)
  mrc     p15, 0, r0, c1, c0, 2
  # Enable VFP access (Full Access to CP10, CP11) (V* instructions)
  orr     r0, r0, #0x00f00000
  # Write back CPACR (Coprocessor Access Control Register)
  mcr     p15, 0, r0, c1, c0, 2
  isb
  # Set EN bit in FPEXC. The Advanced SIMD and VFP extensions are enabled and operate normally.
  mov     r0, #0x40000000
  mcr     p10, #0x7, r0, c8, c0, #0
  bx      lr

ASM_PFX(ArmCallWFI):
  wfi
  bx      lr

# Note: Returns 0 in a Uniprocessor implementation
ASM_PFX(ArmReadCbar):
  mrc     p15, 4, r0, c15, c0, 0  @ Read Configuration Base Address Register
  bx      lr

ASM_PFX(ArmInvalidateInstructionAndDataTlb):
  mcr     p15, 0, r0, c8, c7, 0   @ Invalidate Inst TLB and Data TLB
  dsb
  bx      lr

ASM_PFX(ArmReadMpidr):
  mrc     p15, 0, r0, c0, c0, 5   @ read MPIDR
  bx      lr

ASM_PFX(ArmReadTpidrurw):
  mrc     p15, 0, r0, c13, c0, 2  @ read TPIDRURW
  bx      lr

ASM_PFX(ArmWriteTpidrurw):
  mcr     p15, 0, r0, c13, c0, 2  @ write TPIDRURW
  bx      lr

ASM_PFX(ArmIsArchTimerImplemented):
  mrc     p15, 0, r0, c0, c1, 1   @ Read ID_PFR1
  and     r0, r0, #0x000F0000
  bx      lr

ASM_PFX(ArmReadIdPfr1):
  mrc     p15, 0, r0, c0, c1, 1   @ Read ID_PFR1 Register
  bx      lr

ASM_FUNCTION_REMOVE_IF_UNREFERENCED