cache.c

// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2017, STMicroelectronics - All Rights Reserved
 * Author(s): Vikas Manocha, <vikas.manocha@st.com> for STMicroelectronics.
 */

#include <common.h>
#include <cpu_func.h>
#include <errno.h>
#include <asm/armv7m.h>
#include <asm/cache.h>
#include <asm/io.h>

/* Cache maintenance operation registers */
#define V7M_CACHE_REG_ICIALLU    ((u32 *)(V7M_CACHE_MAINT_BASE + 0x00))
#define INVAL_ICACHE_POU         0
#define V7M_CACHE_REG_ICIMVALU   ((u32 *)(V7M_CACHE_MAINT_BASE + 0x08))
#define V7M_CACHE_REG_DCIMVAC    ((u32 *)(V7M_CACHE_MAINT_BASE + 0x0C))
#define V7M_CACHE_REG_DCISW      ((u32 *)(V7M_CACHE_MAINT_BASE + 0x10))
#define V7M_CACHE_REG_DCCMVAU    ((u32 *)(V7M_CACHE_MAINT_BASE + 0x14))
#define V7M_CACHE_REG_DCCMVAC    ((u32 *)(V7M_CACHE_MAINT_BASE + 0x18))
#define V7M_CACHE_REG_DCCSW      ((u32 *)(V7M_CACHE_MAINT_BASE + 0x1C))
#define V7M_CACHE_REG_DCCIMVAC   ((u32 *)(V7M_CACHE_MAINT_BASE + 0x20))
#define V7M_CACHE_REG_DCCISW     ((u32 *)(V7M_CACHE_MAINT_BASE + 0x24))
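
/*
 * For the set/way operation registers (DCISW, DCCSW, DCCISW) the way index
 * is written in the topmost bits and the set index starts at
 * log2(line length in bytes). The shifts below assume the Cortex-M7 caches:
 * 4 ways (way index in bits [31:30]) and 32-byte lines (set index from bit 5).
 */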
#define WAYS_SHIFT               30
#define SETS_SHIFT               5

/* armv7m processor feature registers */
#define V7M_PROC_REG_CLIDR       ((u32 *)(V7M_PROC_FTR_BASE + 0x00))
#define V7M_PROC_REG_CTR         ((u32 *)(V7M_PROC_FTR_BASE + 0x04))
#define V7M_PROC_REG_CCSIDR      ((u32 *)(V7M_PROC_FTR_BASE + 0x08))
#define MASK_NUM_WAYS            GENMASK(12, 3)
#define MASK_NUM_SETS            GENMASK(27, 13)
#define CLINE_SIZE_MASK          GENMASK(2, 0)
#define NUM_WAYS_SHIFT           3
#define NUM_SETS_SHIFT           13
#define V7M_PROC_REG_CSSELR      ((u32 *)(V7M_PROC_FTR_BASE + 0x0C))
#define SEL_I_OR_D               BIT(0)

enum cache_type {
        DCACHE,
        ICACHE,
};

/* PoU: Point of Unification, PoC: Point of Coherency */
enum cache_action {
        INVALIDATE_POU,          /* i-cache invalidate by address */
        INVALIDATE_POC,          /* d-cache invalidate by address */
        INVALIDATE_SET_WAY,      /* d-cache invalidate by sets/ways */
        FLUSH_POU,               /* d-cache clean by address to the PoU */
        FLUSH_POC,               /* d-cache clean by address to the PoC */
        FLUSH_SET_WAY,           /* d-cache clean by sets/ways */
        FLUSH_INVAL_POC,         /* d-cache clean & invalidate by addr to PoC */
        FLUSH_INVAL_SET_WAY,     /* d-cache clean & invalidate by set/ways */
};
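
/*
 * "Clean" writes dirty lines back to memory, "invalidate" discards the cached
 * copies, and clean & invalidate does both. Operations to the PoU are enough
 * for instruction/data coherency on the core itself; operations to the PoC
 * make the data visible to other bus masters such as DMA engines.
 */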

#if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF)
struct dcache_config {
        u32 ways;
        u32 sets;
};
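
/*
 * CCSIDR reports (associativity - 1) in bits [12:3] and (number of sets - 1)
 * in bits [27:13], so the values stored below are the highest valid way and
 * set indices rather than the raw counts - hence the inclusive count-down
 * loops in action_dcache_all().
 */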
static void get_cache_ways_sets(struct dcache_config *cache)
{
        u32 cache_size_id = readl(V7M_PROC_REG_CCSIDR);

        cache->ways = (cache_size_id & MASK_NUM_WAYS) >> NUM_WAYS_SHIFT;
        cache->sets = (cache_size_id & MASK_NUM_SETS) >> NUM_SETS_SHIFT;
}

/*
 * Return the io register to perform required cache action like clean or clean
 * & invalidate by sets/ways.
 */
static u32 *get_action_reg_set_ways(enum cache_action action)
{
        switch (action) {
        case INVALIDATE_SET_WAY:
                return V7M_CACHE_REG_DCISW;
        case FLUSH_SET_WAY:
                return V7M_CACHE_REG_DCCSW;
        case FLUSH_INVAL_SET_WAY:
                return V7M_CACHE_REG_DCCISW;
        default:
                break;
        }

        return NULL;
}

/*
 * Return the io register to perform required cache action like clean or clean
 * & invalidate by address or range.
 */
static u32 *get_action_reg_range(enum cache_action action)
{
        switch (action) {
        case INVALIDATE_POU:
                return V7M_CACHE_REG_ICIMVALU;
        case INVALIDATE_POC:
                return V7M_CACHE_REG_DCIMVAC;
        case FLUSH_POU:
                return V7M_CACHE_REG_DCCMVAU;
        case FLUSH_POC:
                return V7M_CACHE_REG_DCCMVAC;
        case FLUSH_INVAL_POC:
                return V7M_CACHE_REG_DCCIMVAC;
        default:
                break;
        }

        return NULL;
}

static u32 get_cline_size(enum cache_type type)
{
        u32 size;

        if (type == DCACHE)
                clrbits_le32(V7M_PROC_REG_CSSELR, BIT(SEL_I_OR_D));
        else if (type == ICACHE)
                setbits_le32(V7M_PROC_REG_CSSELR, BIT(SEL_I_OR_D));
        /* Make sure cache selection is effective for next memory access */
        dsb();

        size = readl(V7M_PROC_REG_CCSIDR) & CLINE_SIZE_MASK;
        /* Size encoded as 2 less than log(no_of_words_in_cache_line) base 2 */
        size = 1 << (size + 2);
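        /*
         * Note: this is the line length in 32-bit words, not bytes (8 for the
         * 32-byte lines of the Cortex-M7). Using it as the address step in
         * action_cache_range() therefore issues more maintenance operations
         * per line than strictly needed, which is harmless.
         */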
        debug("cache line size is %d\n", size);

        return size;
}

/* Perform the action like invalidate/clean on a range of cache addresses */
static int action_cache_range(enum cache_action action, u32 start_addr,
                              int64_t size)
{
        u32 cline_size;
        u32 *action_reg;
        enum cache_type type;

        action_reg = get_action_reg_range(action);
        if (!action_reg)
                return -EINVAL;

        if (action == INVALIDATE_POU)
                type = ICACHE;
        else
                type = DCACHE;

        /* Cache line size is minimum size for the cache action */
        cline_size = get_cline_size(type);
        /* Align start address to cache line boundary */
        start_addr &= ~(cline_size - 1);
        debug("total size for cache action = %llx\n", size);
        do {
                writel(start_addr, action_reg);
                size -= cline_size;
                start_addr += cline_size;
        } while (size > cline_size);

        /* Make sure cache action is effective for next memory access */
        dsb();
        isb();  /* Make sure instruction stream sees it */
        debug("cache action on range done\n");

        return 0;
}

/* Perform the action like invalidate/clean on all cached addresses */
static int action_dcache_all(enum cache_action action)
{
        struct dcache_config cache;
        u32 *action_reg;
        int i, j;

        action_reg = get_action_reg_set_ways(action);
        if (!action_reg)
                return -EINVAL;

        clrbits_le32(V7M_PROC_REG_CSSELR, BIT(SEL_I_OR_D));
        /* Make sure cache selection is effective for next memory access */
        dsb();

        get_cache_ways_sets(&cache);    /* Get number of ways & sets */
        debug("cache: ways= %d, sets= %d\n", cache.ways + 1, cache.sets + 1);
        for (i = cache.sets; i >= 0; i--) {
                for (j = cache.ways; j >= 0; j--) {
                        writel((j << WAYS_SHIFT) | (i << SETS_SHIFT),
                               action_reg);
                }
        }

        /* Make sure cache action is effective for next memory access */
        dsb();
        isb();  /* Make sure instruction stream sees it */

        return 0;
}
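
/*
 * Invalidate the whole d-cache by set/way before setting the enable bit so
 * that no stale or random lines are hit once caching is turned on.
 */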
void dcache_enable(void)
{
        if (dcache_status())    /* return if cache already enabled */
                return;

        if (action_dcache_all(INVALIDATE_SET_WAY)) {
                printf("ERR: D-cache not enabled\n");
                return;
        }

        setbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_DCACHE));

        /* Make sure cache action is effective for next memory access */
        dsb();
        isb();  /* Make sure instruction stream sees it */
}

void dcache_disable(void)
{
        if (!dcache_status())
                return;

        /* Clean the whole d-cache by set/way before clearing the enable bit */
        if (action_dcache_all(FLUSH_SET_WAY)) {
                printf("ERR: D-cache not flushed\n");
                return;
        }

        clrbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_DCACHE));

        /* Make sure cache action is effective for next memory access */
        dsb();
        isb();  /* Make sure instruction stream sees it */
}

int dcache_status(void)
{
        return (readl(&V7M_SCB->ccr) & BIT(V7M_CCR_DCACHE)) != 0;
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
        if (action_cache_range(INVALIDATE_POC, start, stop - start)) {
                printf("ERR: D-cache not invalidated\n");
                return;
        }
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
        if (action_cache_range(FLUSH_POC, start, stop - start)) {
                printf("ERR: D-cache not flushed\n");
                return;
        }
}

void flush_dcache_all(void)
{
        if (action_dcache_all(FLUSH_SET_WAY)) {
                printf("ERR: D-cache not flushed\n");
                return;
        }
}

void invalidate_dcache_all(void)
{
        if (action_dcache_all(INVALIDATE_SET_WAY)) {
                printf("ERR: D-cache not invalidated\n");
                return;
        }
}

#else
void dcache_enable(void)
{
        return;
}

void dcache_disable(void)
{
        return;
}

int dcache_status(void)
{
        return 0;
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_all(void)
{
}

void mmu_set_region_dcache_behaviour(phys_addr_t start, size_t size,
                                     enum dcache_option option)
{
}
#endif

#if !CONFIG_IS_ENABLED(SYS_ICACHE_OFF)
void invalidate_icache_all(void)
{
        writel(INVAL_ICACHE_POU, V7M_CACHE_REG_ICIALLU);

        /* Make sure cache action is effective for next memory access */
        dsb();
        isb();  /* Make sure instruction stream sees it */
}

void icache_enable(void)
{
        if (icache_status())
                return;

        invalidate_icache_all();
        setbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_ICACHE));

        /* Make sure cache action is effective for next memory access */
        dsb();
        isb();  /* Make sure instruction stream sees it */
}

int icache_status(void)
{
        return (readl(&V7M_SCB->ccr) & BIT(V7M_CCR_ICACHE)) != 0;
}

void icache_disable(void)
{
        if (!icache_status())
                return;

        isb();  /* flush pipeline */
        clrbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_ICACHE));
        isb();  /* subsequent instruction fetches see the cache disable effect */
}

#else
void invalidate_icache_all(void)
{
        return;
}

void icache_enable(void)
{
        return;
}

void icache_disable(void)
{
        return;
}

int icache_status(void)
{
        return 0;
}
#endif

void enable_caches(void)
{
#if !CONFIG_IS_ENABLED(SYS_ICACHE_OFF)
        icache_enable();
#endif
#if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF)
        dcache_enable();
#endif
}
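
/*
 * Typical use from board or driver code (a sketch, not part of this file):
 * clean a buffer before a DMA engine reads from it and invalidate it after
 * the engine has written into it, with the range rounded up to whole cache
 * lines. "buf" and "len" here are hypothetical caller variables.
 *
 *      flush_dcache_range((ulong)buf,
 *                         (ulong)buf + roundup(len, ARCH_DMA_MINALIGN));
 *      ... start the DMA transfer and wait for completion ...
 *      invalidate_dcache_range((ulong)buf,
 *                              (ulong)buf + roundup(len, ARCH_DMA_MINALIGN));
 */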