cache.c

// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2017, STMicroelectronics - All Rights Reserved
 * Author(s): Vikas Manocha, <vikas.manocha@st.com> for STMicroelectronics.
 */

#include <common.h>
#include <cpu_func.h>
#include <errno.h>
#include <log.h>
#include <asm/armv7m.h>
#include <asm/cache.h>
#include <asm/io.h>

/* Cache maintenance operation registers */
#define V7M_CACHE_REG_ICIALLU		((u32 *)(V7M_CACHE_MAINT_BASE + 0x00))
#define INVAL_ICACHE_POU		0
#define V7M_CACHE_REG_ICIMVALU		((u32 *)(V7M_CACHE_MAINT_BASE + 0x08))
#define V7M_CACHE_REG_DCIMVAC		((u32 *)(V7M_CACHE_MAINT_BASE + 0x0C))
#define V7M_CACHE_REG_DCISW		((u32 *)(V7M_CACHE_MAINT_BASE + 0x10))
#define V7M_CACHE_REG_DCCMVAU		((u32 *)(V7M_CACHE_MAINT_BASE + 0x14))
#define V7M_CACHE_REG_DCCMVAC		((u32 *)(V7M_CACHE_MAINT_BASE + 0x18))
#define V7M_CACHE_REG_DCCSW		((u32 *)(V7M_CACHE_MAINT_BASE + 0x1C))
#define V7M_CACHE_REG_DCCIMVAC		((u32 *)(V7M_CACHE_MAINT_BASE + 0x20))
#define V7M_CACHE_REG_DCCISW		((u32 *)(V7M_CACHE_MAINT_BASE + 0x24))
#define WAYS_SHIFT			30
#define SETS_SHIFT			5

/* armv7m processor feature registers */
#define V7M_PROC_REG_CLIDR		((u32 *)(V7M_PROC_FTR_BASE + 0x00))
#define V7M_PROC_REG_CTR		((u32 *)(V7M_PROC_FTR_BASE + 0x04))
#define V7M_PROC_REG_CCSIDR		((u32 *)(V7M_PROC_FTR_BASE + 0x08))
#define MASK_NUM_WAYS			GENMASK(12, 3)
#define MASK_NUM_SETS			GENMASK(27, 13)
#define CLINE_SIZE_MASK			GENMASK(2, 0)
#define NUM_WAYS_SHIFT			3
#define NUM_SETS_SHIFT			13
#define V7M_PROC_REG_CSSELR		((u32 *)(V7M_PROC_FTR_BASE + 0x0C))
#define SEL_I_OR_D			BIT(0)

enum cache_type {
	DCACHE,
	ICACHE,
};

/* PoU: Point of Unification, PoC: Point of Coherency */
enum cache_action {
	INVALIDATE_POU,		/* i-cache invalidate by address */
	INVALIDATE_POC,		/* d-cache invalidate by address */
	INVALIDATE_SET_WAY,	/* d-cache invalidate by sets/ways */
	FLUSH_POU,		/* d-cache clean by address to the PoU */
	FLUSH_POC,		/* d-cache clean by address to the PoC */
	FLUSH_SET_WAY,		/* d-cache clean by sets/ways */
	FLUSH_INVAL_POC,	/* d-cache clean & invalidate by addr to PoC */
	FLUSH_INVAL_SET_WAY,	/* d-cache clean & invalidate by set/ways */
};
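
/*
 * In this driver the PoU/PoC distinction matters as follows: clean or
 * invalidate to the PoC when a buffer is shared with another bus master
 * (for example a DMA engine), and clean to the PoU plus an i-cache
 * invalidate before jumping to code that was just written to memory as
 * data.
 */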

#if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF)
struct dcache_config {
	u32 ways;
	u32 sets;
};

static void get_cache_ways_sets(struct dcache_config *cache)
{
	u32 cache_size_id = readl(V7M_PROC_REG_CCSIDR);

	cache->ways = (cache_size_id & MASK_NUM_WAYS) >> NUM_WAYS_SHIFT;
	cache->sets = (cache_size_id & MASK_NUM_SETS) >> NUM_SETS_SHIFT;
}

/*
 * Return the io register to perform required cache action like clean or clean
 * & invalidate by sets/ways.
 */
static u32 *get_action_reg_set_ways(enum cache_action action)
{
	switch (action) {
	case INVALIDATE_SET_WAY:
		return V7M_CACHE_REG_DCISW;
	case FLUSH_SET_WAY:
		return V7M_CACHE_REG_DCCSW;
	case FLUSH_INVAL_SET_WAY:
		return V7M_CACHE_REG_DCCISW;
	default:
		break;
	}

	return NULL;
}

/*
 * Return the io register to perform required cache action like clean or clean
 * & invalidate by address or range.
 */
static u32 *get_action_reg_range(enum cache_action action)
{
	switch (action) {
	case INVALIDATE_POU:
		return V7M_CACHE_REG_ICIMVALU;
	case INVALIDATE_POC:
		return V7M_CACHE_REG_DCIMVAC;
	case FLUSH_POU:
		return V7M_CACHE_REG_DCCMVAU;
	case FLUSH_POC:
		return V7M_CACHE_REG_DCCMVAC;
	case FLUSH_INVAL_POC:
		return V7M_CACHE_REG_DCCIMVAC;
	default:
		break;
	}

	return NULL;
}

static u32 get_cline_size(enum cache_type type)
{
	u32 size;

	if (type == DCACHE)
		clrbits_le32(V7M_PROC_REG_CSSELR, BIT(SEL_I_OR_D));
	else if (type == ICACHE)
		setbits_le32(V7M_PROC_REG_CSSELR, BIT(SEL_I_OR_D));
	/* Make sure cache selection is effective for next memory access */
	dsb();

	size = readl(V7M_PROC_REG_CCSIDR) & CLINE_SIZE_MASK;
	/* LineSize is encoded as log2(words per cache line) minus 2 */
	size = 1 << (size + 2);
	debug("cache line size is %d\n", size);

	return size;
}
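
/*
 * Worked example for get_cline_size(): a CCSIDR.LineSize field of 1 means
 * log2(words per line) - 2 == 1, so the decode is 1 << (1 + 2) = 8 words
 * per line, i.e. 32 bytes with 4-byte words (the line length of the
 * Cortex-M7 caches this driver is typically used with).
 */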

/* Perform the action like invalidate/clean on a range of cache addresses */
static int action_cache_range(enum cache_action action, u32 start_addr,
			      int64_t size)
{
	u32 cline_size;
	u32 *action_reg;
	enum cache_type type;

	action_reg = get_action_reg_range(action);
	if (!action_reg)
		return -EINVAL;
	if (action == INVALIDATE_POU)
		type = ICACHE;
	else
		type = DCACHE;

	/* Cache line size is the minimum granule for the cache action */
	cline_size = get_cline_size(type);
	/* Align start address to cache line boundary */
	start_addr &= ~(cline_size - 1);
	debug("total size for cache action = %llx\n", size);
	do {
		writel(start_addr, action_reg);
		size -= cline_size;
		start_addr += cline_size;
	} while (size > 0);

	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */
	debug("cache action on range done\n");

	return 0;
}

/* Perform the action like invalidate/clean on all cached addresses */
static int action_dcache_all(enum cache_action action)
{
	struct dcache_config cache;
	u32 *action_reg;
	int i, j;

	action_reg = get_action_reg_set_ways(action);
	if (!action_reg)
		return -EINVAL;

	clrbits_le32(V7M_PROC_REG_CSSELR, BIT(SEL_I_OR_D));
	/* Make sure cache selection is effective for next memory access */
	dsb();

	get_cache_ways_sets(&cache);	/* Get number of ways & sets */
	debug("cache: ways= %d, sets= %d\n", cache.ways + 1, cache.sets + 1);
	for (i = cache.sets; i >= 0; i--) {
		for (j = cache.ways; j >= 0; j--) {
			writel((j << WAYS_SHIFT) | (i << SETS_SHIFT),
			       action_reg);
		}
	}

	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */

	return 0;
}
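
/*
 * Note on the set/way operand built in action_dcache_all(): with
 * WAYS_SHIFT of 30 and SETS_SHIFT of 5, the way index lands in bits
 * [31:30] and the set index starts at bit 5.  In the architected DC*SW
 * operand format the way field starts at bit (32 - log2(associativity))
 * and the set field at bit log2(line size in bytes), so these constants
 * assume a 4-way cache with 32-byte lines, as found on Cortex-M7.
 */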

void dcache_enable(void)
{
	if (dcache_status())	/* return if cache already enabled */
		return;

	if (action_dcache_all(INVALIDATE_SET_WAY)) {
		printf("ERR: D-cache not enabled\n");
		return;
	}

	setbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_DCACHE));
	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */
}
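
/*
 * The invalidate-by-set/way pass in dcache_enable() before setting CCR.DC
 * is required because the cache RAM contents are unknown after reset;
 * enabling the d-cache without invalidating it first could expose stale or
 * random lines as valid data.
 */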

void dcache_disable(void)
{
	if (!dcache_status())
		return;

	/* Clean the whole d-cache so dirty lines reach memory, then disable it */
	if (action_dcache_all(FLUSH_SET_WAY)) {
		printf("ERR: D-cache not flushed\n");
		return;
	}

	clrbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_DCACHE));
	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */
}

int dcache_status(void)
{
	return (readl(&V7M_SCB->ccr) & BIT(V7M_CCR_DCACHE)) != 0;
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	if (action_cache_range(INVALIDATE_POC, start, stop - start)) {
		printf("ERR: D-cache not invalidated\n");
		return;
	}
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
	if (action_cache_range(FLUSH_POC, start, stop - start)) {
		printf("ERR: D-cache not flushed\n");
		return;
	}
}
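
/*
 * Typical use of the two range helpers above, sketched for an imaginary
 * DMA-capable peripheral driver (dma_to_device()/dma_from_device() are
 * placeholders, not real U-Boot APIs); start/stop should be cache-line
 * aligned so that lines shared with unrelated data are not maintained:
 *
 *	flush_dcache_range((ulong)buf, (ulong)buf + len);	// CPU -> device
 *	dma_to_device(buf, len);
 *
 *	dma_from_device(buf, len);
 *	invalidate_dcache_range((ulong)buf, (ulong)buf + len);	// device -> CPU
 */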

void flush_dcache_all(void)
{
	if (action_dcache_all(FLUSH_SET_WAY)) {
		printf("ERR: D-cache not flushed\n");
		return;
	}
}

void invalidate_dcache_all(void)
{
	if (action_dcache_all(INVALIDATE_SET_WAY)) {
		printf("ERR: D-cache not invalidated\n");
		return;
	}
}
#else
void dcache_enable(void)
{
	return;
}

void dcache_disable(void)
{
	return;
}

int dcache_status(void)
{
	return 0;
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_all(void)
{
}

void mmu_set_region_dcache_behaviour(phys_addr_t start, size_t size,
				     enum dcache_option option)
{
}
#endif

#if !CONFIG_IS_ENABLED(SYS_ICACHE_OFF)
void invalidate_icache_all(void)
{
	writel(INVAL_ICACHE_POU, V7M_CACHE_REG_ICIALLU);

	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */
}
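
/*
 * Note on invalidate_icache_all(): a write to ICIALLU invalidates the
 * entire instruction cache to the PoU; the data written (INVAL_ICACHE_POU,
 * i.e. 0) is not interpreted by the hardware, any write triggers the
 * operation.
 */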

void icache_enable(void)
{
	if (icache_status())
		return;

	invalidate_icache_all();
	setbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_ICACHE));
	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */
}

int icache_status(void)
{
	return (readl(&V7M_SCB->ccr) & BIT(V7M_CCR_ICACHE)) != 0;
}

void icache_disable(void)
{
	if (!icache_status())
		return;

	isb();	/* flush pipeline */
	clrbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_ICACHE));
	isb();	/* make subsequent instruction fetches see the cache disable */
}
#else
void invalidate_icache_all(void)
{
	return;
}

void icache_enable(void)
{
	return;
}

void icache_disable(void)
{
	return;
}

int icache_status(void)
{
	return 0;
}
#endif

void enable_caches(void)
{
#if !CONFIG_IS_ENABLED(SYS_ICACHE_OFF)
	icache_enable();
#endif
#if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF)
	dcache_enable();
#endif
}