// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright 2018-2019 NXP
 *
 * Peng Fan <peng.fan@nxp.com>
 */

#include <common.h>
#include <asm/arch/clock.h>
#include <asm/arch/imx-regs.h>
#include <asm/arch/sys_proto.h>
#include <asm/global_data.h>
#include <asm/io.h>
#include <div64.h>
#include <errno.h>
#include <linux/bitops.h>
#include <linux/delay.h>

DECLARE_GLOBAL_DATA_PTR;

static struct anamix_pll *ana_pll = (struct anamix_pll *)ANATOP_BASE_ADDR;

static u32 get_root_clk(enum clk_root_index clock_id);

#ifdef CONFIG_IMX_HAB
void hab_caam_clock_enable(unsigned char enable)
{
	/* The CAAM clock is always on for iMX8M */
}
#endif

void enable_ocotp_clk(unsigned char enable)
{
	clock_enable(CCGR_OCOTP, !!enable);
}

int enable_i2c_clk(unsigned char enable, unsigned i2c_num)
{
	/* 0 - 3 is valid i2c num */
	if (i2c_num > 3)
		return -EINVAL;

	clock_enable(CCGR_I2C1 + i2c_num, !!enable);

	return 0;
}

#ifdef CONFIG_SPL_BUILD
static struct imx_int_pll_rate_table imx8mm_fracpll_tbl[] = {
	PLL_1443X_RATE(1000000000U, 250, 3, 1, 0),
	PLL_1443X_RATE(800000000U, 300, 9, 0, 0),
	PLL_1443X_RATE(750000000U, 250, 8, 0, 0),
	PLL_1443X_RATE(650000000U, 325, 3, 2, 0),
	PLL_1443X_RATE(600000000U, 300, 3, 2, 0),
	PLL_1443X_RATE(594000000U, 99, 1, 2, 0),
	PLL_1443X_RATE(400000000U, 300, 9, 1, 0),
	PLL_1443X_RATE(266000000U, 400, 9, 2, 0),
	PLL_1443X_RATE(167000000U, 334, 3, 4, 0),
	PLL_1443X_RATE(100000000U, 300, 9, 3, 0),
};
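
/*
 * With the 24 MHz reference used throughout this file, a 1443x fractional
 * PLL produces
 *   Fout = 24 MHz * (mdiv + kdiv / 65536) / (pdiv * 2^sdiv)
 * so, for example, the 1 GHz entry above works out to
 * 24 * 250 / (3 * 2^1) = 1000 MHz; these are the divider fields that
 * fracpll_configure() programs below and decode_fracpll() reads back.
 */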
static int fracpll_configure(enum pll_clocks pll, u32 freq)
{
	int i;
	u32 tmp, div_val;
	void *pll_base;
	struct imx_int_pll_rate_table *rate;

	for (i = 0; i < ARRAY_SIZE(imx8mm_fracpll_tbl); i++) {
		if (freq == imx8mm_fracpll_tbl[i].rate)
			break;
	}

	if (i == ARRAY_SIZE(imx8mm_fracpll_tbl)) {
		printf("%s: No matched freq table %u\n", __func__, freq);
		return -EINVAL;
	}

	rate = &imx8mm_fracpll_tbl[i];

	switch (pll) {
	case ANATOP_DRAM_PLL:
		setbits_le32(GPC_BASE_ADDR + 0xEC, 1 << 7);
		setbits_le32(GPC_BASE_ADDR + 0xF8, 1 << 5);
		writel(SRC_DDR1_ENABLE_MASK, SRC_BASE_ADDR + 0x1004);
		pll_base = &ana_pll->dram_pll_gnrl_ctl;
		break;
	case ANATOP_VIDEO_PLL:
		pll_base = &ana_pll->video_pll1_gnrl_ctl;
		break;
	default:
		return 0;
	}

	/* Bypass clock and set lock to pll output lock */
	tmp = readl(pll_base);
	tmp |= BYPASS_MASK;
	writel(tmp, pll_base);

	/* Enable RST */
	tmp &= ~RST_MASK;
	writel(tmp, pll_base);

	div_val = (rate->mdiv << MDIV_SHIFT) | (rate->pdiv << PDIV_SHIFT) |
		(rate->sdiv << SDIV_SHIFT);
	writel(div_val, pll_base + 4);
	writel(rate->kdiv << KDIV_SHIFT, pll_base + 8);

	__udelay(100);

	/* Disable RST */
	tmp |= RST_MASK;
	writel(tmp, pll_base);

	/* Wait Lock */
	while (!(readl(pll_base) & LOCK_STATUS))
		;

	/* Bypass */
	tmp &= ~BYPASS_MASK;
	writel(tmp, pll_base);

	return 0;
}

void dram_pll_init(ulong pll_val)
{
	fracpll_configure(ANATOP_DRAM_PLL, pll_val);
}

static struct dram_bypass_clk_setting imx8mm_dram_bypass_tbl[] = {
	DRAM_BYPASS_ROOT_CONFIG(MHZ(100), 2, CLK_ROOT_PRE_DIV1, 2,
				CLK_ROOT_PRE_DIV2),
	DRAM_BYPASS_ROOT_CONFIG(MHZ(250), 3, CLK_ROOT_PRE_DIV2, 2,
				CLK_ROOT_PRE_DIV2),
	DRAM_BYPASS_ROOT_CONFIG(MHZ(400), 1, CLK_ROOT_PRE_DIV2, 3,
				CLK_ROOT_PRE_DIV2),
};
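
/*
 * Judging from how dram_enable_bypass() consumes these entries below, the
 * arguments are (bypass clock, DRAM_ALT root mux select, DRAM_ALT
 * pre-divider, DRAM_APB root mux select, DRAM_APB pre-divider).
 */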

void dram_enable_bypass(ulong clk_val)
{
	int i;
	struct dram_bypass_clk_setting *config;

	for (i = 0; i < ARRAY_SIZE(imx8mm_dram_bypass_tbl); i++) {
		if (clk_val == imx8mm_dram_bypass_tbl[i].clk)
			break;
	}

	if (i == ARRAY_SIZE(imx8mm_dram_bypass_tbl)) {
		printf("%s: No matched freq table %lu\n", __func__, clk_val);
		return;
	}

	config = &imx8mm_dram_bypass_tbl[i];

	clock_set_target_val(DRAM_ALT_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(config->alt_root_sel) |
			     CLK_ROOT_PRE_DIV(config->alt_pre_div));
	clock_set_target_val(DRAM_APB_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(config->apb_root_sel) |
			     CLK_ROOT_PRE_DIV(config->apb_pre_div));
	clock_set_target_val(DRAM_SEL_CFG, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));
}

void dram_disable_bypass(void)
{
	clock_set_target_val(DRAM_SEL_CFG, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(0));
	clock_set_target_val(DRAM_APB_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(4) |
			     CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV5));
}
#endif

int intpll_configure(enum pll_clocks pll, ulong freq)
{
	void __iomem *pll_gnrl_ctl, __iomem *pll_div_ctl;
	u32 pll_div_ctl_val, pll_clke_masks;

	switch (pll) {
	case ANATOP_SYSTEM_PLL1:
		pll_gnrl_ctl = &ana_pll->sys_pll1_gnrl_ctl;
		pll_div_ctl = &ana_pll->sys_pll1_div_ctl;
		pll_clke_masks = INTPLL_DIV20_CLKE_MASK |
			INTPLL_DIV10_CLKE_MASK | INTPLL_DIV8_CLKE_MASK |
			INTPLL_DIV6_CLKE_MASK | INTPLL_DIV5_CLKE_MASK |
			INTPLL_DIV4_CLKE_MASK | INTPLL_DIV3_CLKE_MASK |
			INTPLL_DIV2_CLKE_MASK | INTPLL_CLKE_MASK;
		break;
	case ANATOP_SYSTEM_PLL2:
		pll_gnrl_ctl = &ana_pll->sys_pll2_gnrl_ctl;
		pll_div_ctl = &ana_pll->sys_pll2_div_ctl;
		pll_clke_masks = INTPLL_DIV20_CLKE_MASK |
			INTPLL_DIV10_CLKE_MASK | INTPLL_DIV8_CLKE_MASK |
			INTPLL_DIV6_CLKE_MASK | INTPLL_DIV5_CLKE_MASK |
			INTPLL_DIV4_CLKE_MASK | INTPLL_DIV3_CLKE_MASK |
			INTPLL_DIV2_CLKE_MASK | INTPLL_CLKE_MASK;
		break;
	case ANATOP_SYSTEM_PLL3:
		pll_gnrl_ctl = &ana_pll->sys_pll3_gnrl_ctl;
		pll_div_ctl = &ana_pll->sys_pll3_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	case ANATOP_ARM_PLL:
		pll_gnrl_ctl = &ana_pll->arm_pll_gnrl_ctl;
		pll_div_ctl = &ana_pll->arm_pll_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	case ANATOP_GPU_PLL:
		pll_gnrl_ctl = &ana_pll->gpu_pll_gnrl_ctl;
		pll_div_ctl = &ana_pll->gpu_pll_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	case ANATOP_VPU_PLL:
		pll_gnrl_ctl = &ana_pll->vpu_pll_gnrl_ctl;
		pll_div_ctl = &ana_pll->vpu_pll_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	default:
		return -EINVAL;
	};
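
	/*
	 * Integer PLL output, again with the 24 MHz reference:
	 *   Fout = 24 MHz * main_div / (pre_div * 2^post_div)
	 * The per-frequency comments below spell this out, e.g. MHZ(1200):
	 * 24 * 0xc8 (200) / 2 / 2^1 = 1200 MHz.
	 */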
	switch (freq) {
	case MHZ(600):
		/* 24 * 0x12c / 3 / 2 ^ 2 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0x12c) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(2);
		break;
	case MHZ(750):
		/* 24 * 0xfa / 2 / 2 ^ 2 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xfa) |
			INTPLL_PRE_DIV_VAL(2) | INTPLL_POST_DIV_VAL(2);
		break;
	case MHZ(800):
		/* 24 * 0x190 / 3 / 2 ^ 2 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0x190) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(2);
		break;
	case MHZ(1000):
		/* 24 * 0xfa / 3 / 2 ^ 1 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xfa) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(1);
		break;
	case MHZ(1200):
		/* 24 * 0xc8 / 2 / 2 ^ 1 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xc8) |
			INTPLL_PRE_DIV_VAL(2) | INTPLL_POST_DIV_VAL(1);
		break;
	case MHZ(2000):
		/* 24 * 0xfa / 3 / 2 ^ 0 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xfa) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(0);
		break;
	default:
		return -EINVAL;
	};

	/* Bypass clock and set lock to pll output lock */
	setbits_le32(pll_gnrl_ctl, INTPLL_BYPASS_MASK | INTPLL_LOCK_SEL_MASK);

	/* Enable reset */
	clrbits_le32(pll_gnrl_ctl, INTPLL_RST_MASK);

	/* Configure */
	writel(pll_div_ctl_val, pll_div_ctl);

	__udelay(100);

	/* Disable reset */
	setbits_le32(pll_gnrl_ctl, INTPLL_RST_MASK);

	/* Wait Lock */
	while (!(readl(pll_gnrl_ctl) & INTPLL_LOCK_MASK))
		;

	/* Clear bypass */
	clrbits_le32(pll_gnrl_ctl, INTPLL_BYPASS_MASK);
	setbits_le32(pll_gnrl_ctl, pll_clke_masks);

	return 0;
}

void init_uart_clk(u32 index)
{
	/*
	 * set uart clock root
	 * 24M OSC
	 */
	switch (index) {
	case 0:
		clock_enable(CCGR_UART1, 0);
		clock_set_target_val(UART1_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART1, 1);
		return;
	case 1:
		clock_enable(CCGR_UART2, 0);
		clock_set_target_val(UART2_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART2, 1);
		return;
	case 2:
		clock_enable(CCGR_UART3, 0);
		clock_set_target_val(UART3_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART3, 1);
		return;
	case 3:
		clock_enable(CCGR_UART4, 0);
		clock_set_target_val(UART4_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART4, 1);
		return;
	default:
		printf("Invalid uart index\n");
		return;
	}
}

void init_wdog_clk(void)
{
	clock_enable(CCGR_WDOG1, 0);
	clock_enable(CCGR_WDOG2, 0);
	clock_enable(CCGR_WDOG3, 0);
	clock_set_target_val(WDOG_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(0));
	clock_enable(CCGR_WDOG1, 1);
	clock_enable(CCGR_WDOG2, 1);
	clock_enable(CCGR_WDOG3, 1);
}

void init_clk_usdhc(u32 index)
{
	/*
	 * set usdhc clock root
	 * sys pll1 400M
	 */
	switch (index) {
	case 0:
		clock_enable(CCGR_USDHC1, 0);
		clock_set_target_val(USDHC1_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(1));
		clock_enable(CCGR_USDHC1, 1);
		return;
	case 1:
		clock_enable(CCGR_USDHC2, 0);
		clock_set_target_val(USDHC2_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(1));
		clock_enable(CCGR_USDHC2, 1);
		return;
	case 2:
		clock_enable(CCGR_USDHC3, 0);
		clock_set_target_val(USDHC3_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(1));
		clock_enable(CCGR_USDHC3, 1);
		return;
	default:
		printf("Invalid usdhc index\n");
		return;
	}
}

void init_clk_ecspi(u32 index)
{
	switch (index) {
	case 0:
		clock_enable(CCGR_ECSPI1, 0);
		clock_set_target_val(ECSPI1_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_ECSPI1, 1);
		return;
	case 1:
		clock_enable(CCGR_ECSPI2, 0);
		clock_set_target_val(ECSPI2_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_ECSPI2, 1);
		return;
	case 2:
		clock_enable(CCGR_ECSPI3, 0);
		clock_set_target_val(ECSPI3_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_ECSPI3, 1);
		return;
	default:
		printf("Invalid ecspi index\n");
		return;
	}
}

void init_nand_clk(void)
{
	/*
	 * set rawnand root
	 * sys pll1 400M
	 */
	clock_enable(CCGR_RAWNAND, 0);
	clock_set_target_val(NAND_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(3) |
			     CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV4)); /* 100M */
	clock_enable(CCGR_RAWNAND, 1);
}

int clock_init(void)
{
	u32 val_cfg0;

	/*
	 * The gates are not exported to the clk tree, so configure them here.
	 * According to the ANAMIX SPEC:
	 * sys pll1 is fixed at 800MHz
	 * sys pll2 is fixed at 1GHz
	 * Here we only enable the outputs.
	 */
	val_cfg0 = readl(&ana_pll->sys_pll1_gnrl_ctl);
	val_cfg0 |= INTPLL_CLKE_MASK | INTPLL_DIV2_CLKE_MASK |
		INTPLL_DIV3_CLKE_MASK | INTPLL_DIV4_CLKE_MASK |
		INTPLL_DIV5_CLKE_MASK | INTPLL_DIV6_CLKE_MASK |
		INTPLL_DIV8_CLKE_MASK | INTPLL_DIV10_CLKE_MASK |
		INTPLL_DIV20_CLKE_MASK;
	writel(val_cfg0, &ana_pll->sys_pll1_gnrl_ctl);

	val_cfg0 = readl(&ana_pll->sys_pll2_gnrl_ctl);
	val_cfg0 |= INTPLL_CLKE_MASK | INTPLL_DIV2_CLKE_MASK |
		INTPLL_DIV3_CLKE_MASK | INTPLL_DIV4_CLKE_MASK |
		INTPLL_DIV5_CLKE_MASK | INTPLL_DIV6_CLKE_MASK |
		INTPLL_DIV8_CLKE_MASK | INTPLL_DIV10_CLKE_MASK |
		INTPLL_DIV20_CLKE_MASK;
	writel(val_cfg0, &ana_pll->sys_pll2_gnrl_ctl);

	/* Configure ARM at 1.2GHz */
	clock_set_target_val(ARM_A53_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(2));

	intpll_configure(ANATOP_ARM_PLL, MHZ(1200));

	/* Bypass CCM A53 ROOT, switch to ARM PLL -> MUX -> CPU */
	clock_set_target_val(CORE_SEL_CFG, CLK_ROOT_SOURCE_SEL(1));

	if (is_imx8mn() || is_imx8mp())
		intpll_configure(ANATOP_SYSTEM_PLL3, MHZ(600));
	else
		intpll_configure(ANATOP_SYSTEM_PLL3, MHZ(750));

#ifdef CONFIG_IMX8MP
	/* 8MP ROM already set NOC to 800MHz, only need to configure NOC_IO clk to 600MHz */
	/* 8MP ROM already set GIC to 400MHz, system_pll1_800m with div = 2 */
	clock_set_target_val(NOC_IO_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(2));
#else
	clock_set_target_val(NOC_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(2));

	/* config GIC to sys_pll2_100m */
	clock_enable(CCGR_GIC, 0);
	clock_set_target_val(GIC_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(3));
	clock_enable(CCGR_GIC, 1);
#endif

	clock_set_target_val(NAND_USDHC_BUS_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));

	clock_enable(CCGR_DDR1, 0);
	clock_set_target_val(DRAM_ALT_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));
	clock_set_target_val(DRAM_APB_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));
	clock_enable(CCGR_DDR1, 1);

	init_wdog_clk();

	clock_enable(CCGR_TEMP_SENSOR, 1);
	clock_enable(CCGR_SEC_DEBUG, 1);

	return 0;
};

u32 imx_get_uartclk(void)
{
	return 24000000U;
}

static u32 decode_intpll(enum clk_root_src intpll)
{
	u32 pll_gnrl_ctl, pll_div_ctl, pll_clke_mask;
	u32 main_div, pre_div, post_div, div;
	u64 freq;

	switch (intpll) {
	case ARM_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->arm_pll_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->arm_pll_div_ctl);
		break;
	case GPU_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->gpu_pll_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->gpu_pll_div_ctl);
		break;
	case VPU_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->vpu_pll_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->vpu_pll_div_ctl);
		break;
	case SYSTEM_PLL1_800M_CLK:
	case SYSTEM_PLL1_400M_CLK:
	case SYSTEM_PLL1_266M_CLK:
	case SYSTEM_PLL1_200M_CLK:
	case SYSTEM_PLL1_160M_CLK:
	case SYSTEM_PLL1_133M_CLK:
	case SYSTEM_PLL1_100M_CLK:
	case SYSTEM_PLL1_80M_CLK:
	case SYSTEM_PLL1_40M_CLK:
		pll_gnrl_ctl = readl(&ana_pll->sys_pll1_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->sys_pll1_div_ctl);
		break;
	case SYSTEM_PLL2_1000M_CLK:
	case SYSTEM_PLL2_500M_CLK:
	case SYSTEM_PLL2_333M_CLK:
	case SYSTEM_PLL2_250M_CLK:
	case SYSTEM_PLL2_200M_CLK:
	case SYSTEM_PLL2_166M_CLK:
	case SYSTEM_PLL2_125M_CLK:
	case SYSTEM_PLL2_100M_CLK:
	case SYSTEM_PLL2_50M_CLK:
		pll_gnrl_ctl = readl(&ana_pll->sys_pll2_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->sys_pll2_div_ctl);
		break;
	case SYSTEM_PLL3_CLK:
		pll_gnrl_ctl = readl(&ana_pll->sys_pll3_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->sys_pll3_div_ctl);
		break;
	default:
		return -EINVAL;
	}

	/* Only the 24M SYS_XTAL reference is supported; PAD_CLK is not taken into consideration */
	if ((pll_gnrl_ctl & INTPLL_REF_CLK_SEL_MASK) != 0)
		return 0;

	if ((pll_gnrl_ctl & INTPLL_RST_MASK) == 0)
		return 0;

	/*
	 * When BYPASS is equal to 1, the PLL enters bypass mode
	 * regardless of the value of RESETB
	 */
	if (pll_gnrl_ctl & INTPLL_BYPASS_MASK)
		return 24000000u;

	if (!(pll_gnrl_ctl & INTPLL_LOCK_MASK)) {
		puts("pll not locked\n");
		return 0;
	}

	switch (intpll) {
	case ARM_PLL_CLK:
	case GPU_PLL_CLK:
	case VPU_PLL_CLK:
	case SYSTEM_PLL3_CLK:
	case SYSTEM_PLL1_800M_CLK:
	case SYSTEM_PLL2_1000M_CLK:
		pll_clke_mask = INTPLL_CLKE_MASK;
		div = 1;
		break;
	case SYSTEM_PLL1_400M_CLK:
	case SYSTEM_PLL2_500M_CLK:
		pll_clke_mask = INTPLL_DIV2_CLKE_MASK;
		div = 2;
		break;
	case SYSTEM_PLL1_266M_CLK:
	case SYSTEM_PLL2_333M_CLK:
		pll_clke_mask = INTPLL_DIV3_CLKE_MASK;
		div = 3;
		break;
	case SYSTEM_PLL1_200M_CLK:
	case SYSTEM_PLL2_250M_CLK:
		pll_clke_mask = INTPLL_DIV4_CLKE_MASK;
		div = 4;
		break;
	case SYSTEM_PLL1_160M_CLK:
	case SYSTEM_PLL2_200M_CLK:
		pll_clke_mask = INTPLL_DIV5_CLKE_MASK;
		div = 5;
		break;
	case SYSTEM_PLL1_133M_CLK:
	case SYSTEM_PLL2_166M_CLK:
		pll_clke_mask = INTPLL_DIV6_CLKE_MASK;
		div = 6;
		break;
	case SYSTEM_PLL1_100M_CLK:
	case SYSTEM_PLL2_125M_CLK:
		pll_clke_mask = INTPLL_DIV8_CLKE_MASK;
		div = 8;
		break;
	case SYSTEM_PLL1_80M_CLK:
	case SYSTEM_PLL2_100M_CLK:
		pll_clke_mask = INTPLL_DIV10_CLKE_MASK;
		div = 10;
		break;
	case SYSTEM_PLL1_40M_CLK:
	case SYSTEM_PLL2_50M_CLK:
		pll_clke_mask = INTPLL_DIV20_CLKE_MASK;
		div = 20;
		break;
	default:
		return -EINVAL;
	}

	if ((pll_gnrl_ctl & pll_clke_mask) == 0)
		return 0;

	main_div = (pll_div_ctl & INTPLL_MAIN_DIV_MASK) >>
		INTPLL_MAIN_DIV_SHIFT;
	pre_div = (pll_div_ctl & INTPLL_PRE_DIV_MASK) >>
		INTPLL_PRE_DIV_SHIFT;
	post_div = (pll_div_ctl & INTPLL_POST_DIV_MASK) >>
		INTPLL_POST_DIV_SHIFT;

	/* FVCO = (m * FIN) / p, FOUT = (m * FIN) / (p * 2^s) */
	freq = 24000000ULL * main_div;

	return lldiv(freq, pre_div * (1 << post_div) * div);
}

static u32 decode_fracpll(enum clk_root_src frac_pll)
{
	u32 pll_gnrl_ctl, pll_fdiv_ctl0, pll_fdiv_ctl1;
	u32 main_div, pre_div, post_div, k;

	switch (frac_pll) {
	case DRAM_PLL1_CLK:
		pll_gnrl_ctl = readl(&ana_pll->dram_pll_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->dram_pll_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->dram_pll_fdiv_ctl1);
		break;
	case AUDIO_PLL1_CLK:
		pll_gnrl_ctl = readl(&ana_pll->audio_pll1_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->audio_pll1_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->audio_pll1_fdiv_ctl1);
		break;
	case AUDIO_PLL2_CLK:
		pll_gnrl_ctl = readl(&ana_pll->audio_pll2_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->audio_pll2_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->audio_pll2_fdiv_ctl1);
		break;
	case VIDEO_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->video_pll1_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->video_pll1_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->video_pll1_fdiv_ctl1);
		break;
	default:
		printf("Unsupported clk_root_src %d\n", frac_pll);
		return 0;
	}

	/* Only the 24M SYS_XTAL reference is supported; PAD_CLK is not taken into consideration */
	if ((pll_gnrl_ctl & GENMASK(1, 0)) != 0)
		return 0;

	if ((pll_gnrl_ctl & RST_MASK) == 0)
		return 0;

	/*
	 * When BYPASS is equal to 1, the PLL enters bypass mode
	 * regardless of the value of RESETB
	 */
	if (pll_gnrl_ctl & BYPASS_MASK)
		return 24000000u;

	if (!(pll_gnrl_ctl & LOCK_STATUS)) {
		puts("pll not locked\n");
		return 0;
	}

	if (!(pll_gnrl_ctl & CLKE_MASK))
		return 0;

	main_div = (pll_fdiv_ctl0 & MDIV_MASK) >>
		MDIV_SHIFT;
	pre_div = (pll_fdiv_ctl0 & PDIV_MASK) >>
		PDIV_SHIFT;
	post_div = (pll_fdiv_ctl0 & SDIV_MASK) >>
		SDIV_SHIFT;
	k = pll_fdiv_ctl1 & KDIV_MASK;
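
	/*
	 * Fout = Fref * (mdiv + kdiv / 65536) / (pdiv * 2^sdiv), kept in
	 * integer math by scaling the numerator by 65536.
	 */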
	return lldiv((main_div * 65536 + k) * 24000000ULL,
		     65536 * pre_div * (1 << post_div));
}

static u32 get_root_src_clk(enum clk_root_src root_src)
{
	switch (root_src) {
	case OSC_24M_CLK:
		return 24000000u;
	case OSC_HDMI_CLK:
		return 26000000u;
	case OSC_32K_CLK:
		return 32000u;
	case ARM_PLL_CLK:
	case GPU_PLL_CLK:
	case VPU_PLL_CLK:
	case SYSTEM_PLL1_800M_CLK:
	case SYSTEM_PLL1_400M_CLK:
	case SYSTEM_PLL1_266M_CLK:
	case SYSTEM_PLL1_200M_CLK:
	case SYSTEM_PLL1_160M_CLK:
	case SYSTEM_PLL1_133M_CLK:
	case SYSTEM_PLL1_100M_CLK:
	case SYSTEM_PLL1_80M_CLK:
	case SYSTEM_PLL1_40M_CLK:
	case SYSTEM_PLL2_1000M_CLK:
	case SYSTEM_PLL2_500M_CLK:
	case SYSTEM_PLL2_333M_CLK:
	case SYSTEM_PLL2_250M_CLK:
	case SYSTEM_PLL2_200M_CLK:
	case SYSTEM_PLL2_166M_CLK:
	case SYSTEM_PLL2_125M_CLK:
	case SYSTEM_PLL2_100M_CLK:
	case SYSTEM_PLL2_50M_CLK:
	case SYSTEM_PLL3_CLK:
		return decode_intpll(root_src);
	case DRAM_PLL1_CLK:
	case AUDIO_PLL1_CLK:
	case AUDIO_PLL2_CLK:
	case VIDEO_PLL_CLK:
		return decode_fracpll(root_src);
	case ARM_A53_ALT_CLK:
		return get_root_clk(ARM_A53_CLK_ROOT);
	default:
		return 0;
	}

	return 0;
}
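
/*
 * A clock root slice output is its selected source divided by the pre-
 * and post-dividers programmed in the target register; the podf fields
 * are encoded as (divider - 1), hence the "+ 1" below.
 */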
static u32 get_root_clk(enum clk_root_index clock_id)
{
	enum clk_root_src root_src;
	u32 post_podf, pre_podf, root_src_clk;

	if (clock_root_enabled(clock_id) <= 0)
		return 0;

	if (clock_get_prediv(clock_id, &pre_podf) < 0)
		return 0;

	if (clock_get_postdiv(clock_id, &post_podf) < 0)
		return 0;

	if (clock_get_src(clock_id, &root_src) < 0)
		return 0;

	root_src_clk = get_root_src_clk(root_src);

	return root_src_clk / (post_podf + 1) / (pre_podf + 1);
}

u32 get_arm_core_clk(void)
{
	enum clk_root_src root_src;
	u32 root_src_clk;

	if (clock_get_src(CORE_SEL_CFG, &root_src) < 0)
		return 0;

	root_src_clk = get_root_src_clk(root_src);

	return root_src_clk;
}

u32 mxc_get_clock(enum mxc_clock clk)
{
	u32 val;

	switch (clk) {
	case MXC_ARM_CLK:
		return get_arm_core_clk();
	case MXC_IPG_CLK:
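		/*
		 * IPG runs at half the AHB root, further divided by the
		 * 2-bit post-divider read back from the IPG root target
		 * register (also encoded as divider - 1).
		 */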
		clock_get_target_val(IPG_CLK_ROOT, &val);
		val = val & 0x3;
		return get_root_clk(AHB_CLK_ROOT) / 2 / (val + 1);
	case MXC_CSPI_CLK:
		return get_root_clk(ECSPI1_CLK_ROOT);
	case MXC_ESDHC_CLK:
		return get_root_clk(USDHC1_CLK_ROOT);
	case MXC_ESDHC2_CLK:
		return get_root_clk(USDHC2_CLK_ROOT);
	case MXC_ESDHC3_CLK:
		return get_root_clk(USDHC3_CLK_ROOT);
	case MXC_I2C_CLK:
		return get_root_clk(I2C1_CLK_ROOT);
	case MXC_UART_CLK:
		return get_root_clk(UART1_CLK_ROOT);
	case MXC_QSPI_CLK:
		return get_root_clk(QSPI_CLK_ROOT);
	default:
		printf("Unsupported mxc_clock %d\n", clk);
		break;
	}

	return 0;
}

#ifdef CONFIG_DWC_ETH_QOS
int set_clk_eqos(enum enet_freq type)
{
	u32 target;
	u32 enet1_ref;

	switch (type) {
	case ENET_125MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_125M_CLK;
		break;
	case ENET_50MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_50M_CLK;
		break;
	case ENET_25MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_25M_CLK;
		break;
	default:
		return -EINVAL;
	}

	/* disable the clock first */
	clock_enable(CCGR_QOS_ETHENET, 0);
	clock_enable(CCGR_SDMA2, 0);

	/* set enet axi clock 266MHz */
	target = CLK_ROOT_ON | ENET_AXI_CLK_ROOT_FROM_SYS1_PLL_266M |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_AXI_CLK_ROOT, target);

	target = CLK_ROOT_ON | enet1_ref |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_QOS_CLK_ROOT, target);

	target = CLK_ROOT_ON |
		 ENET1_TIME_CLK_ROOT_FROM_PLL_ENET_MAIN_100M_CLK |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV4);
	clock_set_target_val(ENET_QOS_TIMER_CLK_ROOT, target);

	/* enable clock */
	clock_enable(CCGR_QOS_ETHENET, 1);
	clock_enable(CCGR_SDMA2, 1);

	return 0;
}

int imx_eqos_txclk_set_rate(ulong rate)
{
	u32 val;
	u32 eqos_post_div;

	/* disable the clock first */
	clock_enable(CCGR_QOS_ETHENET, 0);
	clock_enable(CCGR_SDMA2, 0);
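
	/*
	 * The ENET QOS root is assumed to already run at 125 MHz (see
	 * set_clk_eqos() above), so the requested TX rate is reached
	 * purely through the post divider: 125 / 5 = 25 MHz,
	 * 125 / 50 = 2.5 MHz.
	 */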
	switch (rate) {
	case 125000000:
		eqos_post_div = 1;
		break;
	case 25000000:
		eqos_post_div = 125000000 / 25000000;
		break;
	case 2500000:
		eqos_post_div = 125000000 / 2500000;
		break;
	default:
		return -EINVAL;
	}

	clock_get_target_val(ENET_QOS_CLK_ROOT, &val);
	val &= ~(CLK_ROOT_PRE_DIV_MASK | CLK_ROOT_POST_DIV_MASK);
	val |= CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
	       CLK_ROOT_POST_DIV(eqos_post_div - 1);
	clock_set_target_val(ENET_QOS_CLK_ROOT, val);

	/* enable clock */
	clock_enable(CCGR_QOS_ETHENET, 1);
	clock_enable(CCGR_SDMA2, 1);

	return 0;
}

u32 imx_get_eqos_csr_clk(void)
{
	return get_root_clk(ENET_AXI_CLK_ROOT);
}
#endif

#ifdef CONFIG_FEC_MXC
int set_clk_enet(enum enet_freq type)
{
	u32 target;
	u32 enet1_ref;

	switch (type) {
	case ENET_125MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_125M_CLK;
		break;
	case ENET_50MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_50M_CLK;
		break;
	case ENET_25MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_25M_CLK;
		break;
	default:
		return -EINVAL;
	}

	/* disable the clock first */
	clock_enable(CCGR_ENET1, 0);
	clock_enable(CCGR_SIM_ENET, 0);

	/* set enet axi clock 266MHz */
	target = CLK_ROOT_ON | ENET_AXI_CLK_ROOT_FROM_SYS1_PLL_266M |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_AXI_CLK_ROOT, target);

	target = CLK_ROOT_ON | enet1_ref |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_REF_CLK_ROOT, target);

	target = CLK_ROOT_ON |
		 ENET1_TIME_CLK_ROOT_FROM_PLL_ENET_MAIN_100M_CLK |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV4);
	clock_set_target_val(ENET_TIMER_CLK_ROOT, target);

	/* enable clock */
	clock_enable(CCGR_SIM_ENET, 1);
	clock_enable(CCGR_ENET1, 1);

	return 0;
}
#endif