clock_imx8mm.c

// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright 2018-2019 NXP
 *
 * Peng Fan <peng.fan@nxp.com>
 */

#include <common.h>
#include <asm/arch/clock.h>
#include <asm/arch/imx-regs.h>
#include <asm/arch/sys_proto.h>
#include <asm/global_data.h>
#include <asm/io.h>
#include <div64.h>
#include <errno.h>
#include <linux/bitops.h>
#include <linux/delay.h>

DECLARE_GLOBAL_DATA_PTR;

static struct anamix_pll *ana_pll = (struct anamix_pll *)ANATOP_BASE_ADDR;

static u32 get_root_clk(enum clk_root_index clock_id);

void enable_ocotp_clk(unsigned char enable)
{
	clock_enable(CCGR_OCOTP, !!enable);
}

int enable_i2c_clk(unsigned char enable, unsigned i2c_num)
{
	/* 0 - 3 is valid i2c num */
	if (i2c_num > 3)
		return -EINVAL;

	clock_enable(CCGR_I2C1 + i2c_num, !!enable);

	return 0;
}

#ifdef CONFIG_SPL_BUILD
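/*
 * Fractional PLL (PLL1443x) rate table: each entry carries the pre-computed
 * mdiv/pdiv/sdiv/kdiv values for one output frequency, following
 * Fout = 24MHz * (mdiv + kdiv / 65536) / (pdiv * 2^sdiv).
 */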
static struct imx_int_pll_rate_table imx8mm_fracpll_tbl[] = {
	PLL_1443X_RATE(1000000000U, 250, 3, 1, 0),
	PLL_1443X_RATE(800000000U, 300, 9, 0, 0),
	PLL_1443X_RATE(750000000U, 250, 8, 0, 0),
	PLL_1443X_RATE(650000000U, 325, 3, 2, 0),
	PLL_1443X_RATE(600000000U, 300, 3, 2, 0),
	PLL_1443X_RATE(594000000U, 99, 1, 2, 0),
	PLL_1443X_RATE(400000000U, 300, 9, 1, 0),
	PLL_1443X_RATE(266666667U, 400, 9, 2, 0),
	PLL_1443X_RATE(167000000U, 334, 3, 4, 0),
	PLL_1443X_RATE(100000000U, 300, 9, 3, 0),
};
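
/*
 * Program a fractional PLL to one of the table rates: bypass the PLL, hold
 * it in reset, write the divider words, release reset, wait for lock, then
 * drop the bypass so the new frequency takes effect.
 */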
static int fracpll_configure(enum pll_clocks pll, u32 freq)
{
	int i;
	u32 tmp, div_val;
	void *pll_base;
	struct imx_int_pll_rate_table *rate;

	for (i = 0; i < ARRAY_SIZE(imx8mm_fracpll_tbl); i++) {
		if (freq == imx8mm_fracpll_tbl[i].rate)
			break;
	}

	if (i == ARRAY_SIZE(imx8mm_fracpll_tbl)) {
		printf("No matched freq table %u\n", freq);
		return -EINVAL;
	}

	rate = &imx8mm_fracpll_tbl[i];

	switch (pll) {
	case ANATOP_DRAM_PLL:
		setbits_le32(GPC_BASE_ADDR + 0xEC, 1 << 7);
		setbits_le32(GPC_BASE_ADDR + 0xF8, 1 << 5);
		writel(SRC_DDR1_ENABLE_MASK, SRC_BASE_ADDR + 0x1004);
		pll_base = &ana_pll->dram_pll_gnrl_ctl;
		break;
	case ANATOP_VIDEO_PLL:
		pll_base = &ana_pll->video_pll1_gnrl_ctl;
		break;
	default:
		return 0;
	}

	/* Bypass clock and set lock to pll output lock */
	tmp = readl(pll_base);
	tmp |= BYPASS_MASK;
	writel(tmp, pll_base);

	/* Enable RST */
	tmp &= ~RST_MASK;
	writel(tmp, pll_base);

	div_val = (rate->mdiv << MDIV_SHIFT) | (rate->pdiv << PDIV_SHIFT) |
		(rate->sdiv << SDIV_SHIFT);
	writel(div_val, pll_base + 4);
	writel(rate->kdiv << KDIV_SHIFT, pll_base + 8);

	__udelay(100);

	/* Disable RST */
	tmp |= RST_MASK;
	writel(tmp, pll_base);

	/* Wait Lock */
	while (!(readl(pll_base) & LOCK_STATUS))
		;

	/* Bypass */
	tmp &= ~BYPASS_MASK;
	writel(tmp, pll_base);

	return 0;
}

void dram_pll_init(ulong pll_val)
{
	fracpll_configure(ANATOP_DRAM_PLL, pll_val);
}

static struct dram_bypass_clk_setting imx8mm_dram_bypass_tbl[] = {
	DRAM_BYPASS_ROOT_CONFIG(MHZ(100), 2, CLK_ROOT_PRE_DIV1, 2,
				CLK_ROOT_PRE_DIV2),
	DRAM_BYPASS_ROOT_CONFIG(MHZ(250), 3, CLK_ROOT_PRE_DIV2, 2,
				CLK_ROOT_PRE_DIV2),
	DRAM_BYPASS_ROOT_CONFIG(MHZ(400), 1, CLK_ROOT_PRE_DIV2, 3,
				CLK_ROOT_PRE_DIV2),
};
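
/*
 * Route the DRAM clock through the CCM bypass path: pick the DRAM_ALT and
 * DRAM_APB root settings for the requested rate from the table above, then
 * switch DRAM_SEL_CFG away from the DRAM PLL (source 0) to the bypass
 * source (1). dram_disable_bypass() reverses this.
 */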
void dram_enable_bypass(ulong clk_val)
{
	int i;
	struct dram_bypass_clk_setting *config;

	for (i = 0; i < ARRAY_SIZE(imx8mm_dram_bypass_tbl); i++) {
		if (clk_val == imx8mm_dram_bypass_tbl[i].clk)
			break;
	}

	if (i == ARRAY_SIZE(imx8mm_dram_bypass_tbl)) {
		printf("No matched freq table %lu\n", clk_val);
		return;
	}

	config = &imx8mm_dram_bypass_tbl[i];

	clock_set_target_val(DRAM_ALT_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(config->alt_root_sel) |
			     CLK_ROOT_PRE_DIV(config->alt_pre_div));
	clock_set_target_val(DRAM_APB_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(config->apb_root_sel) |
			     CLK_ROOT_PRE_DIV(config->apb_pre_div));
	clock_set_target_val(DRAM_SEL_CFG, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));
}

void dram_disable_bypass(void)
{
	clock_set_target_val(DRAM_SEL_CFG, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(0));
	clock_set_target_val(DRAM_APB_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(4) |
			     CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV5));
}
#endif
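
/*
 * Program one of the integer PLLs (PLL1416x). The divider word encodes
 * Fout = 24MHz * main_div / (pre_div * 2^post_div); the per-frequency
 * comments below show that arithmetic for each supported rate. The same
 * bypass / reset / wait-for-lock sequence as the fractional PLLs applies,
 * and the relevant CLKE output gates are enabled at the end.
 */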
int intpll_configure(enum pll_clocks pll, ulong freq)
{
	void __iomem *pll_gnrl_ctl, __iomem *pll_div_ctl;
	u32 pll_div_ctl_val, pll_clke_masks;

	switch (pll) {
	case ANATOP_SYSTEM_PLL1:
		pll_gnrl_ctl = &ana_pll->sys_pll1_gnrl_ctl;
		pll_div_ctl = &ana_pll->sys_pll1_div_ctl;
		pll_clke_masks = INTPLL_DIV20_CLKE_MASK |
			INTPLL_DIV10_CLKE_MASK | INTPLL_DIV8_CLKE_MASK |
			INTPLL_DIV6_CLKE_MASK | INTPLL_DIV5_CLKE_MASK |
			INTPLL_DIV4_CLKE_MASK | INTPLL_DIV3_CLKE_MASK |
			INTPLL_DIV2_CLKE_MASK | INTPLL_CLKE_MASK;
		break;
	case ANATOP_SYSTEM_PLL2:
		pll_gnrl_ctl = &ana_pll->sys_pll2_gnrl_ctl;
		pll_div_ctl = &ana_pll->sys_pll2_div_ctl;
		pll_clke_masks = INTPLL_DIV20_CLKE_MASK |
			INTPLL_DIV10_CLKE_MASK | INTPLL_DIV8_CLKE_MASK |
			INTPLL_DIV6_CLKE_MASK | INTPLL_DIV5_CLKE_MASK |
			INTPLL_DIV4_CLKE_MASK | INTPLL_DIV3_CLKE_MASK |
			INTPLL_DIV2_CLKE_MASK | INTPLL_CLKE_MASK;
		break;
	case ANATOP_SYSTEM_PLL3:
		pll_gnrl_ctl = &ana_pll->sys_pll3_gnrl_ctl;
		pll_div_ctl = &ana_pll->sys_pll3_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	case ANATOP_ARM_PLL:
		pll_gnrl_ctl = &ana_pll->arm_pll_gnrl_ctl;
		pll_div_ctl = &ana_pll->arm_pll_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	case ANATOP_GPU_PLL:
		pll_gnrl_ctl = &ana_pll->gpu_pll_gnrl_ctl;
		pll_div_ctl = &ana_pll->gpu_pll_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	case ANATOP_VPU_PLL:
		pll_gnrl_ctl = &ana_pll->vpu_pll_gnrl_ctl;
		pll_div_ctl = &ana_pll->vpu_pll_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	default:
		return -EINVAL;
	};

	switch (freq) {
	case MHZ(600):
		/* 24 * 0x12c / 3 / 2 ^ 2 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0x12c) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(2);
		break;
	case MHZ(750):
		/* 24 * 0xfa / 2 / 2 ^ 2 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xfa) |
			INTPLL_PRE_DIV_VAL(2) | INTPLL_POST_DIV_VAL(2);
		break;
	case MHZ(800):
		/* 24 * 0x190 / 3 / 2 ^ 2 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0x190) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(2);
		break;
	case MHZ(1000):
		/* 24 * 0xfa / 3 / 2 ^ 1 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xfa) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(1);
		break;
	case MHZ(1200):
		/* 24 * 0xc8 / 2 / 2 ^ 1 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xc8) |
			INTPLL_PRE_DIV_VAL(2) | INTPLL_POST_DIV_VAL(1);
		break;
	case MHZ(2000):
		/* 24 * 0xfa / 3 / 2 ^ 0 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xfa) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(0);
		break;
	default:
		return -EINVAL;
	};

	/* Bypass clock and set lock to pll output lock */
	setbits_le32(pll_gnrl_ctl, INTPLL_BYPASS_MASK | INTPLL_LOCK_SEL_MASK);

	/* Enable reset */
	clrbits_le32(pll_gnrl_ctl, INTPLL_RST_MASK);

	/* Configure */
	writel(pll_div_ctl_val, pll_div_ctl);

	__udelay(100);

	/* Disable reset */
	setbits_le32(pll_gnrl_ctl, INTPLL_RST_MASK);

	/* Wait Lock */
	while (!(readl(pll_gnrl_ctl) & INTPLL_LOCK_MASK))
		;

	/* Clear bypass */
	clrbits_le32(pll_gnrl_ctl, INTPLL_BYPASS_MASK);
	setbits_le32(pll_gnrl_ctl, pll_clke_masks);

	return 0;
}

void init_uart_clk(u32 index)
{
	/*
	 * set uart clock root
	 * 24M OSC
	 */
	switch (index) {
	case 0:
		clock_enable(CCGR_UART1, 0);
		clock_set_target_val(UART1_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART1, 1);
		return;
	case 1:
		clock_enable(CCGR_UART2, 0);
		clock_set_target_val(UART2_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART2, 1);
		return;
	case 2:
		clock_enable(CCGR_UART3, 0);
		clock_set_target_val(UART3_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART3, 1);
		return;
	case 3:
		clock_enable(CCGR_UART4, 0);
		clock_set_target_val(UART4_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART4, 1);
		return;
	default:
		printf("Invalid uart index\n");
		return;
	}
}

void init_wdog_clk(void)
{
	clock_enable(CCGR_WDOG1, 0);
	clock_enable(CCGR_WDOG2, 0);
	clock_enable(CCGR_WDOG3, 0);
	clock_set_target_val(WDOG_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(0));
	clock_enable(CCGR_WDOG1, 1);
	clock_enable(CCGR_WDOG2, 1);
	clock_enable(CCGR_WDOG3, 1);
}

void init_clk_usdhc(u32 index)
{
	/*
	 * set usdhc clock root
	 * sys pll1 400M
	 */
	switch (index) {
	case 0:
		clock_enable(CCGR_USDHC1, 0);
		clock_set_target_val(USDHC1_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(1));
		clock_enable(CCGR_USDHC1, 1);
		return;
	case 1:
		clock_enable(CCGR_USDHC2, 0);
		clock_set_target_val(USDHC2_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(1));
		clock_enable(CCGR_USDHC2, 1);
		return;
	case 2:
		clock_enable(CCGR_USDHC3, 0);
		clock_set_target_val(USDHC3_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(1));
		clock_enable(CCGR_USDHC3, 1);
		return;
	default:
		printf("Invalid usdhc index\n");
		return;
	}
}

void init_clk_ecspi(u32 index)
{
	switch (index) {
	case 0:
		clock_enable(CCGR_ECSPI1, 0);
		clock_set_target_val(ECSPI1_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_ECSPI1, 1);
		return;
	case 1:
		clock_enable(CCGR_ECSPI2, 0);
		clock_set_target_val(ECSPI2_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_ECSPI2, 1);
		return;
	case 2:
		clock_enable(CCGR_ECSPI3, 0);
		clock_set_target_val(ECSPI3_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_ECSPI3, 1);
		return;
	default:
		printf("Invalid ecspi index\n");
		return;
	}
}

void init_nand_clk(void)
{
	/*
	 * set rawnand root
	 * sys pll1 400M
	 */
	clock_enable(CCGR_RAWNAND, 0);
	clock_set_target_val(NAND_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(3) |
			     CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV4)); /* 100M */
	clock_enable(CCGR_RAWNAND, 1);
}

int clock_init(void)
{
	u32 val_cfg0;

	/*
	 * The gate is not exported to clk tree, so configure them here.
	 * According to ANAMIX SPEC
	 * sys pll1 fixed at 800MHz
	 * sys pll2 fixed at 1GHz
	 * Here we only enable the outputs.
	 */
	val_cfg0 = readl(&ana_pll->sys_pll1_gnrl_ctl);
	val_cfg0 |= INTPLL_CLKE_MASK | INTPLL_DIV2_CLKE_MASK |
		INTPLL_DIV3_CLKE_MASK | INTPLL_DIV4_CLKE_MASK |
		INTPLL_DIV5_CLKE_MASK | INTPLL_DIV6_CLKE_MASK |
		INTPLL_DIV8_CLKE_MASK | INTPLL_DIV10_CLKE_MASK |
		INTPLL_DIV20_CLKE_MASK;
	writel(val_cfg0, &ana_pll->sys_pll1_gnrl_ctl);

	val_cfg0 = readl(&ana_pll->sys_pll2_gnrl_ctl);
	val_cfg0 |= INTPLL_CLKE_MASK | INTPLL_DIV2_CLKE_MASK |
		INTPLL_DIV3_CLKE_MASK | INTPLL_DIV4_CLKE_MASK |
		INTPLL_DIV5_CLKE_MASK | INTPLL_DIV6_CLKE_MASK |
		INTPLL_DIV8_CLKE_MASK | INTPLL_DIV10_CLKE_MASK |
		INTPLL_DIV20_CLKE_MASK;
	writel(val_cfg0, &ana_pll->sys_pll2_gnrl_ctl);

	/* Configure ARM at 1.2GHz */
	clock_set_target_val(ARM_A53_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(2));

	intpll_configure(ANATOP_ARM_PLL, MHZ(1200));

	/* Bypass CCM A53 ROOT, Switch to ARM PLL -> MUX -> CPU */
	clock_set_target_val(CORE_SEL_CFG, CLK_ROOT_SOURCE_SEL(1));

	if (is_imx8mn() || is_imx8mp())
		intpll_configure(ANATOP_SYSTEM_PLL3, MHZ(600));
	else
		intpll_configure(ANATOP_SYSTEM_PLL3, MHZ(750));

#ifdef CONFIG_IMX8MP
	/* 8MP ROM already set NOC to 800Mhz, only need to configure NOC_IO clk to 600Mhz */
	/* 8MP ROM already set GIC to 400Mhz, system_pll1_800m with div = 2 */
	clock_set_target_val(NOC_IO_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(2));
#else
	clock_set_target_val(NOC_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(2));

	/* config GIC to sys_pll2_100m */
	clock_enable(CCGR_GIC, 0);
	clock_set_target_val(GIC_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(3));
	clock_enable(CCGR_GIC, 1);
#endif

	clock_set_target_val(NAND_USDHC_BUS_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));

	clock_enable(CCGR_DDR1, 0);
	clock_set_target_val(DRAM_ALT_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));
	clock_set_target_val(DRAM_APB_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));
	clock_enable(CCGR_DDR1, 1);

	init_wdog_clk();

	clock_enable(CCGR_TEMP_SENSOR, 1);

	clock_enable(CCGR_SEC_DEBUG, 1);

	return 0;
};

u32 imx_get_uartclk(void)
{
	return 24000000U;
}
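
/*
 * Work out the current output of an integer PLL tap from the live register
 * values: Fout = 24MHz * main_div / (pre_div * 2^post_div * fixed_div),
 * where fixed_div is the per-tap divider (/2, /3, ... /20) selected below.
 * Returns 0 if the PLL is held in reset, gated, not locked, or fed by
 * something other than the 24M crystal, and 24MHz when bypass is set.
 */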
static u32 decode_intpll(enum clk_root_src intpll)
{
	u32 pll_gnrl_ctl, pll_div_ctl, pll_clke_mask;
	u32 main_div, pre_div, post_div, div;
	u64 freq;

	switch (intpll) {
	case ARM_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->arm_pll_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->arm_pll_div_ctl);
		break;
	case GPU_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->gpu_pll_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->gpu_pll_div_ctl);
		break;
	case VPU_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->vpu_pll_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->vpu_pll_div_ctl);
		break;
	case SYSTEM_PLL1_800M_CLK:
	case SYSTEM_PLL1_400M_CLK:
	case SYSTEM_PLL1_266M_CLK:
	case SYSTEM_PLL1_200M_CLK:
	case SYSTEM_PLL1_160M_CLK:
	case SYSTEM_PLL1_133M_CLK:
	case SYSTEM_PLL1_100M_CLK:
	case SYSTEM_PLL1_80M_CLK:
	case SYSTEM_PLL1_40M_CLK:
		pll_gnrl_ctl = readl(&ana_pll->sys_pll1_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->sys_pll1_div_ctl);
		break;
	case SYSTEM_PLL2_1000M_CLK:
	case SYSTEM_PLL2_500M_CLK:
	case SYSTEM_PLL2_333M_CLK:
	case SYSTEM_PLL2_250M_CLK:
	case SYSTEM_PLL2_200M_CLK:
	case SYSTEM_PLL2_166M_CLK:
	case SYSTEM_PLL2_125M_CLK:
	case SYSTEM_PLL2_100M_CLK:
	case SYSTEM_PLL2_50M_CLK:
		pll_gnrl_ctl = readl(&ana_pll->sys_pll2_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->sys_pll2_div_ctl);
		break;
	case SYSTEM_PLL3_CLK:
		pll_gnrl_ctl = readl(&ana_pll->sys_pll3_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->sys_pll3_div_ctl);
		break;
	default:
		return -EINVAL;
	}

	/* Only support SYS_XTAL 24M, PAD_CLK not take into consideration */
	if ((pll_gnrl_ctl & INTPLL_REF_CLK_SEL_MASK) != 0)
		return 0;

	if ((pll_gnrl_ctl & INTPLL_RST_MASK) == 0)
		return 0;

	/*
	 * When BYPASS is equal to 1, PLL enters the bypass mode
	 * regardless of the values of RESETB
	 */
	if (pll_gnrl_ctl & INTPLL_BYPASS_MASK)
		return 24000000u;

	if (!(pll_gnrl_ctl & INTPLL_LOCK_MASK)) {
		puts("pll not locked\n");
		return 0;
	}

	switch (intpll) {
	case ARM_PLL_CLK:
	case GPU_PLL_CLK:
	case VPU_PLL_CLK:
	case SYSTEM_PLL3_CLK:
	case SYSTEM_PLL1_800M_CLK:
	case SYSTEM_PLL2_1000M_CLK:
		pll_clke_mask = INTPLL_CLKE_MASK;
		div = 1;
		break;
	case SYSTEM_PLL1_400M_CLK:
	case SYSTEM_PLL2_500M_CLK:
		pll_clke_mask = INTPLL_DIV2_CLKE_MASK;
		div = 2;
		break;
	case SYSTEM_PLL1_266M_CLK:
	case SYSTEM_PLL2_333M_CLK:
		pll_clke_mask = INTPLL_DIV3_CLKE_MASK;
		div = 3;
		break;
	case SYSTEM_PLL1_200M_CLK:
	case SYSTEM_PLL2_250M_CLK:
		pll_clke_mask = INTPLL_DIV4_CLKE_MASK;
		div = 4;
		break;
	case SYSTEM_PLL1_160M_CLK:
	case SYSTEM_PLL2_200M_CLK:
		pll_clke_mask = INTPLL_DIV5_CLKE_MASK;
		div = 5;
		break;
	case SYSTEM_PLL1_133M_CLK:
	case SYSTEM_PLL2_166M_CLK:
		pll_clke_mask = INTPLL_DIV6_CLKE_MASK;
		div = 6;
		break;
	case SYSTEM_PLL1_100M_CLK:
	case SYSTEM_PLL2_125M_CLK:
		pll_clke_mask = INTPLL_DIV8_CLKE_MASK;
		div = 8;
		break;
	case SYSTEM_PLL1_80M_CLK:
	case SYSTEM_PLL2_100M_CLK:
		pll_clke_mask = INTPLL_DIV10_CLKE_MASK;
		div = 10;
		break;
	case SYSTEM_PLL1_40M_CLK:
	case SYSTEM_PLL2_50M_CLK:
		pll_clke_mask = INTPLL_DIV20_CLKE_MASK;
		div = 20;
		break;
	default:
		return -EINVAL;
	}

	if ((pll_gnrl_ctl & pll_clke_mask) == 0)
		return 0;

	main_div = (pll_div_ctl & INTPLL_MAIN_DIV_MASK) >>
		INTPLL_MAIN_DIV_SHIFT;
	pre_div = (pll_div_ctl & INTPLL_PRE_DIV_MASK) >>
		INTPLL_PRE_DIV_SHIFT;
	post_div = (pll_div_ctl & INTPLL_POST_DIV_MASK) >>
		INTPLL_POST_DIV_SHIFT;

	/* FFVCO = (m * FFIN) / p, FFOUT = (m * FFIN) / (p * 2^s) */
	freq = 24000000ULL * main_div;
	return lldiv(freq, pre_div * (1 << post_div) * div);
}
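
/*
 * Same idea for the fractional PLLs: read back mdiv/pdiv/sdiv/kdiv and
 * return Fout = 24MHz * (mdiv + kdiv / 65536) / (pdiv * 2^sdiv), with the
 * same early exits for a bypassed, reset, gated or non-24M-referenced PLL.
 */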
static u32 decode_fracpll(enum clk_root_src frac_pll)
{
	u32 pll_gnrl_ctl, pll_fdiv_ctl0, pll_fdiv_ctl1;
	u32 main_div, pre_div, post_div, k;

	switch (frac_pll) {
	case DRAM_PLL1_CLK:
		pll_gnrl_ctl = readl(&ana_pll->dram_pll_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->dram_pll_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->dram_pll_fdiv_ctl1);
		break;
	case AUDIO_PLL1_CLK:
		pll_gnrl_ctl = readl(&ana_pll->audio_pll1_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->audio_pll1_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->audio_pll1_fdiv_ctl1);
		break;
	case AUDIO_PLL2_CLK:
		pll_gnrl_ctl = readl(&ana_pll->audio_pll2_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->audio_pll2_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->audio_pll2_fdiv_ctl1);
		break;
	case VIDEO_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->video_pll1_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->video_pll1_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->video_pll1_fdiv_ctl1);
		break;
	default:
		printf("Not supported\n");
		return 0;
	}

	/* Only support SYS_XTAL 24M, PAD_CLK not take into consideration */
	if ((pll_gnrl_ctl & GENMASK(1, 0)) != 0)
		return 0;

	if ((pll_gnrl_ctl & RST_MASK) == 0)
		return 0;

	/*
	 * When BYPASS is equal to 1, PLL enters the bypass mode
	 * regardless of the values of RESETB
	 */
	if (pll_gnrl_ctl & BYPASS_MASK)
		return 24000000u;

	if (!(pll_gnrl_ctl & LOCK_STATUS)) {
		puts("pll not locked\n");
		return 0;
	}

	if (!(pll_gnrl_ctl & CLKE_MASK))
		return 0;

	main_div = (pll_fdiv_ctl0 & MDIV_MASK) >>
		MDIV_SHIFT;
	pre_div = (pll_fdiv_ctl0 & PDIV_MASK) >>
		PDIV_SHIFT;
	post_div = (pll_fdiv_ctl0 & SDIV_MASK) >>
		SDIV_SHIFT;

	k = pll_fdiv_ctl1 & KDIV_MASK;

	return lldiv((main_div * 65536 + k) * 24000000ULL,
		     65536 * pre_div * (1 << post_div));
}

static u32 get_root_src_clk(enum clk_root_src root_src)
{
	switch (root_src) {
	case OSC_24M_CLK:
		return 24000000u;
	case OSC_HDMI_CLK:
		return 26000000u;
	case OSC_32K_CLK:
		return 32000u;
	case ARM_PLL_CLK:
	case GPU_PLL_CLK:
	case VPU_PLL_CLK:
	case SYSTEM_PLL1_800M_CLK:
	case SYSTEM_PLL1_400M_CLK:
	case SYSTEM_PLL1_266M_CLK:
	case SYSTEM_PLL1_200M_CLK:
	case SYSTEM_PLL1_160M_CLK:
	case SYSTEM_PLL1_133M_CLK:
	case SYSTEM_PLL1_100M_CLK:
	case SYSTEM_PLL1_80M_CLK:
	case SYSTEM_PLL1_40M_CLK:
	case SYSTEM_PLL2_1000M_CLK:
	case SYSTEM_PLL2_500M_CLK:
	case SYSTEM_PLL2_333M_CLK:
	case SYSTEM_PLL2_250M_CLK:
	case SYSTEM_PLL2_200M_CLK:
	case SYSTEM_PLL2_166M_CLK:
	case SYSTEM_PLL2_125M_CLK:
	case SYSTEM_PLL2_100M_CLK:
	case SYSTEM_PLL2_50M_CLK:
	case SYSTEM_PLL3_CLK:
		return decode_intpll(root_src);
	case DRAM_PLL1_CLK:
	case AUDIO_PLL1_CLK:
	case AUDIO_PLL2_CLK:
	case VIDEO_PLL_CLK:
		return decode_fracpll(root_src);
	case ARM_A53_ALT_CLK:
		return get_root_clk(ARM_A53_CLK_ROOT);
	default:
		return 0;
	}

	return 0;
}
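
/*
 * Resolve a CCM clock root to a frequency: look up its source, decode that
 * source's rate, then apply the root's dividers
 * (freq = source / (pre_podf + 1) / (post_podf + 1)). A disabled root
 * reads back as 0.
 */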
static u32 get_root_clk(enum clk_root_index clock_id)
{
	enum clk_root_src root_src;
	u32 post_podf, pre_podf, root_src_clk;

	if (clock_root_enabled(clock_id) <= 0)
		return 0;

	if (clock_get_prediv(clock_id, &pre_podf) < 0)
		return 0;

	if (clock_get_postdiv(clock_id, &post_podf) < 0)
		return 0;

	if (clock_get_src(clock_id, &root_src) < 0)
		return 0;

	root_src_clk = get_root_src_clk(root_src);

	return root_src_clk / (post_podf + 1) / (pre_podf + 1);
}

u32 get_arm_core_clk(void)
{
	enum clk_root_src root_src;
	u32 root_src_clk;

	if (clock_get_src(CORE_SEL_CFG, &root_src) < 0)
		return 0;

	root_src_clk = get_root_src_clk(root_src);

	return root_src_clk;
}

u32 mxc_get_clock(enum mxc_clock clk)
{
	u32 val;

	switch (clk) {
	case MXC_ARM_CLK:
		return get_arm_core_clk();
	case MXC_IPG_CLK:
		clock_get_target_val(IPG_CLK_ROOT, &val);
		val = val & 0x3;
		return get_root_clk(AHB_CLK_ROOT) / 2 / (val + 1);
	case MXC_CSPI_CLK:
		return get_root_clk(ECSPI1_CLK_ROOT);
	case MXC_ESDHC_CLK:
		return get_root_clk(USDHC1_CLK_ROOT);
	case MXC_ESDHC2_CLK:
		return get_root_clk(USDHC2_CLK_ROOT);
	case MXC_ESDHC3_CLK:
		return get_root_clk(USDHC3_CLK_ROOT);
	case MXC_I2C_CLK:
		return get_root_clk(I2C1_CLK_ROOT);
	case MXC_UART_CLK:
		return get_root_clk(UART1_CLK_ROOT);
	case MXC_QSPI_CLK:
		return get_root_clk(QSPI_CLK_ROOT);
	default:
		printf("Unsupported mxc_clock %d\n", clk);
		break;
	}

	return 0;
}

#ifdef CONFIG_DWC_ETH_QOS
int set_clk_eqos(enum enet_freq type)
{
	u32 target;
	u32 enet1_ref;

	switch (type) {
	case ENET_125MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_125M_CLK;
		break;
	case ENET_50MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_50M_CLK;
		break;
	case ENET_25MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_25M_CLK;
		break;
	default:
		return -EINVAL;
	}

	/* disable the clock first */
	clock_enable(CCGR_QOS_ETHENET, 0);
	clock_enable(CCGR_SDMA2, 0);

	/* set enet axi clock 266Mhz */
	target = CLK_ROOT_ON | ENET_AXI_CLK_ROOT_FROM_SYS1_PLL_266M |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_AXI_CLK_ROOT, target);

	target = CLK_ROOT_ON | enet1_ref |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_QOS_CLK_ROOT, target);

	target = CLK_ROOT_ON |
		 ENET1_TIME_CLK_ROOT_FROM_PLL_ENET_MAIN_100M_CLK |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV4);
	clock_set_target_val(ENET_QOS_TIMER_CLK_ROOT, target);

	/* enable clock */
	clock_enable(CCGR_QOS_ETHENET, 1);
	clock_enable(CCGR_SDMA2, 1);

	return 0;
}
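
/*
 * Re-divide the EQoS TX clock for the negotiated link speed. The root is
 * assumed to run from the 125MHz reference set up in set_clk_eqos(), so the
 * post divider becomes 125MHz / rate (typically 1, 5 or 50 for the
 * 1000/100/10 Mbps RGMII rates); the divider field is written as the
 * divide-by-N value minus one.
 */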
int imx_eqos_txclk_set_rate(u32 rate)
{
	u32 val;
	u32 eqos_post_div;

	/* disable the clock first */
	clock_enable(CCGR_QOS_ETHENET, 0);
	clock_enable(CCGR_SDMA2, 0);

	switch (rate) {
	case 125000000:
		eqos_post_div = 1;
		break;
	case 25000000:
		eqos_post_div = 125000000 / 25000000;
		break;
	case 2500000:
		eqos_post_div = 125000000 / 2500000;
		break;
	default:
		return -EINVAL;
	}

	clock_get_target_val(ENET_QOS_CLK_ROOT, &val);
	val &= ~(CLK_ROOT_PRE_DIV_MASK | CLK_ROOT_POST_DIV_MASK);
	val |= CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
	       CLK_ROOT_POST_DIV(eqos_post_div - 1);
	clock_set_target_val(ENET_QOS_CLK_ROOT, val);

	/* enable clock */
	clock_enable(CCGR_QOS_ETHENET, 1);
	clock_enable(CCGR_SDMA2, 1);

	return 0;
}

u32 imx_get_eqos_csr_clk(void)
{
	return get_root_clk(ENET_AXI_CLK_ROOT);
}
#endif

#ifdef CONFIG_FEC_MXC
int set_clk_enet(enum enet_freq type)
{
	u32 target;
	u32 enet1_ref;

	switch (type) {
	case ENET_125MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_125M_CLK;
		break;
	case ENET_50MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_50M_CLK;
		break;
	case ENET_25MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_25M_CLK;
		break;
	default:
		return -EINVAL;
	}

	/* disable the clock first */
	clock_enable(CCGR_ENET1, 0);
	clock_enable(CCGR_SIM_ENET, 0);

	/* set enet axi clock 266Mhz */
	target = CLK_ROOT_ON | ENET_AXI_CLK_ROOT_FROM_SYS1_PLL_266M |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_AXI_CLK_ROOT, target);

	target = CLK_ROOT_ON | enet1_ref |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_REF_CLK_ROOT, target);

	target = CLK_ROOT_ON |
		 ENET1_TIME_CLK_ROOT_FROM_PLL_ENET_MAIN_100M_CLK |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV4);
	clock_set_target_val(ENET_TIMER_CLK_ROOT, target);

	/* enable clock */
	clock_enable(CCGR_SIM_ENET, 1);
	clock_enable(CCGR_ENET1, 1);

	return 0;
}
#endif