clk_rk3288.c

// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2015 Google, Inc
 */

#include <common.h>
#include <bitfield.h>
#include <clk-uclass.h>
#include <div64.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <log.h>
#include <malloc.h>
#include <mapmem.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru.h>
#include <asm/arch-rockchip/grf_rk3288.h>
#include <asm/arch-rockchip/hardware.h>
#include <dt-bindings/clock/rk3288-cru.h>
#include <dm/device-internal.h>
#include <dm/lists.h>
#include <dm/uclass-internal.h>
#include <linux/bitops.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/log2.h>
#include <linux/stringify.h>

DECLARE_GLOBAL_DATA_PTR;

struct rk3288_clk_plat {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3288_cru dtd;
#endif
};

struct pll_div {
	u32 nr;
	u32 nf;
	u32 no;
};

enum {
	VCO_MAX_HZ = 2200U * 1000000,
	VCO_MIN_HZ = 440 * 1000000,
	OUTPUT_MAX_HZ = 2200U * 1000000,
	OUTPUT_MIN_HZ = 27500000,
	FREF_MAX_HZ = 2200U * 1000000,
	FREF_MIN_HZ = 269 * 1000,
};

enum {
	/* PLL CON0 */
	PLL_OD_MASK = 0x0f,

	/* PLL CON1 */
	PLL_NF_MASK = 0x1fff,

	/* PLL CON2 */
	PLL_BWADJ_MASK = 0x0fff,

	/* PLL CON3 */
	PLL_RESET_SHIFT = 5,

	/* CLKSEL0 */
	CORE_SEL_PLL_SHIFT = 15,
	CORE_SEL_PLL_MASK = 1 << CORE_SEL_PLL_SHIFT,
	A17_DIV_SHIFT = 8,
	A17_DIV_MASK = 0x1f << A17_DIV_SHIFT,
	MP_DIV_SHIFT = 4,
	MP_DIV_MASK = 0xf << MP_DIV_SHIFT,
	M0_DIV_SHIFT = 0,
	M0_DIV_MASK = 0xf << M0_DIV_SHIFT,

	/* CLKSEL1: pd bus clk pll sel: codec or general */
	PD_BUS_SEL_PLL_MASK = 15,
	PD_BUS_SEL_CPLL = 0,
	PD_BUS_SEL_GPLL,

	/* pd bus pclk div: pclk = pd_bus_aclk / (div + 1) */
	PD_BUS_PCLK_DIV_SHIFT = 12,
	PD_BUS_PCLK_DIV_MASK = 7 << PD_BUS_PCLK_DIV_SHIFT,

	/* pd bus hclk div: aclk_bus : hclk_bus = 1:1 or 2:1 or 4:1 */
	PD_BUS_HCLK_DIV_SHIFT = 8,
	PD_BUS_HCLK_DIV_MASK = 3 << PD_BUS_HCLK_DIV_SHIFT,

	/* pd bus aclk div: pd_bus_aclk = pd_bus_src_clk / (div0 * div1) */
	PD_BUS_ACLK_DIV0_SHIFT = 3,
	PD_BUS_ACLK_DIV0_MASK = 0x1f << PD_BUS_ACLK_DIV0_SHIFT,
	PD_BUS_ACLK_DIV1_SHIFT = 0,
	PD_BUS_ACLK_DIV1_MASK = 0x7 << PD_BUS_ACLK_DIV1_SHIFT,

	/*
	 * CLKSEL10
	 * peripheral bus pclk div:
	 * aclk_bus : pclk_bus = 1:1 or 2:1 or 4:1 or 8:1
	 */
	PERI_SEL_PLL_SHIFT = 15,
	PERI_SEL_PLL_MASK = 1 << PERI_SEL_PLL_SHIFT,
	PERI_SEL_CPLL = 0,
	PERI_SEL_GPLL,
	PERI_PCLK_DIV_SHIFT = 12,
	PERI_PCLK_DIV_MASK = 3 << PERI_PCLK_DIV_SHIFT,

	/* peripheral bus hclk div: aclk_bus : hclk_bus = 1:1 or 2:1 or 4:1 */
	PERI_HCLK_DIV_SHIFT = 8,
	PERI_HCLK_DIV_MASK = 3 << PERI_HCLK_DIV_SHIFT,

	/*
	 * peripheral bus aclk div:
	 * aclk_periph = periph_clk_src / (peri_aclk_div_con + 1)
	 */
	PERI_ACLK_DIV_SHIFT = 0,
	PERI_ACLK_DIV_MASK = 0x1f << PERI_ACLK_DIV_SHIFT,

	/*
	 * CLKSEL24
	 * saradc_div_con:
	 * clk_saradc = 24MHz / (saradc_div_con + 1)
	 */
	CLK_SARADC_DIV_CON_SHIFT = 8,
	CLK_SARADC_DIV_CON_MASK = GENMASK(15, 8),
	CLK_SARADC_DIV_CON_WIDTH = 8,

	SOCSTS_DPLL_LOCK = 1 << 5,
	SOCSTS_APLL_LOCK = 1 << 6,
	SOCSTS_CPLL_LOCK = 1 << 7,
	SOCSTS_GPLL_LOCK = 1 << 8,
	SOCSTS_NPLL_LOCK = 1 << 9,
};

#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))

#define PLL_DIVISORS(hz, _nr, _no) {\
	.nr = _nr, .nf = (u32)((u64)hz * _nr * _no / OSC_HZ), .no = _no};\
	_Static_assert(((u64)hz * _nr * _no / OSC_HZ) * OSC_HZ /\
		       (_nr * _no) == hz, #hz "Hz cannot be hit with PLL "\
		       "divisors on line " __stringify(__LINE__));

/* Keep divisors as low as possible to reduce jitter and power usage */
static const struct pll_div apll_init_cfg = PLL_DIVISORS(APLL_HZ, 1, 1);
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 2, 2);
static const struct pll_div cpll_init_cfg = PLL_DIVISORS(CPLL_HZ, 1, 2);
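
/*
 * Illustrative numbers only (not used by the code): assuming the usual 24 MHz
 * OSC_HZ and a 594 MHz GPLL_HZ, PLL_DIVISORS(GPLL_HZ, 2, 2) computes
 * nf = 594 MHz * 2 * 2 / 24 MHz = 99, giving VCO = 24 MHz / 2 * 99 = 1188 MHz
 * and output = 1188 MHz / 2 = 594 MHz; DIV_TO_RATE(594 MHz, 3) = 148.5 MHz.
 * The _Static_assert in PLL_DIVISORS rejects any target rate that cannot be
 * hit exactly with the chosen nr/no.
 */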

static int rkclk_set_pll(struct rockchip_cru *cru, enum rk_clk_id clk_id,
			 const struct pll_div *div)
{
	int pll_id = rk_pll_id(clk_id);
	struct rk3288_pll *pll = &cru->pll[pll_id];
	/* All PLLs have same VCO and output frequency range restrictions. */
	uint vco_hz = OSC_HZ / 1000 * div->nf / div->nr * 1000;
	uint output_hz = vco_hz / div->no;

	debug("PLL at %x: nf=%d, nr=%d, no=%d, vco=%u Hz, output=%u Hz\n",
	      (uint)pll, div->nf, div->nr, div->no, vco_hz, output_hz);
	assert(vco_hz >= VCO_MIN_HZ && vco_hz <= VCO_MAX_HZ &&
	       output_hz >= OUTPUT_MIN_HZ && output_hz <= OUTPUT_MAX_HZ &&
	       (div->no == 1 || !(div->no % 2)));

	/* enter reset */
	rk_setreg(&pll->con3, 1 << PLL_RESET_SHIFT);

	rk_clrsetreg(&pll->con0, CLKR_MASK | PLL_OD_MASK,
		     ((div->nr - 1) << CLKR_SHIFT) | (div->no - 1));
	rk_clrsetreg(&pll->con1, CLKF_MASK, div->nf - 1);
	rk_clrsetreg(&pll->con2, PLL_BWADJ_MASK, (div->nf >> 1) - 1);

	udelay(10);

	/* return from reset */
	rk_clrreg(&pll->con3, 1 << PLL_RESET_SHIFT);

	return 0;
}

static int rkclk_configure_ddr(struct rockchip_cru *cru, struct rk3288_grf *grf,
			       unsigned int hz)
{
	static const struct pll_div dpll_cfg[] = {
		{.nf = 25, .nr = 2, .no = 1},
		{.nf = 400, .nr = 9, .no = 2},
		{.nf = 500, .nr = 9, .no = 2},
		{.nf = 100, .nr = 3, .no = 1},
	};
	int cfg;

	switch (hz) {
	case 300000000:
		cfg = 0;
		break;
	case 533000000:	/* actually 533.3 MHz */
		cfg = 1;
		break;
	case 666000000:	/* actually 666.6 MHz */
		cfg = 2;
		break;
	case 800000000:
		cfg = 3;
		break;
	default:
		debug("Unsupported SDRAM frequency\n");
		return -EINVAL;
	}

	/* pll enter slow-mode */
	rk_clrsetreg(&cru->cru_mode_con, DPLL_MODE_MASK,
		     DPLL_MODE_SLOW << DPLL_MODE_SHIFT);

	rkclk_set_pll(cru, CLK_DDR, &dpll_cfg[cfg]);

	/* wait for pll lock */
	while (!(readl(&grf->soc_status[1]) & SOCSTS_DPLL_LOCK))
		udelay(1);

	/* PLL enter normal-mode */
	rk_clrsetreg(&cru->cru_mode_con, DPLL_MODE_MASK,
		     DPLL_MODE_NORMAL << DPLL_MODE_SHIFT);

	return 0;
}
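
/*
 * Sanity check on the dpll_cfg table above (illustrative, assuming a 24 MHz
 * OSC_HZ): each entry follows Fout = OSC_HZ / nr * nf / no, so
 * {nf = 25, nr = 2, no = 1} gives 24 / 2 * 25 = 300 MHz and
 * {nf = 400, nr = 9, no = 2} gives 24 / 9 * 400 / 2 = 533.3 MHz, matching the
 * case labels in rkclk_configure_ddr().
 */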

#ifndef CONFIG_SPL_BUILD
#define VCO_MAX_KHZ	2200000
#define VCO_MIN_KHZ	440000
#define FREF_MAX_KHZ	2200000
#define FREF_MIN_KHZ	269

static int pll_para_config(ulong freq_hz, struct pll_div *div, uint *ext_div)
{
	uint ref_khz = OSC_HZ / 1000, nr, nf = 0;
	uint fref_khz;
	uint diff_khz, best_diff_khz;
	const uint max_nr = 1 << 6, max_nf = 1 << 12, max_no = 1 << 4;
	uint vco_khz;
	uint no = 1;
	uint freq_khz = freq_hz / 1000;

	if (!freq_hz) {
		printf("%s: the frequency cannot be 0 Hz\n", __func__);
		return -EINVAL;
	}

	no = DIV_ROUND_UP(VCO_MIN_KHZ, freq_khz);
	if (ext_div) {
		*ext_div = DIV_ROUND_UP(no, max_no);
		no = DIV_ROUND_UP(no, *ext_div);
	}

	/* only even divisors (and 1) are supported */
	if (no > 1)
		no = DIV_ROUND_UP(no, 2) * 2;

	vco_khz = freq_khz * no;
	if (ext_div)
		vco_khz *= *ext_div;

	if (vco_khz < VCO_MIN_KHZ || vco_khz > VCO_MAX_KHZ || no > max_no) {
		printf("%s: Cannot find a supported VCO for frequency (%luHz).\n",
		       __func__, freq_hz);
		return -1;
	}

	div->no = no;

	best_diff_khz = vco_khz;
	for (nr = 1; nr < max_nr && best_diff_khz; nr++) {
		fref_khz = ref_khz / nr;
		if (fref_khz < FREF_MIN_KHZ)
			break;
		if (fref_khz > FREF_MAX_KHZ)
			continue;

		nf = vco_khz / fref_khz;
		if (nf >= max_nf)
			continue;
		diff_khz = vco_khz - nf * fref_khz;
		if (nf + 1 < max_nf && diff_khz > fref_khz / 2) {
			nf++;
			diff_khz = fref_khz - diff_khz;
		}

		if (diff_khz >= best_diff_khz)
			continue;

		best_diff_khz = diff_khz;
		div->nr = nr;
		div->nf = nf;
	}

	if (best_diff_khz > 4 * 1000) {
		printf("%s: Failed to match output frequency %lu, difference is %u Hz, exceeds 4 MHz\n",
		       __func__, freq_hz, best_diff_khz * 1000);
		return -EINVAL;
	}

	return 0;
}
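
/*
 * Illustrative trace (not part of the driver, assuming a 24 MHz OSC): for a
 * 148.5 MHz VOP dot clock, the VCO must stay above 440 MHz, so no is rounded
 * up to 4 and vco_khz = 594000. The nr loop then settles on nr = 4, nf = 99
 * (fref = 6 MHz and 6 MHz * 99 = 594 MHz exactly), i.e. a zero-error match,
 * which rockchip_vop_set_clk() below programs into the NPLL.
 */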

static int rockchip_mac_set_clk(struct rockchip_cru *cru, uint freq)
{
	ulong ret;

	/*
	 * The gmac clock can be derived either from an external clock
	 * or can be generated internally by a divider from SCLK_MAC.
	 */
	if (readl(&cru->cru_clksel_con[21]) & RMII_EXTCLK_MASK) {
		/* An external clock will always generate the right rate... */
		ret = freq;
	} else {
		u32 con = readl(&cru->cru_clksel_con[21]);
		ulong pll_rate;
		u8 div;

		if (((con >> EMAC_PLL_SHIFT) & EMAC_PLL_MASK) ==
		    EMAC_PLL_SELECT_GENERAL)
			pll_rate = GPLL_HZ;
		else if (((con >> EMAC_PLL_SHIFT) & EMAC_PLL_MASK) ==
			 EMAC_PLL_SELECT_CODEC)
			pll_rate = CPLL_HZ;
		else
			pll_rate = NPLL_HZ;

		div = DIV_ROUND_UP(pll_rate, freq) - 1;
		if (div <= 0x1f)
			rk_clrsetreg(&cru->cru_clksel_con[21], MAC_DIV_CON_MASK,
				     div << MAC_DIV_CON_SHIFT);
		else
			debug("Unsupported div for gmac:%d\n", div);

		return DIV_TO_RATE(pll_rate, div);
	}

	return ret;
}

static int rockchip_vop_set_clk(struct rockchip_cru *cru, struct rk3288_grf *grf,
				int periph, unsigned int rate_hz)
{
	struct pll_div npll_config = {0};
	u32 lcdc_div;
	int ret;

	ret = pll_para_config(rate_hz, &npll_config, &lcdc_div);
	if (ret)
		return ret;

	rk_clrsetreg(&cru->cru_mode_con, NPLL_MODE_MASK,
		     NPLL_MODE_SLOW << NPLL_MODE_SHIFT);

	rkclk_set_pll(cru, CLK_NEW, &npll_config);

	/* waiting for pll lock */
	while (1) {
		if (readl(&grf->soc_status[1]) & SOCSTS_NPLL_LOCK)
			break;
		udelay(1);
	}

	rk_clrsetreg(&cru->cru_mode_con, NPLL_MODE_MASK,
		     NPLL_MODE_NORMAL << NPLL_MODE_SHIFT);

	/* vop dclk source clk: npll, dclk_div: 1 */
	switch (periph) {
	case DCLK_VOP0:
		rk_clrsetreg(&cru->cru_clksel_con[27], 0xff << 8 | 3 << 0,
			     (lcdc_div - 1) << 8 | 2 << 0);
		break;
	case DCLK_VOP1:
		rk_clrsetreg(&cru->cru_clksel_con[29], 0xff << 8 | 3 << 6,
			     (lcdc_div - 1) << 8 | 2 << 6);
		break;
	}

	return 0;
}

static u32 rockchip_clk_gcd(u32 a, u32 b)
{
	while (b != 0) {
		int r = b;

		b = a % b;
		a = r;
	}

	return a;
}

static ulong rockchip_i2s_get_clk(struct rockchip_cru *cru, uint gclk_rate)
{
	unsigned long long rate;
	uint val;
	int n, d;

	val = readl(&cru->cru_clksel_con[8]);
	n = (val & I2S0_FRAC_NUMER_MASK) >> I2S0_FRAC_NUMER_SHIFT;
	d = (val & I2S0_FRAC_DENOM_MASK) >> I2S0_FRAC_DENOM_SHIFT;

	rate = (unsigned long long)gclk_rate * n;
	do_div(rate, d);

	return (ulong)rate;
}

static ulong rockchip_i2s_set_clk(struct rockchip_cru *cru, uint gclk_rate,
				  uint freq)
{
	int n, d;
	int v;

	/* set frac divider */
	v = rockchip_clk_gcd(gclk_rate, freq);
	n = gclk_rate / v;
	d = freq / v;
	assert(freq == gclk_rate / n * d);
	writel(d << I2S0_FRAC_NUMER_SHIFT | n << I2S0_FRAC_DENOM_SHIFT,
	       &cru->cru_clksel_con[8]);

	return rockchip_i2s_get_clk(cru, gclk_rate);
}
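
/*
 * Illustrative numbers (assuming the general PLL runs at 594 MHz): for a
 * 12.288 MHz I2S clock, gcd(594000000, 12288000) = 48000, so the fraction
 * programmed is 12288000 / 48000 = 256 over 594000000 / 48000 = 12375, and
 * rockchip_i2s_get_clk() reads back 594 MHz * 256 / 12375 = 12.288 MHz.
 */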

#endif /* CONFIG_SPL_BUILD */

static void rkclk_init(struct rockchip_cru *cru, struct rk3288_grf *grf)
{
	u32 aclk_div;
	u32 hclk_div;
	u32 pclk_div;

	/* pll enter slow-mode */
	rk_clrsetreg(&cru->cru_mode_con,
		     GPLL_MODE_MASK | CPLL_MODE_MASK,
		     GPLL_MODE_SLOW << GPLL_MODE_SHIFT |
		     CPLL_MODE_SLOW << CPLL_MODE_SHIFT);

	/* init pll */
	rkclk_set_pll(cru, CLK_GENERAL, &gpll_init_cfg);
	rkclk_set_pll(cru, CLK_CODEC, &cpll_init_cfg);

	/* waiting for pll lock */
	while ((readl(&grf->soc_status[1]) &
			(SOCSTS_CPLL_LOCK | SOCSTS_GPLL_LOCK)) !=
			(SOCSTS_CPLL_LOCK | SOCSTS_GPLL_LOCK))
		udelay(1);

	/*
	 * pd_bus clock pll source selection and
	 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
	 */
	aclk_div = GPLL_HZ / PD_BUS_ACLK_HZ - 1;
	assert((aclk_div + 1) * PD_BUS_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = PD_BUS_ACLK_HZ / PD_BUS_HCLK_HZ - 1;
	assert((hclk_div + 1) * PD_BUS_HCLK_HZ ==
	       PD_BUS_ACLK_HZ && (hclk_div < 0x4) && (hclk_div != 0x2));

	pclk_div = PD_BUS_ACLK_HZ / PD_BUS_PCLK_HZ - 1;
	assert((pclk_div + 1) * PD_BUS_PCLK_HZ ==
	       PD_BUS_ACLK_HZ && pclk_div < 0x7);

	rk_clrsetreg(&cru->cru_clksel_con[1],
		     PD_BUS_PCLK_DIV_MASK | PD_BUS_HCLK_DIV_MASK |
		     PD_BUS_ACLK_DIV0_MASK | PD_BUS_ACLK_DIV1_MASK,
		     pclk_div << PD_BUS_PCLK_DIV_SHIFT |
		     hclk_div << PD_BUS_HCLK_DIV_SHIFT |
		     aclk_div << PD_BUS_ACLK_DIV0_SHIFT |
		     0 << 0);

	/*
	 * peri clock pll source selection and
	 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
	 */
	aclk_div = GPLL_HZ / PERI_ACLK_HZ - 1;
	assert((aclk_div + 1) * PERI_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = ilog2(PERI_ACLK_HZ / PERI_HCLK_HZ);
	assert((1 << hclk_div) * PERI_HCLK_HZ ==
	       PERI_ACLK_HZ && (hclk_div < 0x4));

	pclk_div = ilog2(PERI_ACLK_HZ / PERI_PCLK_HZ);
	assert((1 << pclk_div) * PERI_PCLK_HZ ==
	       PERI_ACLK_HZ && (pclk_div < 0x4));

	rk_clrsetreg(&cru->cru_clksel_con[10],
		     PERI_PCLK_DIV_MASK | PERI_HCLK_DIV_MASK |
		     PERI_ACLK_DIV_MASK,
		     PERI_SEL_GPLL << PERI_SEL_PLL_SHIFT |
		     pclk_div << PERI_PCLK_DIV_SHIFT |
		     hclk_div << PERI_HCLK_DIV_SHIFT |
		     aclk_div << PERI_ACLK_DIV_SHIFT);

	/* PLL enter normal-mode */
	rk_clrsetreg(&cru->cru_mode_con,
		     GPLL_MODE_MASK | CPLL_MODE_MASK,
		     GPLL_MODE_NORMAL << GPLL_MODE_SHIFT |
		     CPLL_MODE_NORMAL << CPLL_MODE_SHIFT);
}

void rk3288_clk_configure_cpu(struct rockchip_cru *cru, struct rk3288_grf *grf)
{
	/* pll enter slow-mode */
	rk_clrsetreg(&cru->cru_mode_con, APLL_MODE_MASK,
		     APLL_MODE_SLOW << APLL_MODE_SHIFT);

	rkclk_set_pll(cru, CLK_ARM, &apll_init_cfg);

	/* waiting for pll lock */
	while (!(readl(&grf->soc_status[1]) & SOCSTS_APLL_LOCK))
		udelay(1);

	/*
	 * core clock pll source selection and
	 * set up dependent divisors for MPAXI/M0AXI and ARM clocks.
	 * core clock select apll, apll clk = 1800MHz
	 * arm clk = 1800MHz, mpclk = 450MHz, m0clk = 900MHz
	 */
	rk_clrsetreg(&cru->cru_clksel_con[0],
		     CORE_SEL_PLL_MASK | A17_DIV_MASK | MP_DIV_MASK |
		     M0_DIV_MASK,
		     0 << A17_DIV_SHIFT |
		     3 << MP_DIV_SHIFT |
		     1 << M0_DIV_SHIFT);

	/*
	 * set up dependent divisors for L2RAM/ATCLK and PCLK clocks.
	 * l2ramclk = 900MHz, atclk = 450MHz, pclk_dbg = 450MHz
	 */
	rk_clrsetreg(&cru->cru_clksel_con[37],
		     CLK_L2RAM_DIV_MASK | ATCLK_CORE_DIV_CON_MASK |
		     PCLK_CORE_DBG_DIV_MASK,
		     1 << CLK_L2RAM_DIV_SHIFT |
		     3 << ATCLK_CORE_DIV_CON_SHIFT |
		     3 << PCLK_CORE_DBG_DIV_SHIFT);

	/* PLL enter normal-mode */
	rk_clrsetreg(&cru->cru_mode_con, APLL_MODE_MASK,
		     APLL_MODE_NORMAL << APLL_MODE_SHIFT);
}

/* Get pll rate by id */
static uint32_t rkclk_pll_get_rate(struct rockchip_cru *cru,
				   enum rk_clk_id clk_id)
{
	uint32_t nr, no, nf;
	uint32_t con;
	int pll_id = rk_pll_id(clk_id);
	struct rk3288_pll *pll = &cru->pll[pll_id];
	static u8 clk_shift[CLK_COUNT] = {
		0xff, APLL_MODE_SHIFT, DPLL_MODE_SHIFT, CPLL_MODE_SHIFT,
		GPLL_MODE_SHIFT, NPLL_MODE_SHIFT
	};
	uint shift;

	con = readl(&cru->cru_mode_con);
	shift = clk_shift[clk_id];
	switch ((con >> shift) & CRU_MODE_MASK) {
	case APLL_MODE_SLOW:
		return OSC_HZ;
	case APLL_MODE_NORMAL:
		/* normal mode */
		con = readl(&pll->con0);
		no = ((con & CLKOD_MASK) >> CLKOD_SHIFT) + 1;
		nr = ((con & CLKR_MASK) >> CLKR_SHIFT) + 1;
		con = readl(&pll->con1);
		nf = ((con & CLKF_MASK) >> CLKF_SHIFT) + 1;

		return (24 * nf / (nr * no)) * 1000000;
	case APLL_MODE_DEEP:
	default:
		return 32768;
	}
}
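
/*
 * Readback example (illustrative): apll_init_cfg above is
 * PLL_DIVISORS(APLL_HZ, 1, 1), so with a 1800 MHz APLL_HZ the hardware holds
 * nf = 75, nr = 1, no = 1 and the normal-mode branch returns
 * 24 * 75 / (1 * 1) MHz = 1800 MHz. In slow mode the PLL is bypassed and the
 * 24 MHz oscillator rate (OSC_HZ) is returned instead.
 */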

static ulong rockchip_mmc_get_clk(struct rockchip_cru *cru, uint gclk_rate,
				  int periph)
{
	uint src_rate;
	uint div, mux;
	u32 con;

	switch (periph) {
	case HCLK_EMMC:
	case SCLK_EMMC:
		con = readl(&cru->cru_clksel_con[12]);
		mux = (con & EMMC_PLL_MASK) >> EMMC_PLL_SHIFT;
		div = (con & EMMC_DIV_MASK) >> EMMC_DIV_SHIFT;
		break;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		con = readl(&cru->cru_clksel_con[11]);
		mux = (con & MMC0_PLL_MASK) >> MMC0_PLL_SHIFT;
		div = (con & MMC0_DIV_MASK) >> MMC0_DIV_SHIFT;
		break;
	case HCLK_SDIO0:
	case SCLK_SDIO0:
		con = readl(&cru->cru_clksel_con[12]);
		mux = (con & SDIO0_PLL_MASK) >> SDIO0_PLL_SHIFT;
		div = (con & SDIO0_DIV_MASK) >> SDIO0_DIV_SHIFT;
		break;
	default:
		return -EINVAL;
	}

	src_rate = mux == EMMC_PLL_SELECT_24MHZ ? OSC_HZ : gclk_rate;
	return DIV_TO_RATE(src_rate, div);
}

static ulong rockchip_mmc_set_clk(struct rockchip_cru *cru, uint gclk_rate,
				  int periph, uint freq)
{
	int src_clk_div;
	int mux;

	debug("%s: gclk_rate=%u\n", __func__, gclk_rate);
	/*
	 * The mmc controller divides by 2 internally, so the CRU must
	 * provide double the requested rate.
	 */
	src_clk_div = DIV_ROUND_UP(gclk_rate / 2, freq);

	if (src_clk_div > 0x3f) {
		src_clk_div = DIV_ROUND_UP(OSC_HZ / 2, freq);
		assert(src_clk_div < 0x40);
		mux = EMMC_PLL_SELECT_24MHZ;
		assert((int)EMMC_PLL_SELECT_24MHZ ==
		       (int)MMC0_PLL_SELECT_24MHZ);
	} else {
		mux = EMMC_PLL_SELECT_GENERAL;
		assert((int)EMMC_PLL_SELECT_GENERAL ==
		       (int)MMC0_PLL_SELECT_GENERAL);
	}

	switch (periph) {
	case HCLK_EMMC:
	case SCLK_EMMC:
		rk_clrsetreg(&cru->cru_clksel_con[12],
			     EMMC_PLL_MASK | EMMC_DIV_MASK,
			     mux << EMMC_PLL_SHIFT |
			     (src_clk_div - 1) << EMMC_DIV_SHIFT);
		break;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		rk_clrsetreg(&cru->cru_clksel_con[11],
			     MMC0_PLL_MASK | MMC0_DIV_MASK,
			     mux << MMC0_PLL_SHIFT |
			     (src_clk_div - 1) << MMC0_DIV_SHIFT);
		break;
	case HCLK_SDIO0:
	case SCLK_SDIO0:
		rk_clrsetreg(&cru->cru_clksel_con[12],
			     SDIO0_PLL_MASK | SDIO0_DIV_MASK,
			     mux << SDIO0_PLL_SHIFT |
			     (src_clk_div - 1) << SDIO0_DIV_SHIFT);
		break;
	default:
		return -EINVAL;
	}

	return rockchip_mmc_get_clk(cru, gclk_rate, periph);
}
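
/*
 * Illustrative card-init example (assuming a 594 MHz general PLL): asking for
 * a 400 kHz SD card clock gives DIV_ROUND_UP(297 MHz, 400 kHz) = 743, which
 * does not fit the 6-bit divider, so the code falls back to the 24 MHz
 * oscillator with src_clk_div = 30. The CRU then outputs 24 MHz / 30 = 800 kHz
 * and the controller's internal divide-by-2 brings the card clock to 400 kHz.
 */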

static ulong rockchip_spi_get_clk(struct rockchip_cru *cru, uint gclk_rate,
				  int periph)
{
	uint div, mux;
	u32 con;

	switch (periph) {
	case SCLK_SPI0:
		con = readl(&cru->cru_clksel_con[25]);
		mux = (con & SPI0_PLL_MASK) >> SPI0_PLL_SHIFT;
		div = (con & SPI0_DIV_MASK) >> SPI0_DIV_SHIFT;
		break;
	case SCLK_SPI1:
		con = readl(&cru->cru_clksel_con[25]);
		mux = (con & SPI1_PLL_MASK) >> SPI1_PLL_SHIFT;
		div = (con & SPI1_DIV_MASK) >> SPI1_DIV_SHIFT;
		break;
	case SCLK_SPI2:
		con = readl(&cru->cru_clksel_con[39]);
		mux = (con & SPI2_PLL_MASK) >> SPI2_PLL_SHIFT;
		div = (con & SPI2_DIV_MASK) >> SPI2_DIV_SHIFT;
		break;
	default:
		return -EINVAL;
	}

	assert(mux == SPI0_PLL_SELECT_GENERAL);
	return DIV_TO_RATE(gclk_rate, div);
}

static ulong rockchip_spi_set_clk(struct rockchip_cru *cru, uint gclk_rate,
				  int periph, uint freq)
{
	int src_clk_div;

	debug("%s: clk_general_rate=%u\n", __func__, gclk_rate);
	src_clk_div = DIV_ROUND_UP(gclk_rate, freq) - 1;
	assert(src_clk_div < 128);

	switch (periph) {
	case SCLK_SPI0:
		rk_clrsetreg(&cru->cru_clksel_con[25],
			     SPI0_PLL_MASK | SPI0_DIV_MASK,
			     SPI0_PLL_SELECT_GENERAL << SPI0_PLL_SHIFT |
			     src_clk_div << SPI0_DIV_SHIFT);
		break;
	case SCLK_SPI1:
		rk_clrsetreg(&cru->cru_clksel_con[25],
			     SPI1_PLL_MASK | SPI1_DIV_MASK,
			     SPI1_PLL_SELECT_GENERAL << SPI1_PLL_SHIFT |
			     src_clk_div << SPI1_DIV_SHIFT);
		break;
	case SCLK_SPI2:
		rk_clrsetreg(&cru->cru_clksel_con[39],
			     SPI2_PLL_MASK | SPI2_DIV_MASK,
			     SPI2_PLL_SELECT_GENERAL << SPI2_PLL_SHIFT |
			     src_clk_div << SPI2_DIV_SHIFT);
		break;
	default:
		return -EINVAL;
	}

	return rockchip_spi_get_clk(cru, gclk_rate, periph);
}
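
/*
 * Illustrative example (assuming a 594 MHz general PLL): a 10 MHz SPI clock
 * request yields src_clk_div = DIV_ROUND_UP(594 MHz, 10 MHz) - 1 = 59, and
 * rockchip_spi_get_clk() reports back 594 MHz / 60 = 9.9 MHz, the closest
 * rate at or below the request.
 */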

static ulong rockchip_saradc_get_clk(struct rockchip_cru *cru)
{
	u32 div, val;

	val = readl(&cru->cru_clksel_con[24]);
	div = bitfield_extract(val, CLK_SARADC_DIV_CON_SHIFT,
			       CLK_SARADC_DIV_CON_WIDTH);

	return DIV_TO_RATE(OSC_HZ, div);
}

static ulong rockchip_saradc_set_clk(struct rockchip_cru *cru, uint hz)
{
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
	assert(src_clk_div < 128);

	rk_clrsetreg(&cru->cru_clksel_con[24],
		     CLK_SARADC_DIV_CON_MASK,
		     src_clk_div << CLK_SARADC_DIV_CON_SHIFT);

	return rockchip_saradc_get_clk(cru);
}
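
/*
 * Illustrative example: the saradc is always clocked from the 24 MHz
 * oscillator (OSC_HZ), so a 1 MHz request gives src_clk_div = 23 and
 * rockchip_saradc_get_clk() returns 24 MHz / 24 = 1 MHz exactly.
 */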

static ulong rk3288_clk_get_rate(struct clk *clk)
{
	struct rk3288_clk_priv *priv = dev_get_priv(clk->dev);
	ulong new_rate, gclk_rate;

	gclk_rate = rkclk_pll_get_rate(priv->cru, CLK_GENERAL);
	switch (clk->id) {
	case 0 ... 63:
		new_rate = rkclk_pll_get_rate(priv->cru, clk->id);
		break;
	case HCLK_EMMC:
	case HCLK_SDMMC:
	case HCLK_SDIO0:
	case SCLK_EMMC:
	case SCLK_SDMMC:
	case SCLK_SDIO0:
		new_rate = rockchip_mmc_get_clk(priv->cru, gclk_rate, clk->id);
		break;
	case SCLK_SPI0:
	case SCLK_SPI1:
	case SCLK_SPI2:
		new_rate = rockchip_spi_get_clk(priv->cru, gclk_rate, clk->id);
		break;
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_I2C4:
	case PCLK_I2C5:
		return gclk_rate;
	case PCLK_PWM:
		return PD_BUS_PCLK_HZ;
	case SCLK_SARADC:
		new_rate = rockchip_saradc_get_clk(priv->cru);
		break;
	default:
		return -ENOENT;
	}

	return new_rate;
}

static ulong rk3288_clk_set_rate(struct clk *clk, ulong rate)
{
	struct rk3288_clk_priv *priv = dev_get_priv(clk->dev);
	struct rockchip_cru *cru = priv->cru;
	ulong new_rate, gclk_rate;

	gclk_rate = rkclk_pll_get_rate(priv->cru, CLK_GENERAL);
	switch (clk->id) {
	case PLL_APLL:
		/* We only support a fixed rate here */
		if (rate != 1800000000)
			return -EINVAL;
		rk3288_clk_configure_cpu(priv->cru, priv->grf);
		new_rate = rate;
		break;
	case CLK_DDR:
		new_rate = rkclk_configure_ddr(priv->cru, priv->grf, rate);
		break;
	case HCLK_EMMC:
	case HCLK_SDMMC:
	case HCLK_SDIO0:
	case SCLK_EMMC:
	case SCLK_SDMMC:
	case SCLK_SDIO0:
		new_rate = rockchip_mmc_set_clk(cru, gclk_rate, clk->id, rate);
		break;
	case SCLK_SPI0:
	case SCLK_SPI1:
	case SCLK_SPI2:
		new_rate = rockchip_spi_set_clk(cru, gclk_rate, clk->id, rate);
		break;
#ifndef CONFIG_SPL_BUILD
	case SCLK_I2S0:
		new_rate = rockchip_i2s_set_clk(cru, gclk_rate, rate);
		break;
	case SCLK_MAC:
		new_rate = rockchip_mac_set_clk(priv->cru, rate);
		break;
	case DCLK_VOP0:
	case DCLK_VOP1:
		new_rate = rockchip_vop_set_clk(cru, priv->grf, clk->id, rate);
		break;
	case SCLK_EDP_24M:
		/* clk_edp_24M source: 24M */
		rk_setreg(&cru->cru_clksel_con[28], 1 << 15);

		/* rst edp */
		rk_setreg(&cru->cru_clksel_con[6], 1 << 15);
		udelay(1);
		rk_clrreg(&cru->cru_clksel_con[6], 1 << 15);
		new_rate = rate;
		break;
	case ACLK_VOP0:
	case ACLK_VOP1: {
		u32 div;

		/* vop aclk source clk: cpll */
		div = CPLL_HZ / rate;
		assert((div - 1 < 64) && (div * rate == CPLL_HZ));

		switch (clk->id) {
		case ACLK_VOP0:
			rk_clrsetreg(&cru->cru_clksel_con[31],
				     3 << 6 | 0x1f << 0,
				     0 << 6 | (div - 1) << 0);
			break;
		case ACLK_VOP1:
			rk_clrsetreg(&cru->cru_clksel_con[31],
				     3 << 14 | 0x1f << 8,
				     0 << 14 | (div - 1) << 8);
			break;
		}
		new_rate = rate;
		break;
	}
	case PCLK_HDMI_CTRL:
		/* enable pclk hdmi ctrl */
		rk_clrreg(&cru->cru_clkgate_con[16], 1 << 9);

		/* software reset hdmi */
		rk_setreg(&cru->cru_clkgate_con[7], 1 << 9);
		udelay(1);
		rk_clrreg(&cru->cru_clkgate_con[7], 1 << 9);
		new_rate = rate;
		break;
#endif
	case SCLK_SARADC:
		new_rate = rockchip_saradc_set_clk(priv->cru, rate);
		break;
	case PLL_GPLL:
	case PLL_CPLL:
	case PLL_NPLL:
	case ACLK_CPU:
	case HCLK_CPU:
	case PCLK_CPU:
	case ACLK_PERI:
	case HCLK_PERI:
	case PCLK_PERI:
	case SCLK_UART0:
		return 0;
	default:
		return -ENOENT;
	}

	return new_rate;
}

static int __maybe_unused rk3288_gmac_set_parent(struct clk *clk, struct clk *parent)
{
	struct rk3288_clk_priv *priv = dev_get_priv(clk->dev);
	struct rockchip_cru *cru = priv->cru;
	const char *clock_output_name;
	int ret;

	/*
	 * If the requested parent is in the same clock-controller and
	 * the id is SCLK_MAC_PLL ("mac_pll_src"), switch to the internal
	 * clock.
	 */
	if ((parent->dev == clk->dev) && (parent->id == SCLK_MAC_PLL)) {
		debug("%s: switching GMAC to SCLK_MAC_PLL\n", __func__);
		rk_clrsetreg(&cru->cru_clksel_con[21], RMII_EXTCLK_MASK, 0);
		return 0;
	}

	/*
	 * Otherwise, we need to check the clock-output-names of the
	 * requested parent to see if the requested id is "ext_gmac".
	 */
	ret = dev_read_string_index(parent->dev, "clock-output-names",
				    parent->id, &clock_output_name);
	if (ret < 0)
		return -ENODATA;

	/* If this is "ext_gmac", switch to the external clock input */
	if (!strcmp(clock_output_name, "ext_gmac")) {
		debug("%s: switching GMAC to external clock\n", __func__);
		rk_clrsetreg(&cru->cru_clksel_con[21], RMII_EXTCLK_MASK,
			     RMII_EXTCLK_SELECT_EXT_CLK << RMII_EXTCLK_SHIFT);
		return 0;
	}

	return -EINVAL;
}

static int __maybe_unused rk3288_clk_set_parent(struct clk *clk, struct clk *parent)
{
	switch (clk->id) {
	case SCLK_MAC:
		return rk3288_gmac_set_parent(clk, parent);
	case SCLK_USBPHY480M_SRC:
		return 0;
	}

	debug("%s: unsupported clk %ld\n", __func__, clk->id);
	return -ENOENT;
}

static struct clk_ops rk3288_clk_ops = {
	.get_rate = rk3288_clk_get_rate,
	.set_rate = rk3288_clk_set_rate,
#if CONFIG_IS_ENABLED(OF_CONTROL) && !CONFIG_IS_ENABLED(OF_PLATDATA)
	.set_parent = rk3288_clk_set_parent,
#endif
};

static int rk3288_clk_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3288_clk_priv *priv = dev_get_priv(dev);

	priv->cru = dev_read_addr_ptr(dev);
#endif

	return 0;
}

static int rk3288_clk_probe(struct udevice *dev)
{
	struct rk3288_clk_priv *priv = dev_get_priv(dev);
	bool init_clocks = false;

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	if (IS_ERR(priv->grf))
		return PTR_ERR(priv->grf);
#ifdef CONFIG_SPL_BUILD
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3288_clk_plat *plat = dev_get_platdata(dev);

	priv->cru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
#endif
	init_clocks = true;
#endif

	if (!(gd->flags & GD_FLG_RELOC)) {
		u32 reg;

		/*
		 * Init clocks in U-Boot proper if the NPLL is running. This
		 * indicates that a previous boot loader set up the clocks, so
		 * we need to redo it. U-Boot's SPL does not set this clock.
		 */
		reg = readl(&priv->cru->cru_mode_con);
		if (((reg & NPLL_MODE_MASK) >> NPLL_MODE_SHIFT) ==
				NPLL_MODE_NORMAL)
			init_clocks = true;
	}

	if (init_clocks)
		rkclk_init(priv->cru, priv->grf);

	return 0;
}

static int rk3288_clk_bind(struct udevice *dev)
{
	int ret;
	struct udevice *sys_child;
	struct sysreset_reg *priv;

	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
				 &sys_child);
	if (ret) {
		debug("Warning: No sysreset driver: ret=%d\n", ret);
	} else {
		priv = malloc(sizeof(struct sysreset_reg));
		priv->glb_srst_fst_value = offsetof(struct rockchip_cru,
						    cru_glb_srst_fst_value);
		priv->glb_srst_snd_value = offsetof(struct rockchip_cru,
						    cru_glb_srst_snd_value);
		sys_child->priv = priv;
	}

#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
	ret = offsetof(struct rockchip_cru, cru_softrst_con[0]);
	ret = rockchip_reset_bind(dev, ret, 12);
	if (ret)
		debug("Warning: software reset driver bind failed\n");
#endif

	return 0;
}

static const struct udevice_id rk3288_clk_ids[] = {
	{ .compatible = "rockchip,rk3288-cru" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3288_cru) = {
	.name = "rockchip_rk3288_cru",
	.id = UCLASS_CLK,
	.of_match = rk3288_clk_ids,
	.priv_auto = sizeof(struct rk3288_clk_priv),
	.plat_auto = sizeof(struct rk3288_clk_plat),
	.ops = &rk3288_clk_ops,
	.bind = rk3288_clk_bind,
	.ofdata_to_platdata = rk3288_clk_ofdata_to_platdata,
	.probe = rk3288_clk_probe,
};