sdram_rk3399.c

// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * (C) Copyright 2016-2017 Rockchip Inc.
 *
 * Adapted from coreboot.
 */

#include <common.h>
#include <clk.h>
#include <dm.h>
#include <dt-structs.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/sdram_common.h>
#include <asm/arch-rockchip/sdram_rk3399.h>
#include <asm/arch-rockchip/cru_rk3399.h>
#include <asm/arch-rockchip/grf_rk3399.h>
#include <asm/arch-rockchip/hardware.h>
#include <linux/err.h>
#include <time.h>

struct chan_info {
	struct rk3399_ddr_pctl_regs *pctl;
	struct rk3399_ddr_pi_regs *pi;
	struct rk3399_ddr_publ_regs *publ;
	struct rk3399_msch_regs *msch;
};

struct dram_info {
#ifdef CONFIG_SPL_BUILD
	struct chan_info chan[2];
	struct clk ddr_clk;
	struct rk3399_cru *cru;
	struct rk3399_pmucru *pmucru;
	struct rk3399_pmusgrf_regs *pmusgrf;
	struct rk3399_ddr_cic_regs *cic;
#endif
	struct ram_info info;
	struct rk3399_pmugrf_regs *pmugrf;
};

#define PRESET_SGRF_HOLD(n)	((0x1 << (6 + 16)) | ((n) << 6))
#define PRESET_GPIO0_HOLD(n)	((0x1 << (7 + 16)) | ((n) << 7))
#define PRESET_GPIO1_HOLD(n)	((0x1 << (8 + 16)) | ((n) << 8))

#define PHY_DRV_ODT_Hi_Z	0x0
#define PHY_DRV_ODT_240		0x1
#define PHY_DRV_ODT_120		0x8
#define PHY_DRV_ODT_80		0x9
#define PHY_DRV_ODT_60		0xc
#define PHY_DRV_ODT_48		0xd
#define PHY_DRV_ODT_40		0xe
#define PHY_DRV_ODT_34_3	0xf

#ifdef CONFIG_SPL_BUILD

struct rockchip_dmc_plat {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3399_dmc dtplat;
#else
	struct rk3399_sdram_params sdram_params;
#endif
	struct regmap *map;
};

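/* Copy @n bytes from @src to @dest, one 32-bit word at a time, using writel(). */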
static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

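/*
 * Enable PHY DLL bypass for frequencies of 125MHz and below, otherwise
 * disable it, by toggling the sw_master_mode bits of the data-slice and
 * address/control PHY registers.
 */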
static void phy_dll_bypass_set(struct rk3399_ddr_publ_regs *ddr_publ_regs,
			       u32 freq)
{
	u32 *denali_phy = ddr_publ_regs->denali_phy;

	/* From IP spec, only frequencies below 125MHz can enter DLL bypass mode */
	if (freq <= 125) {
		/* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
		setbits_le32(&denali_phy[86], (0x3 << 2) << 8);
		setbits_le32(&denali_phy[214], (0x3 << 2) << 8);
		setbits_le32(&denali_phy[342], (0x3 << 2) << 8);
		setbits_le32(&denali_phy[470], (0x3 << 2) << 8);

		/* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
		setbits_le32(&denali_phy[547], (0x3 << 2) << 16);
		setbits_le32(&denali_phy[675], (0x3 << 2) << 16);
		setbits_le32(&denali_phy[803], (0x3 << 2) << 16);
	} else {
		/* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
		clrbits_le32(&denali_phy[86], (0x3 << 2) << 8);
		clrbits_le32(&denali_phy[214], (0x3 << 2) << 8);
		clrbits_le32(&denali_phy[342], (0x3 << 2) << 8);
		clrbits_le32(&denali_phy[470], (0x3 << 2) << 8);

		/* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
		clrbits_le32(&denali_phy[547], (0x3 << 2) << 16);
		clrbits_le32(&denali_phy[675], (0x3 << 2) << 16);
		clrbits_le32(&denali_phy[803], (0x3 << 2) << 16);
	}
}

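/*
 * Program the DRAM geometry for one channel (column/bank/row widths,
 * chip-select map and half-width reduction) into the controller and the PI.
 */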
static void set_memory_map(const struct chan_info *chan, u32 channel,
			   const struct rk3399_sdram_params *sdram_params)
{
	const struct rk3399_sdram_channel *sdram_ch =
		&sdram_params->ch[channel];
	u32 *denali_ctl = chan->pctl->denali_ctl;
	u32 *denali_pi = chan->pi->denali_pi;
	u32 cs_map;
	u32 reduc;
	u32 row;

	/* Get row number from ddrconfig setting */
	if (sdram_ch->ddrconfig < 2 || sdram_ch->ddrconfig == 4)
		row = 16;
	else if (sdram_ch->ddrconfig == 3)
		row = 14;
	else
		row = 15;

	cs_map = (sdram_ch->rank > 1) ? 3 : 1;
	reduc = (sdram_ch->bw == 2) ? 0 : 1;

	/* Set the dram configuration to ctrl */
	clrsetbits_le32(&denali_ctl[191], 0xF, (12 - sdram_ch->col));
	clrsetbits_le32(&denali_ctl[190], (0x3 << 16) | (0x7 << 24),
			((3 - sdram_ch->bk) << 16) |
			((16 - row) << 24));

	clrsetbits_le32(&denali_ctl[196], 0x3 | (1 << 16),
			cs_map | (reduc << 16));

	/* PI_199 PI_COL_DIFF:RW:0:4 */
	clrsetbits_le32(&denali_pi[199], 0xF, (12 - sdram_ch->col));

	/* PI_155 PI_ROW_DIFF:RW:24:3 PI_BANK_DIFF:RW:16:2 */
	clrsetbits_le32(&denali_pi[155], (0x3 << 16) | (0x7 << 24),
			((3 - sdram_ch->bk) << 16) |
			((16 - row) << 24));
	/* PI_41 PI_CS_MAP:RW:24:4 */
	clrsetbits_le32(&denali_pi[41], 0xf << 24, cs_map << 24);
	if ((sdram_ch->rank == 1) && (sdram_params->base.dramtype == DDR3))
		writel(0x2EC7FFFF, &denali_pi[34]);
}

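/*
 * Select the PHY pad drive-strength and termination (tsel) values for the
 * configured DRAM type and program them into the DQ/DM, DQS and
 * address/command pad registers, then enable the requested tsel phases.
 */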
static void set_ds_odt(const struct chan_info *chan,
		       const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_phy = chan->publ->denali_phy;

	u32 tsel_idle_en, tsel_wr_en, tsel_rd_en;
	u32 tsel_idle_select_p, tsel_wr_select_p, tsel_rd_select_p;
	u32 ca_tsel_wr_select_p, ca_tsel_wr_select_n;
	u32 tsel_idle_select_n, tsel_wr_select_n, tsel_rd_select_n;
	u32 reg_value;

	if (sdram_params->base.dramtype == LPDDR4) {
		tsel_rd_select_p = PHY_DRV_ODT_Hi_Z;
		tsel_wr_select_p = PHY_DRV_ODT_40;
		ca_tsel_wr_select_p = PHY_DRV_ODT_40;
		tsel_idle_select_p = PHY_DRV_ODT_Hi_Z;

		tsel_rd_select_n = PHY_DRV_ODT_240;
		tsel_wr_select_n = PHY_DRV_ODT_40;
		ca_tsel_wr_select_n = PHY_DRV_ODT_40;
		tsel_idle_select_n = PHY_DRV_ODT_240;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		tsel_rd_select_p = PHY_DRV_ODT_240;
		tsel_wr_select_p = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_p = PHY_DRV_ODT_48;
		tsel_idle_select_p = PHY_DRV_ODT_240;

		tsel_rd_select_n = PHY_DRV_ODT_Hi_Z;
		tsel_wr_select_n = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_n = PHY_DRV_ODT_48;
		tsel_idle_select_n = PHY_DRV_ODT_Hi_Z;
	} else {
		tsel_rd_select_p = PHY_DRV_ODT_240;
		tsel_wr_select_p = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_p = PHY_DRV_ODT_34_3;
		tsel_idle_select_p = PHY_DRV_ODT_240;

		tsel_rd_select_n = PHY_DRV_ODT_240;
		tsel_wr_select_n = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_n = PHY_DRV_ODT_34_3;
		tsel_idle_select_n = PHY_DRV_ODT_240;
	}

	if (sdram_params->base.odt == 1)
		tsel_rd_en = 1;
	else
		tsel_rd_en = 0;

	tsel_wr_en = 0;
	tsel_idle_en = 0;

	/*
	 * phy_dq_tsel_select_X 24bits DENALI_PHY_6/134/262/390 offset_0
	 * sets termination values for read/idle cycles and drive strength
	 * for write cycles for DQ/DM
	 */
	reg_value = tsel_rd_select_n | (tsel_rd_select_p << 0x4) |
		    (tsel_wr_select_n << 8) | (tsel_wr_select_p << 12) |
		    (tsel_idle_select_n << 16) | (tsel_idle_select_p << 20);
	clrsetbits_le32(&denali_phy[6], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[134], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[262], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[390], 0xffffff, reg_value);

	/*
	 * phy_dqs_tsel_select_X 24bits DENALI_PHY_7/135/263/391 offset_0
	 * sets termination values for read/idle cycles and drive strength
	 * for write cycles for DQS
	 */
	clrsetbits_le32(&denali_phy[7], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[135], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[263], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[391], 0xffffff, reg_value);

	/* phy_adr_tsel_select_ 8bits DENALI_PHY_544/672/800 offset_0 */
	reg_value = ca_tsel_wr_select_n | (ca_tsel_wr_select_p << 0x4);
	clrsetbits_le32(&denali_phy[544], 0xff, reg_value);
	clrsetbits_le32(&denali_phy[672], 0xff, reg_value);
	clrsetbits_le32(&denali_phy[800], 0xff, reg_value);

	/* phy_pad_addr_drive 8bits DENALI_PHY_928 offset_0 */
	clrsetbits_le32(&denali_phy[928], 0xff, reg_value);

	/* phy_pad_rst_drive 8bits DENALI_PHY_937 offset_0 */
	clrsetbits_le32(&denali_phy[937], 0xff, reg_value);

	/* phy_pad_cke_drive 8bits DENALI_PHY_935 offset_0 */
	clrsetbits_le32(&denali_phy[935], 0xff, reg_value);

	/* phy_pad_cs_drive 8bits DENALI_PHY_939 offset_0 */
	clrsetbits_le32(&denali_phy[939], 0xff, reg_value);

	/* phy_pad_clk_drive 8bits DENALI_PHY_929 offset_0 */
	clrsetbits_le32(&denali_phy[929], 0xff, reg_value);

	/* phy_pad_fdbk_drive 23bit DENALI_PHY_924/925 */
	clrsetbits_le32(&denali_phy[924], 0xff,
			tsel_wr_select_n | (tsel_wr_select_p << 4));
	clrsetbits_le32(&denali_phy[925], 0xff,
			tsel_rd_select_n | (tsel_rd_select_p << 4));

	/* phy_dq_tsel_enable_X 3bits DENALI_PHY_5/133/261/389 offset_16 */
	reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
		<< 16;
	clrsetbits_le32(&denali_phy[5], 0x7 << 16, reg_value);
	clrsetbits_le32(&denali_phy[133], 0x7 << 16, reg_value);
	clrsetbits_le32(&denali_phy[261], 0x7 << 16, reg_value);
	clrsetbits_le32(&denali_phy[389], 0x7 << 16, reg_value);

	/* phy_dqs_tsel_enable_X 3bits DENALI_PHY_6/134/262/390 offset_24 */
	reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
		<< 24;
	clrsetbits_le32(&denali_phy[6], 0x7 << 24, reg_value);
	clrsetbits_le32(&denali_phy[134], 0x7 << 24, reg_value);
	clrsetbits_le32(&denali_phy[262], 0x7 << 24, reg_value);
	clrsetbits_le32(&denali_phy[390], 0x7 << 24, reg_value);

	/* phy_adr_tsel_enable_ 1bit DENALI_PHY_518/646/774 offset_8 */
	reg_value = tsel_wr_en << 8;
	clrsetbits_le32(&denali_phy[518], 0x1 << 8, reg_value);
	clrsetbits_le32(&denali_phy[646], 0x1 << 8, reg_value);
	clrsetbits_le32(&denali_phy[774], 0x1 << 8, reg_value);

	/* phy_pad_addr_term tsel 1bit DENALI_PHY_933 offset_17 */
	reg_value = tsel_wr_en << 17;
	clrsetbits_le32(&denali_phy[933], 0x1 << 17, reg_value);

	/*
	 * pad_rst/cke/cs/clk_term tsel 1bits
	 * DENALI_PHY_938/936/940/934 offset_17
	 */
	clrsetbits_le32(&denali_phy[938], 0x1 << 17, reg_value);
	clrsetbits_le32(&denali_phy[936], 0x1 << 17, reg_value);
	clrsetbits_le32(&denali_phy[940], 0x1 << 17, reg_value);
	clrsetbits_le32(&denali_phy[934], 0x1 << 17, reg_value);

	/* phy_pad_fdbk_term 1bit DENALI_PHY_930 offset_17 */
	clrsetbits_le32(&denali_phy[930], 0x1 << 17, reg_value);
}

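/*
 * Configure PHY pad VREF mode/level, I/O mode select and speed grouping for
 * the configured DRAM type and frequency. Returns -EINVAL for unsupported
 * drive-strength/ODT combinations or DRAM types.
 */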
static int phy_io_config(const struct chan_info *chan,
			 const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_phy = chan->publ->denali_phy;
	u32 vref_mode_dq, vref_value_dq, vref_mode_ac, vref_value_ac;
	u32 mode_sel;
	u32 reg_value;
	u32 drv_value, odt_value;
	u32 speed;

	/* vref setting */
	if (sdram_params->base.dramtype == LPDDR4) {
		/* LPDDR4 */
		vref_mode_dq = 0x6;
		vref_value_dq = 0x1f;
		vref_mode_ac = 0x6;
		vref_value_ac = 0x1f;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		if (sdram_params->base.odt == 1) {
			vref_mode_dq = 0x5;	/* LPDDR3 ODT */
			drv_value = (readl(&denali_phy[6]) >> 12) & 0xf;
			odt_value = (readl(&denali_phy[6]) >> 4) & 0xf;
			if (drv_value == PHY_DRV_ODT_48) {
				switch (odt_value) {
				case PHY_DRV_ODT_240:
					vref_value_dq = 0x16;
					break;
				case PHY_DRV_ODT_120:
					vref_value_dq = 0x26;
					break;
				case PHY_DRV_ODT_60:
					vref_value_dq = 0x36;
					break;
				default:
					debug("Invalid ODT value.\n");
					return -EINVAL;
				}
			} else if (drv_value == PHY_DRV_ODT_40) {
				switch (odt_value) {
				case PHY_DRV_ODT_240:
					vref_value_dq = 0x19;
					break;
				case PHY_DRV_ODT_120:
					vref_value_dq = 0x23;
					break;
				case PHY_DRV_ODT_60:
					vref_value_dq = 0x31;
					break;
				default:
					debug("Invalid ODT value.\n");
					return -EINVAL;
				}
			} else if (drv_value == PHY_DRV_ODT_34_3) {
				switch (odt_value) {
				case PHY_DRV_ODT_240:
					vref_value_dq = 0x17;
					break;
				case PHY_DRV_ODT_120:
					vref_value_dq = 0x20;
					break;
				case PHY_DRV_ODT_60:
					vref_value_dq = 0x2e;
					break;
				default:
					debug("Invalid ODT value.\n");
					return -EINVAL;
				}
			} else {
				debug("Invalid DRV value.\n");
				return -EINVAL;
			}
		} else {
			vref_mode_dq = 0x2;	/* LPDDR3 */
			vref_value_dq = 0x1f;
		}
		vref_mode_ac = 0x2;
		vref_value_ac = 0x1f;
	} else if (sdram_params->base.dramtype == DDR3) {
		/* DDR3L */
		vref_mode_dq = 0x1;
		vref_value_dq = 0x1f;
		vref_mode_ac = 0x1;
		vref_value_ac = 0x1f;
	} else {
		debug("Unknown DRAM type.\n");
		return -EINVAL;
	}

	reg_value = (vref_mode_dq << 9) | (0x1 << 8) | vref_value_dq;

	/* PHY_913 PHY_PAD_VREF_CTRL_DQ_0 12bits offset_8 */
	clrsetbits_le32(&denali_phy[913], 0xfff << 8, reg_value << 8);
	/* PHY_914 PHY_PAD_VREF_CTRL_DQ_1 12bits offset_0 */
	clrsetbits_le32(&denali_phy[914], 0xfff, reg_value);
	/* PHY_914 PHY_PAD_VREF_CTRL_DQ_2 12bits offset_16 */
	clrsetbits_le32(&denali_phy[914], 0xfff << 16, reg_value << 16);
	/* PHY_915 PHY_PAD_VREF_CTRL_DQ_3 12bits offset_0 */
	clrsetbits_le32(&denali_phy[915], 0xfff, reg_value);

	reg_value = (vref_mode_ac << 9) | (0x1 << 8) | vref_value_ac;

	/* PHY_915 PHY_PAD_VREF_CTRL_AC 12bits offset_16 */
	clrsetbits_le32(&denali_phy[915], 0xfff << 16, reg_value << 16);

	if (sdram_params->base.dramtype == LPDDR4)
		mode_sel = 0x6;
	else if (sdram_params->base.dramtype == LPDDR3)
		mode_sel = 0x0;
	else if (sdram_params->base.dramtype == DDR3)
		mode_sel = 0x1;
	else
		return -EINVAL;

	/* PHY_924 PHY_PAD_FDBK_DRIVE */
	clrsetbits_le32(&denali_phy[924], 0x7 << 15, mode_sel << 15);
	/* PHY_926 PHY_PAD_DATA_DRIVE */
	clrsetbits_le32(&denali_phy[926], 0x7 << 6, mode_sel << 6);
	/* PHY_927 PHY_PAD_DQS_DRIVE */
	clrsetbits_le32(&denali_phy[927], 0x7 << 6, mode_sel << 6);
	/* PHY_928 PHY_PAD_ADDR_DRIVE */
	clrsetbits_le32(&denali_phy[928], 0x7 << 14, mode_sel << 14);
	/* PHY_929 PHY_PAD_CLK_DRIVE */
	clrsetbits_le32(&denali_phy[929], 0x7 << 14, mode_sel << 14);
	/* PHY_935 PHY_PAD_CKE_DRIVE */
	clrsetbits_le32(&denali_phy[935], 0x7 << 14, mode_sel << 14);
	/* PHY_937 PHY_PAD_RST_DRIVE */
	clrsetbits_le32(&denali_phy[937], 0x7 << 14, mode_sel << 14);
	/* PHY_939 PHY_PAD_CS_DRIVE */
	clrsetbits_le32(&denali_phy[939], 0x7 << 14, mode_sel << 14);

	/* speed setting */
	if (sdram_params->base.ddr_freq < 400)
		speed = 0x0;
	else if (sdram_params->base.ddr_freq < 800)
		speed = 0x1;
	else if (sdram_params->base.ddr_freq < 1200)
		speed = 0x2;
	else
		speed = 0x3;

	/* PHY_924 PHY_PAD_FDBK_DRIVE */
	clrsetbits_le32(&denali_phy[924], 0x3 << 21, speed << 21);
	/* PHY_926 PHY_PAD_DATA_DRIVE */
	clrsetbits_le32(&denali_phy[926], 0x3 << 9, speed << 9);
	/* PHY_927 PHY_PAD_DQS_DRIVE */
	clrsetbits_le32(&denali_phy[927], 0x3 << 9, speed << 9);
	/* PHY_928 PHY_PAD_ADDR_DRIVE */
	clrsetbits_le32(&denali_phy[928], 0x3 << 17, speed << 17);
	/* PHY_929 PHY_PAD_CLK_DRIVE */
	clrsetbits_le32(&denali_phy[929], 0x3 << 17, speed << 17);
	/* PHY_935 PHY_PAD_CKE_DRIVE */
	clrsetbits_le32(&denali_phy[935], 0x3 << 17, speed << 17);
	/* PHY_937 PHY_PAD_RST_DRIVE */
	clrsetbits_le32(&denali_phy[937], 0x3 << 17, speed << 17);
	/* PHY_939 PHY_PAD_CS_DRIVE */
	clrsetbits_le32(&denali_phy[939], 0x3 << 17, speed << 17);

	return 0;
}

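/*
 * Load the controller, PI and PHY register tables from sdram_params, start
 * the controller and PI, wait for the PHY DLLs to lock, add half a cycle to
 * the DQ/DQS tsel write-end timing and wait for DRAM init to complete.
 */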
static int pctl_cfg(const struct chan_info *chan, u32 channel,
		    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_ctl = chan->pctl->denali_ctl;
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	const u32 *params_ctl = sdram_params->pctl_regs.denali_ctl;
	const u32 *params_phy = sdram_params->phy_regs.denali_phy;
	u32 tmp, tmp1, tmp2;
	u32 pwrup_srefresh_exit;
	int ret;
	const ulong timeout_ms = 200;

	/*
	 * Work around controller bug:
	 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
	 */
	copy_to_reg(&denali_ctl[1], &params_ctl[1],
		    sizeof(struct rk3399_ddr_pctl_regs) - 4);
	writel(params_ctl[0], &denali_ctl[0]);
	copy_to_reg(denali_pi, &sdram_params->pi_regs.denali_pi[0],
		    sizeof(struct rk3399_ddr_pi_regs));
	/* rank count need to set for init */
	set_memory_map(chan, channel, sdram_params);

	writel(sdram_params->phy_regs.denali_phy[910], &denali_phy[910]);
	writel(sdram_params->phy_regs.denali_phy[911], &denali_phy[911]);
	writel(sdram_params->phy_regs.denali_phy[912], &denali_phy[912]);

	pwrup_srefresh_exit = readl(&denali_ctl[68]) & PWRUP_SREFRESH_EXIT;
	clrbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT);

	/* PHY_DLL_RST_EN */
	clrsetbits_le32(&denali_phy[957], 0x3 << 24, 1 << 24);

	setbits_le32(&denali_pi[0], START);
	setbits_le32(&denali_ctl[0], START);

	/* Waiting for PHY DLL lock */
	while (1) {
		tmp = readl(&denali_phy[920]);
		tmp1 = readl(&denali_phy[921]);
		tmp2 = readl(&denali_phy[922]);
		if ((((tmp >> 16) & 0x1) == 0x1) &&
		    (((tmp1 >> 16) & 0x1) == 0x1) &&
		    (((tmp1 >> 0) & 0x1) == 0x1) &&
		    (((tmp2 >> 0) & 0x1) == 0x1))
			break;
	}

	copy_to_reg(&denali_phy[896], &params_phy[896], (958 - 895) * 4);
	copy_to_reg(&denali_phy[0], &params_phy[0], (90 - 0 + 1) * 4);
	copy_to_reg(&denali_phy[128], &params_phy[128], (218 - 128 + 1) * 4);
	copy_to_reg(&denali_phy[256], &params_phy[256], (346 - 256 + 1) * 4);
	copy_to_reg(&denali_phy[384], &params_phy[384], (474 - 384 + 1) * 4);
	copy_to_reg(&denali_phy[512], &params_phy[512], (549 - 512 + 1) * 4);
	copy_to_reg(&denali_phy[640], &params_phy[640], (677 - 640 + 1) * 4);
	copy_to_reg(&denali_phy[768], &params_phy[768], (805 - 768 + 1) * 4);
	set_ds_odt(chan, sdram_params);

	/*
	 * phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_84/212/340/468 offset_8
	 * dqs_tsel_wr_end[7:4] add Half cycle
	 */
	tmp = (readl(&denali_phy[84]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[84], 0xff << 8, (tmp + 0x10) << 8);
	tmp = (readl(&denali_phy[212]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[212], 0xff << 8, (tmp + 0x10) << 8);
	tmp = (readl(&denali_phy[340]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[340], 0xff << 8, (tmp + 0x10) << 8);
	tmp = (readl(&denali_phy[468]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[468], 0xff << 8, (tmp + 0x10) << 8);

	/*
	 * phy_dq_tsel_wr_timing_X 8bits DENALI_PHY_83/211/339/467 offset_16
	 * dq_tsel_wr_end[7:4] add Half cycle
	 */
	tmp = (readl(&denali_phy[83]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[83], 0xff << 16, (tmp + 0x10) << 16);
	tmp = (readl(&denali_phy[211]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[211], 0xff << 16, (tmp + 0x10) << 16);
	tmp = (readl(&denali_phy[339]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[339], 0xff << 16, (tmp + 0x10) << 16);
	tmp = (readl(&denali_phy[467]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[467], 0xff << 16, (tmp + 0x10) << 16);

	ret = phy_io_config(chan, sdram_params);
	if (ret)
		return ret;

	/* PHY_DLL_RST_EN */
	clrsetbits_le32(&denali_phy[957], 0x3 << 24, 0x2 << 24);

	/* Waiting for PHY and DRAM init complete */
	tmp = get_timer(0);
	do {
		if (get_timer(tmp) > timeout_ms) {
			pr_err("DRAM (%s): phy failed to lock within %ld ms\n",
			       __func__, timeout_ms);
			return -ETIME;
		}
	} while (!(readl(&denali_ctl[203]) & (1 << 3)));
	debug("DRAM (%s): phy locked after %ld ms\n", __func__, get_timer(tmp));

	clrsetbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT,
			pwrup_srefresh_exit);
	return 0;
}

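/*
 * If per-CS training is enabled (PHY_84 bit 16), point the per-CS training
 * index of all four data slices at the given rank.
 */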
static void select_per_cs_training_index(const struct chan_info *chan,
					 u32 rank)
{
	u32 *denali_phy = chan->publ->denali_phy;

	/* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
	if ((readl(&denali_phy[84]) >> 16) & 1) {
		/*
		 * PHY_8/136/264/392
		 * phy_per_cs_training_index_X 1bit offset_24
		 */
		clrsetbits_le32(&denali_phy[8], 0x1 << 24, rank << 24);
		clrsetbits_le32(&denali_phy[136], 0x1 << 24, rank << 24);
		clrsetbits_le32(&denali_phy[264], 0x1 << 24, rank << 24);
		clrsetbits_le32(&denali_phy[392], 0x1 << 24, rank << 24);
	}
}

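/*
 * With frequency-select multicast and per-CS multicast enabled, write a
 * fixed value (0x200) into the upper half-word of PHY_63/191/319/447 for
 * each data slice, then issue a controller update request (ctrlupd_req).
 */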
static void override_write_leveling_value(const struct chan_info *chan)
{
	u32 *denali_ctl = chan->pctl->denali_ctl;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 byte;

	/* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
	setbits_le32(&denali_phy[896], 1);

	/*
	 * PHY_8/136/264/392
	 * phy_per_cs_training_multicast_en_X 1bit offset_16
	 */
	clrsetbits_le32(&denali_phy[8], 0x1 << 16, 1 << 16);
	clrsetbits_le32(&denali_phy[136], 0x1 << 16, 1 << 16);
	clrsetbits_le32(&denali_phy[264], 0x1 << 16, 1 << 16);
	clrsetbits_le32(&denali_phy[392], 0x1 << 16, 1 << 16);

	for (byte = 0; byte < 4; byte++)
		clrsetbits_le32(&denali_phy[63 + (128 * byte)], 0xffff << 16,
				0x200 << 16);

	/* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
	clrbits_le32(&denali_phy[896], 1);

	/* CTL_200 ctrlupd_req 1bit offset_8 */
	clrsetbits_le32(&denali_ctl[200], 0x1 << 8, 0x1 << 8);
}

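/*
 * Run PI command/address (CA) training for each rank of the channel, polling
 * PI_INT_STATUS and the CALVL observation registers, and bail out on error.
 */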
static int data_training_ca(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 i, tmp;
	u32 obs_0, obs_1, obs_2, obs_err = 0;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_100 PI_CALVL_EN:RW:8:2 */
		clrsetbits_le32(&denali_pi[100], 0x3 << 8, 0x2 << 8);
		/* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
		clrsetbits_le32(&denali_pi[92],
				(0x1 << 16) | (0x3 << 24),
				(0x1 << 16) | (i << 24));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;

			/*
			 * check status obs
			 * PHY_532/660/788 phy_adr_calvl_obs1_:0:32
			 */
			obs_0 = readl(&denali_phy[532]);
			obs_1 = readl(&denali_phy[660]);
			obs_2 = readl(&denali_phy[788]);
			if (((obs_0 >> 30) & 0x3) ||
			    ((obs_1 >> 30) & 0x3) ||
			    ((obs_2 >> 30) & 0x3))
				obs_err = 1;
			if ((((tmp >> 11) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 5) & 0x1) == 0x0) &&
			    (obs_err == 0))
				break;
			else if ((((tmp >> 5) & 0x1) == 0x1) ||
				 (obs_err == 1))
				return -EIO;
		}
		/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[100], 0x3 << 8);

	return 0;
}

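/*
 * Run PI write leveling for each rank, checking the per-slice wrlvl status
 * observation registers for errors, then apply the leveling override.
 */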
static int data_training_wl(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 i, tmp;
	u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_60 PI_WRLVL_EN:RW:8:2 */
		clrsetbits_le32(&denali_pi[60], 0x3 << 8, 0x2 << 8);
		/* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
		clrsetbits_le32(&denali_pi[59],
				(0x1 << 8) | (0x3 << 16),
				(0x1 << 8) | (i << 16));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;

			/*
			 * check status obs; on error, leveling may not
			 * complete. PHY_40/168/296/424
			 * phy_wrlvl_status_obs_X:0:13
			 */
			obs_0 = readl(&denali_phy[40]);
			obs_1 = readl(&denali_phy[168]);
			obs_2 = readl(&denali_phy[296]);
			obs_3 = readl(&denali_phy[424]);
			if (((obs_0 >> 12) & 0x1) ||
			    ((obs_1 >> 12) & 0x1) ||
			    ((obs_2 >> 12) & 0x1) ||
			    ((obs_3 >> 12) & 0x1))
				obs_err = 1;
			if ((((tmp >> 10) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 4) & 0x1) == 0x0) &&
			    (obs_err == 0))
				break;
			else if ((((tmp >> 4) & 0x1) == 0x1) ||
				 (obs_err == 1))
				return -EIO;
		}
		/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}

	override_write_leveling_value(chan);
	clrbits_le32(&denali_pi[60], 0x3 << 8);

	return 0;
}

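/*
 * Run PI read-gate training for each rank, watching the per-slice gate
 * training observation registers for errors.
 */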
static int data_training_rg(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 i, tmp;
	u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
		clrsetbits_le32(&denali_pi[80], 0x3 << 24, 0x2 << 24);
		/*
		 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
		 * PI_RDLVL_CS:RW:24:2
		 */
		clrsetbits_le32(&denali_pi[74],
				(0x1 << 16) | (0x3 << 24),
				(0x1 << 16) | (i << 24));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;

			/*
			 * check status obs
			 * PHY_43/171/299/427
			 * PHY_GTLVL_STATUS_OBS_x:16:8
			 */
			obs_0 = readl(&denali_phy[43]);
			obs_1 = readl(&denali_phy[171]);
			obs_2 = readl(&denali_phy[299]);
			obs_3 = readl(&denali_phy[427]);
			if (((obs_0 >> (16 + 6)) & 0x3) ||
			    ((obs_1 >> (16 + 6)) & 0x3) ||
			    ((obs_2 >> (16 + 6)) & 0x3) ||
			    ((obs_3 >> (16 + 6)) & 0x3))
				obs_err = 1;
			if ((((tmp >> 9) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 3) & 0x1) == 0x0) &&
			    (obs_err == 0))
				break;
			else if ((((tmp >> 3) & 0x1) == 0x1) ||
				 (obs_err == 1))
				return -EIO;
		}
		/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[80], 0x3 << 24);

	return 0;
}

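/*
 * Run PI read leveling for each rank, polling PI_INT_STATUS for the done and
 * error bits.
 */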
static int data_training_rl(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 i, tmp;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_80 PI_RDLVL_EN:RW:16:2 */
		clrsetbits_le32(&denali_pi[80], 0x3 << 16, 0x2 << 16);
		/* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
		clrsetbits_le32(&denali_pi[74],
				(0x1 << 8) | (0x3 << 24),
				(0x1 << 8) | (i << 24));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;

			/*
			 * make sure status obs does not report an error bit
			 * PHY_46/174/302/430
			 * phy_rdlvl_status_obs_X:16:8
			 */
			if ((((tmp >> 8) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 2) & 0x1) == 0x0))
				break;
			else if (((tmp >> 2) & 0x1) == 0x1)
				return -EIO;
		}
		/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[80], 0x3 << 16);

	return 0;
}

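/*
 * Run PI write DQ leveling for each rank, clearing PI_WDQLVL_VREF_EN before
 * each leveling request and polling PI_INT_STATUS for completion or error.
 */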
static int data_training_wdql(const struct chan_info *chan, u32 channel,
			      const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 i, tmp;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/*
		 * disable PI_WDQLVL_VREF_EN before wdq leveling?
		 * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
		 */
		clrbits_le32(&denali_pi[181], 0x1 << 8);
		/* PI_124 PI_WDQLVL_EN:RW:16:2 */
		clrsetbits_le32(&denali_pi[124], 0x3 << 16, 0x2 << 16);
		/* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
		clrsetbits_le32(&denali_pi[121],
				(0x1 << 8) | (0x3 << 16),
				(0x1 << 8) | (i << 16));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;
			if ((((tmp >> 12) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 6) & 0x1) == 0x0))
				break;
			else if (((tmp >> 6) & 0x1) == 0x1)
				return -EIO;
		}
		/* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[124], 0x3 << 16);

	return 0;
}

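/*
 * Dispatch the individual training steps. PI_FULL_TRAINING is expanded into
 * the set of steps supported by the configured DRAM type; the DQS pad RPULL
 * bit is set while training runs and cleared again afterwards.
 */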
static int data_training(const struct chan_info *chan, u32 channel,
			 const struct rk3399_sdram_params *sdram_params,
			 u32 training_flag)
{
	u32 *denali_phy = chan->publ->denali_phy;

	/* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
	setbits_le32(&denali_phy[927], (1 << 22));

	if (training_flag == PI_FULL_TRAINING) {
		if (sdram_params->base.dramtype == LPDDR4) {
			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING | PI_WDQ_LEVELING;
		} else if (sdram_params->base.dramtype == LPDDR3) {
			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING;
		} else if (sdram_params->base.dramtype == DDR3) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING;
		}
	}

	/* ca training(LPDDR4,LPDDR3 support) */
	if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING)
		data_training_ca(chan, channel, sdram_params);

	/* write leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING)
		data_training_wl(chan, channel, sdram_params);

	/* read gate training(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING)
		data_training_rg(chan, channel, sdram_params);

	/* read leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING)
		data_training_rl(chan, channel, sdram_params);

	/* wdq leveling(LPDDR4 support) */
	if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING)
		data_training_wdql(chan, channel, sdram_params);

	/* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
	clrbits_le32(&denali_phy[927], (1 << 22));

	return 0;
}

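/*
 * Program the ddrconfig index into the MSCH ddrconf register and write the
 * per-chip-select capacities (cs0_cap/cs1_cap, divided by 32) into ddrsize.
 */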
static void set_ddrconfig(const struct chan_info *chan,
			  const struct rk3399_sdram_params *sdram_params,
			  unsigned char channel, u32 ddrconfig)
{
	/* only need to set ddrconfig */
	struct rk3399_msch_regs *ddr_msch_regs = chan->msch;
	unsigned int cs0_cap = 0;
	unsigned int cs1_cap = 0;

	cs0_cap = (1 << (sdram_params->ch[channel].cs0_row
			 + sdram_params->ch[channel].col
			 + sdram_params->ch[channel].bk
			 + sdram_params->ch[channel].bw - 20));
	if (sdram_params->ch[channel].rank > 1)
		cs1_cap = cs0_cap >> (sdram_params->ch[channel].cs0_row
				      - sdram_params->ch[channel].cs1_row);
	if (sdram_params->ch[channel].row_3_4) {
		cs0_cap = cs0_cap * 3 / 4;
		cs1_cap = cs1_cap * 3 / 4;
	}

	writel(ddrconfig | (ddrconfig << 8), &ddr_msch_regs->ddrconf);
	writel(((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8),
	       &ddr_msch_regs->ddrsize);
}

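/*
 * Encode the per-channel geometry into PMUGRF os_reg2 (read back later via
 * rockchip_sdram_size()), program the NOC timing registers, set the address
 * stride in PMUSGRF soc_con4 and configure the reboot-hold/reset registers.
 */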
static void dram_all_config(struct dram_info *dram,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 sys_reg = 0;
	unsigned int channel, idx;

	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
	sys_reg |= (sdram_params->base.num_channels - 1)
		   << SYS_REG_NUM_CH_SHIFT;

	for (channel = 0, idx = 0;
	     (idx < sdram_params->base.num_channels) && (channel < 2);
	     channel++) {
		const struct rk3399_sdram_channel *info =
			&sdram_params->ch[channel];
		struct rk3399_msch_regs *ddr_msch_regs;
		const struct rk3399_msch_timings *noc_timing;

		if (sdram_params->ch[channel].col == 0)
			continue;
		idx++;
		sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(channel);
		sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(channel);
		sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(channel);
		sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(channel);
		sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(channel);
		sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(channel);
		sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(channel);
		sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(channel);
		sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(channel);

		ddr_msch_regs = dram->chan[channel].msch;
		noc_timing = &sdram_params->ch[channel].noc_timings;
		writel(noc_timing->ddrtiminga0,
		       &ddr_msch_regs->ddrtiminga0);
		writel(noc_timing->ddrtimingb0,
		       &ddr_msch_regs->ddrtimingb0);
		writel(noc_timing->ddrtimingc0,
		       &ddr_msch_regs->ddrtimingc0);
		writel(noc_timing->devtodev0,
		       &ddr_msch_regs->devtodev0);
		writel(noc_timing->ddrmode,
		       &ddr_msch_regs->ddrmode);

		/* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
		if (sdram_params->ch[channel].rank == 1)
			setbits_le32(&dram->chan[channel].pctl->denali_ctl[276],
				     1 << 17);
	}

	writel(sys_reg, &dram->pmugrf->os_reg2);
	rk_clrsetreg(&dram->pmusgrf->soc_con4, 0x1f << 10,
		     sdram_params->base.stride << 10);

	/* reboot hold register set */
	writel(PRESET_SGRF_HOLD(0) | PRESET_GPIO0_HOLD(1) |
	       PRESET_GPIO1_HOLD(1),
	       &dram->pmucru->pmucru_rstnhold_con[1]);
	clrsetbits_le32(&dram->cru->glb_rst_con, 0x3, 0x3);
}

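/*
 * Hand-shake with the CIC to switch DRAM to the index-1 frequency, then
 * select PHY register set 1 and re-run full data training on every channel.
 */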
static int switch_to_phy_index1(struct dram_info *dram,
				const struct rk3399_sdram_params *sdram_params)
{
	u32 channel;
	u32 *denali_phy;
	u32 ch_count = sdram_params->base.num_channels;
	int ret;
	int i = 0;

	writel(RK_CLRSETBITS(0x03 << 4 | 1 << 2 | 1,
			     1 << 4 | 1 << 2 | 1),
	       &dram->cic->cic_ctrl0);
	while (!(readl(&dram->cic->cic_status0) & (1 << 2))) {
		mdelay(10);
		i++;
		if (i > 10) {
			debug("index1 frequency change overtime\n");
			return -ETIME;
		}
	}

	i = 0;
	writel(RK_CLRSETBITS(1 << 1, 1 << 1), &dram->cic->cic_ctrl0);
	while (!(readl(&dram->cic->cic_status0) & (1 << 0))) {
		mdelay(10);
		i++;
		if (i > 10) {
			debug("index1 frequency done overtime\n");
			return -ETIME;
		}
	}

	for (channel = 0; channel < ch_count; channel++) {
		denali_phy = dram->chan[channel].publ->denali_phy;
		clrsetbits_le32(&denali_phy[896], (0x3 << 8) | 1, 1 << 8);
		ret = data_training(&dram->chan[channel], channel,
				    sdram_params, PI_FULL_TRAINING);
		if (ret) {
			debug("index1 training failed\n");
			return ret;
		}
	}

	return 0;
}

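/*
 * Bring up both channels: set PHY DLL bypass, run pctl_cfg() and full data
 * training on each active channel, program ddrconfig and the common
 * registers, then switch to the index-1 frequency.
 */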
static int sdram_init(struct dram_info *dram,
		      const struct rk3399_sdram_params *sdram_params)
{
	unsigned char dramtype = sdram_params->base.dramtype;
	unsigned int ddr_freq = sdram_params->base.ddr_freq;
	int channel;

	debug("Starting SDRAM initialization...\n");

	if ((dramtype == DDR3 && ddr_freq > 933) ||
	    (dramtype == LPDDR3 && ddr_freq > 933) ||
	    (dramtype == LPDDR4 && ddr_freq > 800)) {
		debug("SDRAM frequency is too high!");
		return -E2BIG;
	}

	for (channel = 0; channel < 2; channel++) {
		const struct chan_info *chan = &dram->chan[channel];
		struct rk3399_ddr_publ_regs *publ = chan->publ;

		phy_dll_bypass_set(publ, ddr_freq);

		if (channel >= sdram_params->base.num_channels)
			continue;

		if (pctl_cfg(chan, channel, sdram_params) != 0) {
			printf("pctl_cfg fail, reset\n");
			return -EIO;
		}

		/* LPDDR2/LPDDR3 need to wait DAI complete, max 10us */
		if (dramtype == LPDDR3)
			udelay(10);

		if (data_training(chan, channel,
				  sdram_params, PI_FULL_TRAINING)) {
			printf("SDRAM initialization failed, reset\n");
			return -EIO;
		}

		set_ddrconfig(chan, sdram_params, channel,
			      sdram_params->ch[channel].ddrconfig);
	}
	dram_all_config(dram, sdram_params);
	switch_to_phy_index1(dram, sdram_params);

	debug("Finish SDRAM initialization...\n");
	return 0;
}

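/*
 * Read the "rockchip,sdram-params" property into sdram_params and create the
 * regmap for the controller register ranges (skipped with OF_PLATDATA).
 */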
static int rk3399_dmc_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	int ret;

	ret = dev_read_u32_array(dev, "rockchip,sdram-params",
				 (u32 *)&plat->sdram_params,
				 sizeof(plat->sdram_params) / sizeof(u32));
	if (ret) {
		printf("%s: Cannot read rockchip,sdram-params %d\n",
		       __func__, ret);
		return ret;
	}
	ret = regmap_init_mem(dev_ofnode(dev), &plat->map);
	if (ret)
		printf("%s: regmap failed %d\n", __func__, ret);
#endif
	return 0;
}

#if CONFIG_IS_ENABLED(OF_PLATDATA)
static int conv_of_platdata(struct udevice *dev)
{
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
	int ret;

	ret = regmap_init_mem_platdata(dev, dtplat->reg,
				       ARRAY_SIZE(dtplat->reg) / 2,
				       &plat->map);
	if (ret)
		return ret;

	return 0;
}
#endif

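/*
 * SPL-only init path: look up the syscon/CRU handles and per-channel
 * register ranges, set the DDR clock to the requested rate and run
 * sdram_init().
 */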
static int rk3399_dmc_init(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	int ret;
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_sdram_params *params = &plat->sdram_params;
#else
	struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
	struct rk3399_sdram_params *params =
					(void *)dtplat->rockchip_sdram_params;

	ret = conv_of_platdata(dev);
	if (ret)
		return ret;
#endif

	priv->cic = syscon_get_first_range(ROCKCHIP_SYSCON_CIC);
	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
	priv->pmusgrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUSGRF);
	priv->pmucru = rockchip_get_pmucru();
	priv->cru = rockchip_get_cru();
	priv->chan[0].pctl = regmap_get_range(plat->map, 0);
	priv->chan[0].pi = regmap_get_range(plat->map, 1);
	priv->chan[0].publ = regmap_get_range(plat->map, 2);
	priv->chan[0].msch = regmap_get_range(plat->map, 3);
	priv->chan[1].pctl = regmap_get_range(plat->map, 4);
	priv->chan[1].pi = regmap_get_range(plat->map, 5);
	priv->chan[1].publ = regmap_get_range(plat->map, 6);
	priv->chan[1].msch = regmap_get_range(plat->map, 7);

	debug("con reg %p %p %p %p %p %p %p %p\n",
	      priv->chan[0].pctl, priv->chan[0].pi,
	      priv->chan[0].publ, priv->chan[0].msch,
	      priv->chan[1].pctl, priv->chan[1].pi,
	      priv->chan[1].publ, priv->chan[1].msch);
	debug("cru %p, cic %p, grf %p, sgrf %p, pmucru %p\n", priv->cru,
	      priv->cic, priv->pmugrf, priv->pmusgrf, priv->pmucru);

#if CONFIG_IS_ENABLED(OF_PLATDATA)
	ret = clk_get_by_index_platdata(dev, 0, dtplat->clocks, &priv->ddr_clk);
#else
	ret = clk_get_by_index(dev, 0, &priv->ddr_clk);
#endif
	if (ret) {
		printf("%s clk get failed %d\n", __func__, ret);
		return ret;
	}

	ret = clk_set_rate(&priv->ddr_clk, params->base.ddr_freq * MHz);
	if (ret < 0) {
		printf("%s clk set failed %d\n", __func__, ret);
		return ret;
	}

	ret = sdram_init(priv, params);
	if (ret < 0) {
		printf("%s DRAM init failed %d\n", __func__, ret);
		return ret;
	}

	return 0;
}
#endif

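/*
 * In SPL the probe performs the full DRAM init; in U-Boot proper it only
 * reads back the size that SPL encoded into PMUGRF os_reg2.
 */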
static int rk3399_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_SPL_BUILD
	if (rk3399_dmc_init(dev))
		return 0;
#else
	struct dram_info *priv = dev_get_priv(dev);

	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
	debug("%s: pmugrf=%p\n", __func__, priv->pmugrf);
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = rockchip_sdram_size(
			(phys_addr_t)&priv->pmugrf->os_reg2);
#endif
	return 0;
}

static int rk3399_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops rk3399_dmc_ops = {
	.get_info = rk3399_dmc_get_info,
};

static const struct udevice_id rk3399_dmc_ids[] = {
	{ .compatible = "rockchip,rk3399-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk3399) = {
	.name = "rockchip_rk3399_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3399_dmc_ids,
	.ops = &rk3399_dmc_ops,
#ifdef CONFIG_SPL_BUILD
	.ofdata_to_platdata = rk3399_dmc_ofdata_to_platdata,
#endif
	.probe = rk3399_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_SPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),
#endif
};