ddr3_spd.c

// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#if defined(MV88F78X60)
#include "ddr3_axp_config.h"
#elif defined(MV88F67XX)
#include "ddr3_a370_config.h"
#endif

#if defined(MV88F672X)
#include "ddr3_a375_config.h"
#endif

#ifdef DUNIT_SPD

/* DIMM SPD offsets */
#define SPD_DEV_TYPE_BYTE		2

#define SPD_MODULE_TYPE_BYTE		3
#define SPD_MODULE_MASK			0xf
#define SPD_MODULE_TYPE_RDIMM		1
#define SPD_MODULE_TYPE_UDIMM		2

#define SPD_DEV_DENSITY_BYTE		4
#define SPD_DEV_DENSITY_MASK		0xf

#define SPD_ROW_NUM_BYTE		5
#define SPD_ROW_NUM_MIN			12
#define SPD_ROW_NUM_OFF			3
#define SPD_ROW_NUM_MASK		(7 << SPD_ROW_NUM_OFF)

#define SPD_COL_NUM_BYTE		5
#define SPD_COL_NUM_MIN			9
#define SPD_COL_NUM_OFF			0
#define SPD_COL_NUM_MASK		(7 << SPD_COL_NUM_OFF)

#define SPD_MODULE_ORG_BYTE		7
#define SPD_MODULE_SDRAM_DEV_WIDTH_OFF	0
#define SPD_MODULE_SDRAM_DEV_WIDTH_MASK	(7 << SPD_MODULE_SDRAM_DEV_WIDTH_OFF)
#define SPD_MODULE_BANK_NUM_MIN		1
#define SPD_MODULE_BANK_NUM_OFF		3
#define SPD_MODULE_BANK_NUM_MASK	(7 << SPD_MODULE_BANK_NUM_OFF)

#define SPD_BUS_WIDTH_BYTE		8
#define SPD_BUS_WIDTH_OFF		0
#define SPD_BUS_WIDTH_MASK		(7 << SPD_BUS_WIDTH_OFF)
#define SPD_BUS_ECC_OFF			3
#define SPD_BUS_ECC_MASK		(3 << SPD_BUS_ECC_OFF)

#define SPD_MTB_DIVIDEND_BYTE		10
#define SPD_MTB_DIVISOR_BYTE		11
#define SPD_TCK_BYTE			12
#define SPD_SUP_CAS_LAT_LSB_BYTE	14
#define SPD_SUP_CAS_LAT_MSB_BYTE	15
#define SPD_TAA_BYTE			16
#define SPD_TWR_BYTE			17
#define SPD_TRCD_BYTE			18
#define SPD_TRRD_BYTE			19
#define SPD_TRP_BYTE			20
#define SPD_TRAS_MSB_BYTE		21
#define SPD_TRAS_MSB_MASK		0xf
#define SPD_TRC_MSB_BYTE		21
#define SPD_TRC_MSB_MASK		0xf0
#define SPD_TRAS_LSB_BYTE		22
#define SPD_TRC_LSB_BYTE		23
#define SPD_TRFC_LSB_BYTE		24
#define SPD_TRFC_MSB_BYTE		25
#define SPD_TWTR_BYTE			26
#define SPD_TRTP_BYTE			27
#define SPD_TFAW_MSB_BYTE		28
#define SPD_TFAW_MSB_MASK		0xf
#define SPD_TFAW_LSB_BYTE		29
#define SPD_OPT_FEATURES_BYTE		30
#define SPD_THERMAL_REFRESH_OPT_BYTE	31

#define SPD_ADDR_MAP_BYTE		63
#define SPD_ADDR_MAP_MIRROR_OFFS	0

#define SPD_RDIMM_RC_BYTE		69
#define SPD_RDIMM_RC_NIBBLE_MASK	0xF
#define SPD_RDIMM_RC_NUM		16

/* Dimm Memory Type values */
#define SPD_MEM_TYPE_SDRAM		0x4
#define SPD_MEM_TYPE_DDR1		0x7
#define SPD_MEM_TYPE_DDR2		0x8
#define SPD_MEM_TYPE_DDR3		0xB

#define DIMM_MODULE_MANU_OFFS		64
#define DIMM_MODULE_MANU_SIZE		8
#define DIMM_MODULE_VEN_OFFS		73
#define DIMM_MODULE_VEN_SIZE		25
#define DIMM_MODULE_ID_OFFS		99
#define DIMM_MODULE_ID_SIZE		18

/* enumeration for voltage levels. */
enum dimm_volt_if {
	TTL_5V_TOLERANT,
	LVTTL,
	HSTL_1_5V,
	SSTL_3_3V,
	SSTL_2_5V,
	VOLTAGE_UNKNOWN,
};
/* enumeration for SDRAM CAS Latencies. */
enum dimm_sdram_cas {
	SD_CL_1 = 1,
	SD_CL_2,
	SD_CL_3,
	SD_CL_4,
	SD_CL_5,
	SD_CL_6,
	SD_CL_7,
	SD_FAULT
};

/* enumeration for memory types */
enum memory_type {
	MEM_TYPE_SDRAM,
	MEM_TYPE_DDR1,
	MEM_TYPE_DDR2,
	MEM_TYPE_DDR3
};

/* DIMM information structure */
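/*
 * Note: the size fields below are derived from the SPD organization bytes,
 * and the timing fields are kept in picoseconds (see ddr3_spd_init()), so
 * they can later be divided by the DDR clock period to obtain cycle counts.
 */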
typedef struct dimm_info {
	/* DIMM dimensions */
	u32 num_of_module_ranks;
	u32 data_width;
	u32 rank_capacity;
	u32 num_of_devices;
	u32 sdram_width;
	u32 num_of_banks_on_each_device;
	u32 sdram_capacity;
	u32 num_of_row_addr;
	u32 num_of_col_addr;
	u32 addr_mirroring;
	u32 err_check_type;		/* ECC , PARITY.. */
	u32 type_info;			/* DDR2 only */

	/* DIMM timing parameters */
	u32 supported_cas_latencies;
	u32 refresh_interval;
	u32 min_cycle_time;
	u32 min_row_precharge_time;
	u32 min_row_active_to_row_active;
	u32 min_ras_to_cas_delay;
	u32 min_write_recovery_time;		/* DDR3/2 only */
	u32 min_write_to_read_cmd_delay;	/* DDR3/2 only */
	u32 min_read_to_prech_cmd_delay;	/* DDR3/2 only */
	u32 min_active_to_precharge;
	u32 min_refresh_recovery;		/* DDR3/2 only */
	u32 min_cas_lat_time;
	u32 min_four_active_win_delay;
	u8 dimm_rc[SPD_RDIMM_RC_NUM];

	/* DIMM vendor ID */
	u32 vendor;
} MV_DIMM_INFO;
static int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info,
			     u32 dimm);
static u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val);
static u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val);
static int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width);
static u32 ddr3_div(u32 val, u32 divider, u32 sub);

extern u8 spd_data[SPD_SIZE];
extern u32 odt_config[ODT_OPT];
extern u16 odt_static[ODT_OPT][MAX_CS];
extern u16 odt_dynamic[ODT_OPT][MAX_CS];

#if !(defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710))
/*
 * Name:     ddr3_get_dimm_num - Find number of dimms and their addresses
 * Desc:
 * Args:     dimm_addr - array of dimm addresses
 * Notes:
 * Returns:  Number of DIMMs found.
 */
static u32 ddr3_get_dimm_num(u32 *dimm_addr)
{
	u32 dimm_cur_addr;
	u8 data[3];
	u32 dimm_num = 0;
	int ret;

	/* Read the dimm eeprom */
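	/*
	 * Note: the bus is scanned from MAX_DIMM_ADDR down to MIN_DIMM_ADDR;
	 * only the first three SPD bytes are fetched here, and byte 2
	 * (SPD_DEV_TYPE_BYTE) is checked for the DDR3 memory-type code (0xB)
	 * before the address is recorded as a populated DIMM slot.
	 */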
	for (dimm_cur_addr = MAX_DIMM_ADDR; dimm_cur_addr > MIN_DIMM_ADDR;
	     dimm_cur_addr--) {
		data[SPD_DEV_TYPE_BYTE] = 0;

		/* Far-End DIMM must be connected */
		if ((dimm_num == 0) && (dimm_cur_addr < FAR_END_DIMM_ADDR))
			return 0;

		ret = i2c_read(dimm_cur_addr, 0, 1, (uchar *)data, 3);
		if (!ret) {
			if (data[SPD_DEV_TYPE_BYTE] == SPD_MEM_TYPE_DDR3) {
				dimm_addr[dimm_num] = dimm_cur_addr;
				dimm_num++;
			}
		}
	}

	return dimm_num;
}
#endif

/*
 * Name:     ddr3_spd_init - Get the SPD parameters.
 * Desc:     Read the DIMM SPD parameters into given struct parameter.
 * Args:     info - DIMM information structure to fill.
 *           dimm_addr - DIMM SPD device I2C address (0 means spd_data[]
 *                       is already populated).
 *           dimm_width - DDR data bus width.
 * Notes:
 * Returns:  MV_OK if function could read DIMM parameters, error code
 *           otherwise.
 */
int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width)
{
	u32 tmp;
	u32 time_base;
	int ret;
	__maybe_unused u32 rc;
	__maybe_unused u8 vendor_high, vendor_low;

	if (dimm_addr != 0) {
		memset(spd_data, 0, SPD_SIZE * sizeof(u8));

		ret = i2c_read(dimm_addr, 0, 1, (uchar *)spd_data, SPD_SIZE);
		if (ret)
			return MV_DDR3_TRAINING_ERR_TWSI_FAIL;
	}

	/* Check if DDR3 */
	if (spd_data[SPD_DEV_TYPE_BYTE] != SPD_MEM_TYPE_DDR3)
		return MV_DDR3_TRAINING_ERR_TWSI_BAD_TYPE;

	/* Error Check Type */
	/* No byte for error check in DDR3 SPD, use DDR2 convention */
	info->err_check_type = 0;

	/* Check if ECC */
	if ((spd_data[SPD_BUS_WIDTH_BYTE] & 0x18) >> 3)
		info->err_check_type = 1;

	DEBUG_INIT_FULL_C("DRAM err_check_type ", info->err_check_type, 1);

	switch (spd_data[SPD_MODULE_TYPE_BYTE]) {
	case 1:
		/* support RDIMM */
		info->type_info = SPD_MODULE_TYPE_RDIMM;
		break;
	case 2:
		/* support UDIMM */
		info->type_info = SPD_MODULE_TYPE_UDIMM;
		break;
	case 11:		/* LRDIMM currently not supported */
	default:
		info->type_info = (spd_data[SPD_MODULE_TYPE_BYTE]);
		break;
	}

	/* Size Calculations: */

	/* Number Of Row Addresses - 12/13/14/15/16 */
	info->num_of_row_addr =
		(spd_data[SPD_ROW_NUM_BYTE] & SPD_ROW_NUM_MASK) >>
		SPD_ROW_NUM_OFF;
	info->num_of_row_addr += SPD_ROW_NUM_MIN;
	DEBUG_INIT_FULL_C("DRAM num_of_row_addr ", info->num_of_row_addr, 2);

	/* Number Of Column Addresses - 9/10/11/12 */
	info->num_of_col_addr =
		(spd_data[SPD_COL_NUM_BYTE] & SPD_COL_NUM_MASK) >>
		SPD_COL_NUM_OFF;
	info->num_of_col_addr += SPD_COL_NUM_MIN;
	DEBUG_INIT_FULL_C("DRAM num_of_col_addr ", info->num_of_col_addr, 1);

	/* Number Of Ranks = number of CS on Dimm - 1/2/3/4 Ranks */
	info->num_of_module_ranks =
		(spd_data[SPD_MODULE_ORG_BYTE] & SPD_MODULE_BANK_NUM_MASK) >>
		SPD_MODULE_BANK_NUM_OFF;
	info->num_of_module_ranks += SPD_MODULE_BANK_NUM_MIN;
	DEBUG_INIT_FULL_C("DRAM numOfModuleBanks ", info->num_of_module_ranks,
			  1);

	/* Data Width - 8/16/32/64 bits */
	info->data_width =
		1 << (3 + (spd_data[SPD_BUS_WIDTH_BYTE] & SPD_BUS_WIDTH_MASK));
	DEBUG_INIT_FULL_C("DRAM data_width ", info->data_width, 1);

	/* Number Of Banks On Each Device - 8/16/32/64 banks */
	info->num_of_banks_on_each_device =
		1 << (3 + ((spd_data[SPD_DEV_DENSITY_BYTE] >> 4) & 0x7));
	DEBUG_INIT_FULL_C("DRAM num_of_banks_on_each_device ",
			  info->num_of_banks_on_each_device, 1);

	/* Total SDRAM capacity - 256Mb/512Mb/1Gb/2Gb/4Gb/8Gb/16Gb - MegaBits */
	info->sdram_capacity =
		spd_data[SPD_DEV_DENSITY_BYTE] & SPD_DEV_DENSITY_MASK;

	/* Sdram Width - 4/8/16/32 bits */
	info->sdram_width = 1 << (2 + (spd_data[SPD_MODULE_ORG_BYTE] &
				       SPD_MODULE_SDRAM_DEV_WIDTH_MASK));
	DEBUG_INIT_FULL_C("DRAM sdram_width ", info->sdram_width, 1);

	/* CS (Rank) Capacity - MB */
	/*
	 * DDR3 device density values are: (device capacity / 8) *
	 * (Module_width / Device_width)
	 */
	/* Jedec SPD DDR3 - page 7, Save spd_data in Mb - 2048=2GB */
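	/*
	 * Worked example (illustrative): 2Gb x8 devices on a 64-bit module
	 * give sdram_capacity = 0x3, so (1 << 3) * 256 = 2048 Mbit per
	 * device, times data_width / sdram_width = 64 / 8 = 8 devices per
	 * rank = 16384 Mbit; the "* 0x2 << 16" term converts Mbit to bytes
	 * (1 Mbit = 0x20000 bytes), i.e. rank_capacity = 0x80000000 (2 GB).
	 */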
	if (dimm_width == 32) {
		info->rank_capacity =
			((1 << info->sdram_capacity) * 256 *
			 (info->data_width / info->sdram_width)) << 16;
		/* CS size = CS size / 2 */
	} else {
		info->rank_capacity =
			((1 << info->sdram_capacity) * 256 *
			 (info->data_width / info->sdram_width) * 0x2) << 16;
		/* 0x2 => 0x100000-1Mbit / 8-bit->byte / 0x10000 */
	}
	DEBUG_INIT_FULL_C("DRAM rank_capacity[31] ", info->rank_capacity, 1);

	/* Number of devices including Error correction */
	info->num_of_devices =
		((info->data_width / info->sdram_width) *
		 info->num_of_module_ranks) + info->err_check_type;
	DEBUG_INIT_FULL_C("DRAM num_of_devices ", info->num_of_devices, 1);

	/* Address Mapping from Edge connector to DRAM - mirroring option */
	info->addr_mirroring =
		spd_data[SPD_ADDR_MAP_BYTE] & (1 << SPD_ADDR_MAP_MIRROR_OFFS);

	/* Timings - All in ps */
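	/*
	 * The SPD medium timebase (MTB) is dividend / divisor in ns
	 * (typically 1/8 ns); multiplying by 1000 keeps time_base, and hence
	 * every timing field derived from it, in picoseconds.
	 */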
	time_base = (1000 * spd_data[SPD_MTB_DIVIDEND_BYTE]) /
		spd_data[SPD_MTB_DIVISOR_BYTE];

	/* Minimum Cycle Time at Max CAS Latency */
	info->min_cycle_time = spd_data[SPD_TCK_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM tCKmin ", info->min_cycle_time, 1);

	/* Refresh Interval */
	/* No byte for refresh interval in DDR3 SPD, use DDR2 convention */
	/*
	 * JEDEC params are 0 <= Tcase <= 85: 7.8uSec, 85 <= Tcase
	 * <= 95: 3.9uSec
	 */
	info->refresh_interval = 7800000;	/* Set to 7.8uSec */
	DEBUG_INIT_FULL_C("DRAM refresh_interval ", info->refresh_interval, 1);

	/* Supported CAS Latencies - DDR3: */
	/*
	 *       bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0  *
	 *******-******-******-******-******-******-******-*******-*******
	 * CAS =  11  |  10  |   9  |   8  |   7  |   6  |   5  |   4   *
	 *********************************************************-*******
	 *******-******-******-******-******-******-******-*******-*******
	 *      bit15 |bit14 |bit13 |bit12 |bit11 |bit10 | bit9 | bit8  *
	 *******-******-******-******-******-******-******-*******-*******
	 * CAS =  TBD |  18  |  17  |  16  |  15  |  14  |  13  |  12   *
	 */

	/* DDR3 includes 2 bytes of CAS support */
	info->supported_cas_latencies =
		(spd_data[SPD_SUP_CAS_LAT_MSB_BYTE] << 8) |
		spd_data[SPD_SUP_CAS_LAT_LSB_BYTE];
	DEBUG_INIT_FULL_C("DRAM supported_cas_latencies ",
			  info->supported_cas_latencies, 1);

	/* Minimum CAS Latency Time (tAA) */
	info->min_cas_lat_time = (spd_data[SPD_TAA_BYTE] * time_base);
	/*
	 * This field divided by the cycleTime will give us the CAS latency
	 * to config
	 */

	/*
	 * For DDR3 and DDR2 includes Write Recovery Time field.
	 * Other SDRAM ignore
	 */
	info->min_write_recovery_time = spd_data[SPD_TWR_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_write_recovery_time ",
			  info->min_write_recovery_time, 1);

	/* Minimum RAS to CAS Delay */
	info->min_ras_to_cas_delay = spd_data[SPD_TRCD_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_ras_to_cas_delay ",
			  info->min_ras_to_cas_delay, 1);

	/* Minimum Row Active to Row Active Time */
	info->min_row_active_to_row_active =
		spd_data[SPD_TRRD_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_row_active_to_row_active ",
			  info->min_row_active_to_row_active, 1);

	/* Minimum Row Precharge Delay Time */
	info->min_row_precharge_time = spd_data[SPD_TRP_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_row_precharge_time ",
			  info->min_row_precharge_time, 1);

	/* Minimum Active to Precharge Delay Time - tRAS ps */
	info->min_active_to_precharge =
		(spd_data[SPD_TRAS_MSB_BYTE] & SPD_TRAS_MSB_MASK) << 8;
	info->min_active_to_precharge |= spd_data[SPD_TRAS_LSB_BYTE];
	info->min_active_to_precharge *= time_base;
	DEBUG_INIT_FULL_C("DRAM min_active_to_precharge ",
			  info->min_active_to_precharge, 1);

	/* Minimum Refresh Recovery Delay Time - tRFC ps */
	info->min_refresh_recovery = spd_data[SPD_TRFC_MSB_BYTE] << 8;
	info->min_refresh_recovery |= spd_data[SPD_TRFC_LSB_BYTE];
	info->min_refresh_recovery *= time_base;
	DEBUG_INIT_FULL_C("DRAM min_refresh_recovery ",
			  info->min_refresh_recovery, 1);

	/*
	 * For DDR3 and DDR2 includes Internal Write To Read Command Delay
	 * field.
	 */
	info->min_write_to_read_cmd_delay = spd_data[SPD_TWTR_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_write_to_read_cmd_delay ",
			  info->min_write_to_read_cmd_delay, 1);

	/*
	 * For DDR3 and DDR2 includes Internal Read To Precharge Command Delay
	 * field.
	 */
	info->min_read_to_prech_cmd_delay = spd_data[SPD_TRTP_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_read_to_prech_cmd_delay ",
			  info->min_read_to_prech_cmd_delay, 1);

	/*
	 * For DDR3 includes Minimum Activate to Activate/Refresh Command
	 * field
	 */
	tmp = ((spd_data[SPD_TFAW_MSB_BYTE] & SPD_TFAW_MSB_MASK) << 8) |
		spd_data[SPD_TFAW_LSB_BYTE];
	info->min_four_active_win_delay = tmp * time_base;
	DEBUG_INIT_FULL_C("DRAM min_four_active_win_delay ",
			  info->min_four_active_win_delay, 1);

#if defined(MV88F78X60) || defined(MV88F672X)
	/* Registered DIMM support */
	if (info->type_info == SPD_MODULE_TYPE_RDIMM) {
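		/*
		 * Each RDIMM control-word SPD byte holds two 4-bit register
		 * control words: the low nibble is the even-numbered RC and
		 * the high nibble the odd-numbered RC that follows it.
		 */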
		for (rc = 2; rc < 6; rc += 2) {
			tmp = spd_data[SPD_RDIMM_RC_BYTE + rc / 2];
			info->dimm_rc[rc] =
				spd_data[SPD_RDIMM_RC_BYTE + rc / 2] &
				SPD_RDIMM_RC_NIBBLE_MASK;
			info->dimm_rc[rc + 1] =
				(spd_data[SPD_RDIMM_RC_BYTE + rc / 2] >> 4) &
				SPD_RDIMM_RC_NIBBLE_MASK;
		}

		vendor_low = spd_data[66];
		vendor_high = spd_data[65];
		info->vendor = (vendor_high << 8) + vendor_low;
		DEBUG_INIT_C("DDR3 Training Sequence - Registered DIMM vendor ID 0x",
			     info->vendor, 4);

		info->dimm_rc[0] = RDIMM_RC0;
		info->dimm_rc[1] = RDIMM_RC1;
		info->dimm_rc[2] = RDIMM_RC2;
		info->dimm_rc[8] = RDIMM_RC8;
		info->dimm_rc[9] = RDIMM_RC9;
		info->dimm_rc[10] = RDIMM_RC10;
		info->dimm_rc[11] = RDIMM_RC11;
	}
#endif

	return MV_OK;
}

/*
 * Name:     ddr3_spd_sum_init - Merge the SPD parameters of all DIMMs.
 * Desc:     Fold the given DIMM's SPD parameters into the summary structure,
 *           keeping the worst-case (largest) timing values and the common
 *           set of supported CAS latencies.
 * Args:     info - DIMM information structure of the current DIMM.
 *           sum_info - summary DIMM information structure.
 *           dimm - DIMM index (0 initializes sum_info).
 * Notes:
 * Returns:  MV_OK if the DIMM parameters are consistent, error code
 *           otherwise.
 */
int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info, u32 dimm)
{
	if (dimm == 0) {
		memcpy(sum_info, info, sizeof(MV_DIMM_INFO));
		return MV_OK;
	}

	if (sum_info->type_info != info->type_info) {
		DEBUG_INIT_S("DDR3 Dimm Compare - DIMM type does not match - FAIL\n");
		return MV_DDR3_TRAINING_ERR_DIMM_TYPE_NO_MATCH;
	}

	if (sum_info->err_check_type > info->err_check_type) {
		sum_info->err_check_type = info->err_check_type;
		DEBUG_INIT_S("DDR3 Dimm Compare - ECC does not match. ECC is disabled\n");
	}

	if (sum_info->data_width != info->data_width) {
		DEBUG_INIT_S("DDR3 Dimm Compare - DRAM bus width does not match - FAIL\n");
		return MV_DDR3_TRAINING_ERR_BUS_WIDTH_NOT_MATCH;
	}

	if (sum_info->min_cycle_time < info->min_cycle_time)
		sum_info->min_cycle_time = info->min_cycle_time;

	if (sum_info->refresh_interval < info->refresh_interval)
		sum_info->refresh_interval = info->refresh_interval;

	sum_info->supported_cas_latencies &= info->supported_cas_latencies;

	if (sum_info->min_cas_lat_time < info->min_cas_lat_time)
		sum_info->min_cas_lat_time = info->min_cas_lat_time;

	if (sum_info->min_write_recovery_time < info->min_write_recovery_time)
		sum_info->min_write_recovery_time =
			info->min_write_recovery_time;

	if (sum_info->min_ras_to_cas_delay < info->min_ras_to_cas_delay)
		sum_info->min_ras_to_cas_delay = info->min_ras_to_cas_delay;

	if (sum_info->min_row_active_to_row_active <
	    info->min_row_active_to_row_active)
		sum_info->min_row_active_to_row_active =
			info->min_row_active_to_row_active;

	if (sum_info->min_row_precharge_time < info->min_row_precharge_time)
		sum_info->min_row_precharge_time = info->min_row_precharge_time;

	if (sum_info->min_active_to_precharge < info->min_active_to_precharge)
		sum_info->min_active_to_precharge =
			info->min_active_to_precharge;

	if (sum_info->min_refresh_recovery < info->min_refresh_recovery)
		sum_info->min_refresh_recovery = info->min_refresh_recovery;

	if (sum_info->min_write_to_read_cmd_delay <
	    info->min_write_to_read_cmd_delay)
		sum_info->min_write_to_read_cmd_delay =
			info->min_write_to_read_cmd_delay;

	if (sum_info->min_read_to_prech_cmd_delay <
	    info->min_read_to_prech_cmd_delay)
		sum_info->min_read_to_prech_cmd_delay =
			info->min_read_to_prech_cmd_delay;

	if (sum_info->min_four_active_win_delay <
	    info->min_four_active_win_delay)
		sum_info->min_four_active_win_delay =
			info->min_four_active_win_delay;

	if (sum_info->min_write_to_read_cmd_delay <
	    info->min_write_to_read_cmd_delay)
		sum_info->min_write_to_read_cmd_delay =
			info->min_write_to_read_cmd_delay;

	return MV_OK;
}

/*
 * Name:     ddr3_dunit_setup
 * Desc:     Set the controller with the timing values.
 * Args:     ecc_ena - User ECC setup
 * Notes:
 * Returns:
 */
int ddr3_dunit_setup(u32 ecc_ena, u32 hclk_time, u32 *ddr_width)
{
	u32 reg, tmp, cwl;
	u32 ddr_clk_time;
	MV_DIMM_INFO dimm_info[2];
	MV_DIMM_INFO sum_info;
	u32 stat_val, spd_val;
	u32 cs, cl, cs_num, cs_ena;
	u32 dimm_num = 0;
	int status;
	u32 rc;
	__maybe_unused u32 dimm_cnt, cs_count, dimm;
	__maybe_unused u32 dimm_addr[2] = { 0, 0 };

#if defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710)
	/* Armada 370 - SPD is not available on DIMM */
	/*
	 * Set MC registers according to Static SPD values Values -
	 * must be set manually
	 */
	/*
	 * We only have one optional DIMM for the DB and we already got the
	 * SPD matching values
	 */
	status = ddr3_spd_init(&dimm_info[0], 0, *ddr_width);
	if (MV_OK != status)
		return status;

	dimm_num = 1;
	/* Use JP8 to enable multiCS support for Armada 370 DB */
	if (!ddr3_check_config(EEPROM_MODULE_ADDR, CONFIG_MULTI_CS))
		dimm_info[0].num_of_module_ranks = 1;
	status = ddr3_spd_sum_init(&dimm_info[0], &sum_info, 0);
	if (MV_OK != status)
		return status;
#else
	/* Dynamic D-Unit Setup - Read SPD values */
#ifdef DUNIT_SPD
	dimm_num = ddr3_get_dimm_num(dimm_addr);
	if (dimm_num == 0) {
#ifdef MIXED_DIMM_STATIC
		DEBUG_INIT_S("DDR3 Training Sequence - No DIMMs detected\n");
#else
		DEBUG_INIT_S("DDR3 Training Sequence - FAILED (Wrong DIMMs Setup)\n");
		return MV_DDR3_TRAINING_ERR_BAD_DIMM_SETUP;
#endif
	} else {
		DEBUG_INIT_C("DDR3 Training Sequence - Number of DIMMs detected: ",
			     dimm_num, 1);
	}

	for (dimm = 0; dimm < dimm_num; dimm++) {
		status = ddr3_spd_init(&dimm_info[dimm], dimm_addr[dimm],
				       *ddr_width);
		if (MV_OK != status)
			return status;
		status = ddr3_spd_sum_init(&dimm_info[dimm], &sum_info, dimm);
		if (MV_OK != status)
			return status;
	}
#endif
#endif

	/* Set number of enabled CS */
	cs_num = 0;
#ifdef DUNIT_STATIC
	cs_num = ddr3_get_cs_num_from_reg();
#endif
#ifdef DUNIT_SPD
	for (dimm = 0; dimm < dimm_num; dimm++)
		cs_num += dimm_info[dimm].num_of_module_ranks;
#endif
	if (cs_num > MAX_CS) {
		DEBUG_INIT_C("DDR3 Training Sequence - Number of CS exceed limit - ",
			     MAX_CS, 1);
		return MV_DDR3_TRAINING_ERR_MAX_CS_LIMIT;
	}

	/* Set bitmap of enabled CS */
	cs_ena = 0;
#ifdef DUNIT_STATIC
	cs_ena = ddr3_get_cs_ena_from_reg();
#endif
#ifdef DUNIT_SPD
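	/*
	 * DIMMs are assigned to chip-selects starting at even CS numbers:
	 * a single-rank DIMM enables one CS bit, a dual-rank DIMM two, and
	 * so on (masked by DIMM_CS_BITMAP), producing the cs_ena bitmap used
	 * throughout the rest of the setup.
	 */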
	dimm = 0;
	if (dimm_num) {
		for (cs = 0; cs < MAX_CS; cs += 2) {
			if (((1 << cs) & DIMM_CS_BITMAP) &&
			    !(cs_ena & (1 << cs))) {
				if (dimm_info[dimm].num_of_module_ranks == 1)
					cs_ena |= (0x1 << cs);
				else if (dimm_info[dimm].num_of_module_ranks == 2)
					cs_ena |= (0x3 << cs);
				else if (dimm_info[dimm].num_of_module_ranks == 3)
					cs_ena |= (0x7 << cs);
				else if (dimm_info[dimm].num_of_module_ranks == 4)
					cs_ena |= (0xF << cs);
				dimm++;
				if (dimm == dimm_num)
					break;
			}
		}
	}
#endif

	if (cs_ena > 0xF) {
		DEBUG_INIT_C("DDR3 Training Sequence - Number of enabled CS exceed limit - ",
			     MAX_CS, 1);
		return MV_DDR3_TRAINING_ERR_MAX_ENA_CS_LIMIT;
	}

	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Number of CS = ", cs_num, 1);

	/* Check Ratio - '1' - 2:1, '0' - 1:1 */
	if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
		ddr_clk_time = hclk_time / 2;
	else
		ddr_clk_time = hclk_time;

#ifdef DUNIT_STATIC
	/* Get target CL value from set register */
	reg = (reg_read(REG_DDR3_MR0_ADDR) >> 2);
	reg = ((((reg >> 1) & 0xE)) | (reg & 0x1)) & 0xF;

	cl = ddr3_get_max_val(ddr3_div(sum_info.min_cas_lat_time,
				       ddr_clk_time, 0),
			      dimm_num, ddr3_valid_cl_to_cl(reg));
#else
	cl = ddr3_div(sum_info.min_cas_lat_time, ddr_clk_time, 0);
#endif
	if (cl < 5)
		cl = 5;

	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Cas Latency = ", cl, 1);

	/* {0x00001400} - DDR SDRAM Configuration Register */
	reg = 0x73004000;
	stat_val = ddr3_get_static_mc_value(
		REG_SDRAM_CONFIG_ADDR, REG_SDRAM_CONFIG_ECC_OFFS, 0x1, 0, 0);
	if (ecc_ena && ddr3_get_min_val(sum_info.err_check_type, dimm_num,
					stat_val)) {
		reg |= (1 << REG_SDRAM_CONFIG_ECC_OFFS);
		reg |= (1 << REG_SDRAM_CONFIG_IERR_OFFS);
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Enabled\n");
	} else {
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Disabled\n");
	}

	if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
#ifdef DUNIT_STATIC
		DEBUG_INIT_S("DDR3 Training Sequence - FAIL - Illegal R-DIMM setup\n");
		return MV_DDR3_TRAINING_ERR_BAD_R_DIMM_SETUP;
#endif
		reg |= (1 << REG_SDRAM_CONFIG_REGDIMM_OFFS);
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - R-DIMM\n");
	} else {
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - U-DIMM\n");
	}

#ifndef MV88F67XX
#ifdef DUNIT_STATIC
	if (ddr3_get_min_val(sum_info.data_width, dimm_num, BUS_WIDTH) == 64) {
#else
	if (*ddr_width == 64) {
#endif
		reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 64Bits\n");
	} else {
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
	}
#else
	DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");
#endif

#if defined(MV88F672X)
	if (*ddr_width == 32) {
		reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
	} else {
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");
	}
#endif
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_CONFIG_ADDR, 0,
					    REG_SDRAM_CONFIG_RFRS_MASK, 0, 0);
	tmp = ddr3_get_min_val(sum_info.refresh_interval / hclk_time,
			       dimm_num, stat_val);

#ifdef TREFI_USER_EN
	tmp = min(TREFI_USER / hclk_time, tmp);
#endif

	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - RefreshInterval/Hclk = ", tmp, 4);
	reg |= tmp;

	if (cl != 3)
		reg |= (1 << 16);	/* If 2:1 need to set P2DWr */

#if defined(MV88F672X)
	reg |= (1 << 27);	/* PhyRfRST = Disable */
#endif
	reg_write(REG_SDRAM_CONFIG_ADDR, reg);

	/*{0x00001404} - DDR Dunit Control (Low) Register */
	reg = 0x3630B800;
#ifdef DUNIT_SPD
	reg |= (DRAM_2T << REG_DUNIT_CTRL_LOW_2T_OFFS);
#endif
	reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);

	/* {0x00001408} - DDR SDRAM Timing (Low) Register */
	reg = 0x0;

	/* tRAS - (0:3,20) */
	spd_val = ddr3_div(sum_info.min_active_to_precharge,
			   ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
					    0, 0xF, 16, 0x10);
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRAS-1 = ", tmp, 1);
	reg |= (tmp & 0xF);
	reg |= ((tmp & 0x10) << 16);	/* to bit 20 */

	/* tRCD - (4:7) */
	spd_val = ddr3_div(sum_info.min_ras_to_cas_delay, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
					    4, 0xF, 0, 0);
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRCD-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 4);

	/* tRP - (8:11) */
	spd_val = ddr3_div(sum_info.min_row_precharge_time, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
					    8, 0xF, 0, 0);
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRP-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 8);

	/* tWR - (12:15) */
	spd_val = ddr3_div(sum_info.min_write_recovery_time, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
					    12, 0xF, 0, 0);
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWR-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 12);

	/* tWTR - (16:19) */
	spd_val = ddr3_div(sum_info.min_write_to_read_cmd_delay, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
					    16, 0xF, 0, 0);
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWTR-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 16);

	/* tRRD - (24:27) */
	spd_val = ddr3_div(sum_info.min_row_active_to_row_active, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
					    24, 0xF, 0, 0);
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRRD-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 24);

	/* tRTP - (28:31) */
	spd_val = ddr3_div(sum_info.min_read_to_prech_cmd_delay, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
					    28, 0xF, 0, 0);
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRTP-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 28);

	if (cl < 7)
		reg = 0x33137663;

	reg_write(REG_SDRAM_TIMING_LOW_ADDR, reg);

	/*{0x0000140C} - DDR SDRAM Timing (High) Register */
	/* Add cycles to R2R W2W */
	reg = 0x39F8FF80;

	/* tRFC - (0:6,16:18) */
	spd_val = ddr3_div(sum_info.min_refresh_recovery, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_HIGH_ADDR,
					    0, 0x7F, 9, 0x380);
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRFC-1 = ", tmp, 1);
	reg |= (tmp & 0x7F);
	reg |= ((tmp & 0x380) << 9);	/* to bit 16 */
	reg_write(REG_SDRAM_TIMING_HIGH_ADDR, reg);

	/*{0x00001410} - DDR SDRAM Address Control Register */
	reg = 0x000F0000;

	/* tFAW - (24:28) */
#if (defined(MV88F78X60) || defined(MV88F672X))
	tmp = sum_info.min_four_active_win_delay;
	spd_val = ddr3_div(tmp, ddr_clk_time, 0);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
					    24, 0x3F, 0, 0);
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW = ", tmp, 1);
	reg |= ((tmp & 0x3F) << 24);
#else
	tmp = sum_info.min_four_active_win_delay -
		4 * (sum_info.min_row_active_to_row_active);
	spd_val = ddr3_div(tmp, ddr_clk_time, 0);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
					    24, 0x1F, 0, 0);
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW-4*tRRD = ", tmp, 1);
	reg |= ((tmp & 0x1F) << 24);
#endif

	/* SDRAM device capacity */
#ifdef DUNIT_STATIC
	reg |= (reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR) & 0xF0FFFF);
#endif

#ifdef DUNIT_SPD
	cs_count = 0;
	dimm_cnt = 0;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
			if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
				dimm_cnt++;
				cs_count = 0;
			}
			cs_count++;
			if (dimm_info[dimm_cnt].sdram_capacity < 0x3) {
				reg |= ((dimm_info[dimm_cnt].sdram_capacity + 1) <<
					(REG_SDRAM_ADDRESS_SIZE_OFFS +
					 (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
			} else if (dimm_info[dimm_cnt].sdram_capacity > 0x3) {
				reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x3) <<
					(REG_SDRAM_ADDRESS_SIZE_OFFS +
					 (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
				reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x4) <<
					(REG_SDRAM_ADDRESS_SIZE_HIGH_OFFS + cs));
			}
		}
	}

	/* SDRAM device structure */
	cs_count = 0;
	dimm_cnt = 0;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
			if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
				dimm_cnt++;
				cs_count = 0;
			}
			cs_count++;
			if (dimm_info[dimm_cnt].sdram_width == 16)
				reg |= (1 << (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs));
		}
	}
#endif
	reg_write(REG_SDRAM_ADDRESS_CTRL_ADDR, reg);

	/*{0x00001418} - DDR SDRAM Operation Register */
	reg = 0xF00;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs))
			reg &= ~(1 << (cs + REG_SDRAM_OPERATION_CS_OFFS));
	}
	reg_write(REG_SDRAM_OPERATION_ADDR, reg);

	/*{0x00001420} - DDR SDRAM Extended Mode Register */
	reg = 0x00000004;
	reg_write(REG_SDRAM_EXT_MODE_ADDR, reg);

	/*{0x00001424} - DDR Controller Control (High) Register */
#if (defined(MV88F78X60) || defined(MV88F672X))
	reg = 0x0000D3FF;
#else
	reg = 0x0100D1FF;
#endif
	reg_write(REG_DDR_CONT_HIGH_ADDR, reg);

	/*{0x0000142C} - DDR3 Timing Register */
	reg = 0x014C2F38;
#if defined(MV88F78X60) || defined(MV88F672X)
	reg = 0x1FEC2F38;
#endif
	reg_write(0x142C, reg);

	/*{0x00001484} - MBus CPU Block Register */
#ifdef MV88F67XX
	if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
		reg_write(REG_MBUS_CPU_BLOCK_ADDR, 0x0000E907);
#endif

	/*
	 * In case of mixed dimm and on-board devices setup parameters will
	 * be taken statically
	 */
	/*{0x00001494} - DDR SDRAM ODT Control (Low) Register */
	reg = odt_config[cs_ena];
	reg_write(REG_SDRAM_ODT_CTRL_LOW_ADDR, reg);

	/*{0x00001498} - DDR SDRAM ODT Control (High) Register */
	reg = 0x00000000;
	reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, reg);

	/*{0x0000149C} - DDR Dunit ODT Control Register */
	reg = cs_ena;
	reg_write(REG_DUNIT_ODT_CTRL_ADDR, reg);

	/*{0x000014A0} - DRAM FIFO Control Register */
#if defined(MV88F672X)
	reg = 0x000006A9;
	reg_write(REG_DRAM_FIFO_CTRL_ADDR, reg);
#endif

	/*{0x000014C0} - DRAM address and Control Driving Strength */
	reg_write(REG_DRAM_ADDR_CTRL_DRIVE_STRENGTH_ADDR, 0x192435e9);

	/*{0x000014C4} - DRAM Data and DQS Driving Strength */
	reg_write(REG_DRAM_DATA_DQS_DRIVE_STRENGTH_ADDR, 0xB2C35E9);

#if (defined(MV88F78X60) || defined(MV88F672X))
	/*{0x000014CC} - DRAM Main Pads Calibration Machine Control Register */
	reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
	reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg | (1 << 0));
#endif

#if defined(MV88F672X)
	/* DRAM Main Pads Calibration Machine Control Register */
	/* 0x14CC[4:3] - CalUpdateControl = IntOnly */
	reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
	reg &= 0xFFFFFFE7;
	reg |= (1 << 3);
	reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg);
#endif

#ifdef DUNIT_SPD
	cs_count = 0;
	dimm_cnt = 0;
	for (cs = 0; cs < MAX_CS; cs++) {
		if ((1 << cs) & DIMM_CS_BITMAP) {
			if ((1 << cs) & cs_ena) {
				if (dimm_info[dimm_cnt].num_of_module_ranks ==
				    cs_count) {
					dimm_cnt++;
					cs_count = 0;
				}
				cs_count++;
				reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8),
					  dimm_info[dimm_cnt].rank_capacity - 1);
			} else {
				reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8), 0);
			}
		}
	}
#endif

	/*{0x00020184} - Close FastPath - 2G */
	reg_write(REG_FASTPATH_WIN_0_CTRL_ADDR, 0);

	/*{0x00001538} - Read Data Sample Delays Register */
	reg = 0;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs))
			reg |= (cl << (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
	}

	reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
	DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Sample Delays = ", reg,
			  1);

	/*{0x0000153C} - Read Data Ready Delay Register */
	reg = 0;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg |= ((cl + 2) <<
				(REG_READ_DATA_READY_DELAYS_OFFS * cs));
		}
	}
	reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
	DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Ready Delays = ", reg, 1);

	/* Set MR registers */

	/* MR0 */
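	/*
	 * The 4-bit encoded CAS latency returned by ddr3_cl_to_valid_cl() is
	 * split across the MR0 image: bit 0 of the encoded value lands in
	 * register bit 2 and bits 3:1 in bits 6:4 (hence the "<< 3" below),
	 * matching the DDR3 MR0 CAS-latency field layout (A2 plus A6:A4).
	 */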
	reg = 0x00000600;
	tmp = ddr3_cl_to_valid_cl(cl);
	reg |= ((tmp & 0x1) << 2);
	reg |= ((tmp & 0xE) << 3);	/* to bit 4 */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg_write(REG_DDR3_MR0_CS_ADDR +
				  (cs << MR_CS_ADDR_OFFS), reg);
		}
	}

	/* MR1 */
	reg = 0x00000044 & REG_DDR3_MR1_ODT_MASK;
	if (cs_num > 1)
		reg = 0x00000046 & REG_DDR3_MR1_ODT_MASK;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg |= odt_static[cs_ena][cs];
			reg_write(REG_DDR3_MR1_CS_ADDR +
				  (cs << MR_CS_ADDR_OFFS), reg);
		}
	}

	/* MR2 */
	if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
		tmp = hclk_time / 2;
	else
		tmp = hclk_time;
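	/*
	 * tmp now holds the DDR clock period in ps; the ranges below pick
	 * the CAS write latency (CWL 5..12) that roughly corresponds to the
	 * JEDEC DDR3 speed bins (DDR3-800 -> CWL 5, DDR3-1600 -> CWL 8, and
	 * so on).
	 */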
	if (tmp >= 2500)
		cwl = 5;	/* CWL = 5 */
	else if (tmp >= 1875 && tmp < 2500)
		cwl = 6;	/* CWL = 6 */
	else if (tmp >= 1500 && tmp < 1875)
		cwl = 7;	/* CWL = 7 */
	else if (tmp >= 1250 && tmp < 1500)
		cwl = 8;	/* CWL = 8 */
	else if (tmp >= 1070 && tmp < 1250)
		cwl = 9;	/* CWL = 9 */
	else if (tmp >= 935 && tmp < 1070)
		cwl = 10;	/* CWL = 10 */
	else if (tmp >= 833 && tmp < 935)
		cwl = 11;	/* CWL = 11 */
	else if (tmp >= 750 && tmp < 833)
		cwl = 12;	/* CWL = 12 */
	else {
		cwl = 12;	/* CWL = 12 */
		printf("Unsupported DDR clock period %d ps\n", tmp);
	}

	reg = ((cwl - 5) << REG_DDR3_MR2_CWL_OFFS);
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg &= REG_DDR3_MR2_ODT_MASK;
			reg |= odt_dynamic[cs_ena][cs];
			reg_write(REG_DDR3_MR2_CS_ADDR +
				  (cs << MR_CS_ADDR_OFFS), reg);
		}
	}

	/* MR3 */
	reg = 0x00000000;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg_write(REG_DDR3_MR3_CS_ADDR +
				  (cs << MR_CS_ADDR_OFFS), reg);
		}
	}

	/* {0x00001428} - DDR ODT Timing (Low) Register */
	reg = 0;
	reg |= (((cl - cwl + 1) & 0xF) << 4);
	reg |= (((cl - cwl + 6) & 0xF) << 8);
	reg |= ((((cl - cwl + 6) >> 4) & 0x1) << 21);
	reg |= (((cl - 1) & 0xF) << 12);
	reg |= (((cl + 6) & 0x1F) << 16);
	reg_write(REG_ODT_TIME_LOW_ADDR, reg);

	/* {0x0000147C} - DDR ODT Timing (High) Register */
	reg = 0x00000071;
	reg |= ((cwl - 1) << 8);
	reg |= ((cwl + 5) << 12);
	reg_write(REG_ODT_TIME_HIGH_ADDR, reg);

#ifdef DUNIT_SPD
	/*{0x000015E0} - DDR3 Rank Control Register */
	reg = cs_ena;
	cs_count = 0;
	dimm_cnt = 0;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
			if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
				dimm_cnt++;
				cs_count = 0;
			}
			cs_count++;

			if (dimm_info[dimm_cnt].addr_mirroring &&
			    (cs == 1 || cs == 3) &&
			    (sum_info.type_info != SPD_MODULE_TYPE_RDIMM)) {
				reg |= (1 << (REG_DDR3_RANK_CTRL_MIRROR_OFFS + cs));
				DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Setting Address Mirroring for CS = ",
						  cs, 1);
			}
		}
	}
	reg_write(REG_DDR3_RANK_CTRL_ADDR, reg);
#endif

	/*{0xD00015E4} - ZQDS Configuration Register */
	reg = 0x00203c18;
	reg_write(REG_ZQC_CONF_ADDR, reg);

	/* {0x00015EC} - DDR PHY */
#if defined(MV88F78X60)
	reg = 0xF800AAA5;
	if (mv_ctrl_rev_get() == MV_78XX0_B0_REV)
		reg = 0xF800A225;
#else
	reg = 0xDE000025;
#if defined(MV88F672X)
	reg = 0xF800A225;
#endif
#endif
	reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);

#if (defined(MV88F78X60) || defined(MV88F672X))
	/* Registered DIMM support - supported only in AXP A0 devices */
	/* Currently supported for SPD detection only */
	/*
	 * Flow is according to the Registered DIMM chapter in the
	 * Functional Spec
	 */
	if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
		DEBUG_INIT_S("DDR3 Training Sequence - Registered DIMM detected\n");

		/* Set commands parity completion */
		reg = reg_read(REG_REGISTERED_DRAM_CTRL_ADDR);
		reg &= ~REG_REGISTERED_DRAM_CTRL_PARITY_MASK;
		reg |= 0x8;
		reg_write(REG_REGISTERED_DRAM_CTRL_ADDR, reg);

		/* De-assert M_RESETn and assert M_CKE */
		reg_write(REG_SDRAM_INIT_CTRL_ADDR,
			  1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);
		do {
			reg = (reg_read(REG_SDRAM_INIT_CTRL_ADDR)) &
				(1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);
		} while (reg);

		for (rc = 0; rc < SPD_RDIMM_RC_NUM; rc++) {
			if (rc != 6 && rc != 7) {
				/* Set CWA Command */
				reg = (REG_SDRAM_OPERATION_CMD_CWA &
				       ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
				reg |= ((dimm_info[0].dimm_rc[rc] &
					 REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
					REG_SDRAM_OPERATION_CWA_DATA_OFFS);
				reg |= rc << REG_SDRAM_OPERATION_CWA_RC_OFFS;

				/* Configure - Set Delay - tSTAB/tMRD */
				if (rc == 2 || rc == 10)
					reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);

				/* 0x1418 - SDRAM Operation Register */
				reg_write(REG_SDRAM_OPERATION_ADDR, reg);

				/*
				 * Poll the "cmd" field in the SDRAM OP
				 * register for 0x0
				 */
				do {
					reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
						(REG_SDRAM_OPERATION_CMD_MASK);
				} while (reg);
			}
		}
	}
#endif

	return MV_OK;
}

/*
 * Name:     ddr3_div - this function divides integers
 * Desc:
 * Args:     val - the value
 *           divider - the divider
 *           sub - subtraction value
 * Notes:
 * Returns:  required value
 */
u32 ddr3_div(u32 val, u32 divider, u32 sub)
{
	return val / divider + (val % divider > 0 ? 1 : 0) - sub;
}
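/*
 * Example (illustrative): with a 1875 ps DDR clock and tRAS = 37500 ps,
 * ddr3_div(37500, 1875, 1) returns 20 - 1 = 19, i.e. the value is rounded
 * up to whole clock cycles and then reduced by 'sub' (typically 1, since
 * the timing register fields above hold the cycle count minus one).
 */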
/*
 * Name:     ddr3_get_max_val
 * Desc:
 * Args:
 * Notes:
 * Returns:
 */
u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val)
{
#ifdef DUNIT_STATIC
	if (dimm_num > 0) {
		if (spd_val >= static_val)
			return spd_val;
		else
			return static_val;
	} else {
		return static_val;
	}
#else
	return spd_val;
#endif
}

/*
 * Name:     ddr3_get_min_val
 * Desc:
 * Args:
 * Notes:
 * Returns:
 */
u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val)
{
#ifdef DUNIT_STATIC
	if (dimm_num > 0) {
		if (spd_val <= static_val)
			return spd_val;
		else
			return static_val;
	} else
		return static_val;
#else
	return spd_val;
#endif
}
#endif