// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2020 MediaTek Inc.
 *
 * Author: Weijie Gao <weijie.gao@mediatek.com>
 */

#include <common.h>
#include <asm/addrspace.h>
#include <asm/cacheops.h>
#include <linux/bitops.h>
#include <linux/io.h>
#include <mach/mc.h>

DECLARE_GLOBAL_DATA_PTR;

#define COARSE_MIN_START	6
#define FINE_MIN_START		15
#define COARSE_MAX_START	7
#define FINE_MAX_START		0

#define NUM_OF_CACHELINE	128
#define TEST_PAT_SIZE		(NUM_OF_CACHELINE * CONFIG_SYS_CACHELINE_SIZE)
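
/* Initial DQS delay: coarse 7, fine 4 on both DQS0 and DQS1 */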
#define INIT_DQS_VAL		((7 << DQS1_DELAY_COARSE_TUNING_S) | \
				 (4 << DQS1_DELAY_FINE_TUNING_S) | \
				 (7 << DQS0_DELAY_COARSE_TUNING_S) | \
				 (4 << DQS0_DELAY_FINE_TUNING_S))
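
/* Issue a MIPS "pref" instruction with hint @op for the given address */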
static inline void pref_op(int op, const volatile void *addr)
{
	__asm__ __volatile__("pref %0, 0(%1)" : : "i" (op), "r" (addr));
}
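
/*
 * Check whether the DQS delay @dqsval causes read errors.
 *
 * For a number of test windows spread across the DRAM, a pattern is written
 * through the uncached (KSEG1) alias and dummy data is placed in the D-cache
 * through the cached (KSEG0) alias while the initial DQS delay is programmed.
 * The delay under test is then applied, the cache lines are invalidated and
 * prefetched so the data is re-read from DRAM, and the readback is compared
 * against the expected pattern.
 *
 * Returns true on the first mismatch, false if every window reads back
 * correctly.
 */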
static inline bool dqs_test_error(void __iomem *memc, u32 memsize, u32 dqsval,
				  u32 bias)
{
	u32 *nca, *ca;
	u32 off;
	int i;

	for (off = 0; off < memsize - TEST_PAT_SIZE; off += (memsize >> 6)) {
		nca = (u32 *)KSEG1ADDR(off);
		ca = (u32 *)KSEG0ADDR(off);

		writel(INIT_DQS_VAL, memc + MEMCTL_DDR_DQS_DLY_REG);
		wmb();

		for (i = 0; i < TEST_PAT_SIZE / sizeof(u32); i++)
			ca[i] = 0x1f1f1f1f;

		for (i = 0; i < TEST_PAT_SIZE / sizeof(u32); i++)
			nca[i] = (u32)nca + i + bias;

		writel(dqsval, memc + MEMCTL_DDR_DQS_DLY_REG);
		wmb();

		for (i = 0; i < TEST_PAT_SIZE; i += CONFIG_SYS_CACHELINE_SIZE)
			mips_cache(HIT_INVALIDATE_D, (u8 *)ca + i);
		wmb();

		for (i = 0; i < TEST_PAT_SIZE; i += CONFIG_SYS_CACHELINE_SIZE)
			pref_op(0, (u8 *)ca + i);

		for (i = 0; i < TEST_PAT_SIZE / sizeof(u32); i++) {
			if (ca[i] != (u32)nca + i + bias)
				return true;
		}
	}

	return false;
}
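
/*
 * Sweep a DQS delay field upward from @initval towards @maxval and return
 * the value just below the first failing setting (never less than @initval),
 * or @maxval if every setting passes.
 */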
static inline int dqs_find_max(void __iomem *memc, u32 memsize, int initval,
			       int maxval, int shift, u32 regval)
{
	int fieldval;
	u32 dqsval;

	for (fieldval = initval; fieldval <= maxval; fieldval++) {
		dqsval = regval | (fieldval << shift);

		if (dqs_test_error(memc, memsize, dqsval, 3))
			return max(fieldval - 1, initval);
	}

	return maxval;
}
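
/*
 * Sweep a DQS delay field downward from @initval towards @minval and return
 * the value just above the first failing setting (never more than @initval),
 * or @minval if every setting passes.
 */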
static inline int dqs_find_min(void __iomem *memc, u32 memsize, int initval,
			       int minval, int shift, u32 regval)
{
	int fieldval;
	u32 dqsval;

	for (fieldval = initval; fieldval >= minval; fieldval--) {
		dqsval = regval | (fieldval << shift);

		if (dqs_test_error(memc, memsize, dqsval, 1))
			return min(fieldval + 1, initval);
	}

	return minval;
}
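
/*
 * Calibrate the DQS delay of each data byte lane.
 *
 * With auto self-refresh disabled and the DQS gating window bits cleared in
 * DDR_CFG2, the lower search limits are derived from the DLL master delay
 * selection. For each lane, the largest and smallest coarse/fine delays that
 * still read back correctly are found and their midpoint is programmed as
 * the final DQS delay. DDR_CFG2 and self-refresh are restored afterwards.
 */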
void ddr_calibrate(void __iomem *memc, u32 memsize, u32 bw)
{
	u32 dqs_coarse_min, dqs_coarse_max, dqs_coarse_val;
	u32 dqs_fine_min, dqs_fine_max, dqs_fine_val;
	u32 dqs_coarse_min_limit, dqs_fine_min_limit;
	u32 dlls, dqs_dll, ddr_cfg2_reg;
	u32 dqs_dly_tmp, dqs_dly, test_dqs, shift;
	u32 rem, mask;
	int i;

	/* Disable Self-refresh */
	clrbits_32(memc + MEMCTL_DDR_SELF_REFRESH_REG, SR_AUTO_EN);

	/* Save DDR_CFG2 and modify its DQS gating window */
	ddr_cfg2_reg = readl(memc + MEMCTL_DDR_CFG2_REG);

	mask = DQS0_GATING_WINDOW_M;
	if (bw == IND_SDRAM_WIDTH_16BIT)
		mask |= DQS1_GATING_WINDOW_M;

	clrbits_32(memc + MEMCTL_DDR_CFG2_REG, mask);

	/* Get minimum available DQS value */
	dlls = readl(memc + MEMCTL_DLL_DBG_REG);
	dlls = (dlls & MST_DLY_SEL_M) >> MST_DLY_SEL_S;

	dqs_dll = dlls >> 4;
	if (dqs_dll <= 8)
		dqs_coarse_min_limit = 8 - dqs_dll;
	else
		dqs_coarse_min_limit = 0;

	dqs_dll = dlls & 0xf;
	if (dqs_dll <= 8)
		dqs_fine_min_limit = 8 - dqs_dll;
	else
		dqs_fine_min_limit = 0;

	/* Initial DQS register value */
	dqs_dly = INIT_DQS_VAL;

	/* Calibrate DQS0 and/or DQS1 */
	for (i = 0; i < bw; i++) {
		shift = i * 8;
		dqs_dly &= ~(0xff << shift);

		/* Find maximum DQS coarse-grain */
		dqs_dly_tmp = dqs_dly | (0xf << shift);
		dqs_coarse_max = dqs_find_max(memc, memsize, COARSE_MAX_START,
					      0xf, 4 + shift, dqs_dly_tmp);

		/* Find maximum DQS fine-grain */
		dqs_dly_tmp = dqs_dly | (dqs_coarse_max << (4 + shift));
		test_dqs = dqs_find_max(memc, memsize, FINE_MAX_START, 0xf,
					shift, dqs_dly_tmp);

		if (test_dqs == FINE_MAX_START) {
			dqs_coarse_max--;
			dqs_fine_max = 0xf;
		} else {
			dqs_fine_max = test_dqs - 1;
		}

		/* Find minimum DQS coarse-grain */
		dqs_dly_tmp = dqs_dly;
		dqs_coarse_min = dqs_find_min(memc, memsize, COARSE_MIN_START,
					      dqs_coarse_min_limit, 4 + shift,
					      dqs_dly_tmp);

		/* Find minimum DQS fine-grain */
		dqs_dly_tmp = dqs_dly | (dqs_coarse_min << (4 + shift));
		test_dqs = dqs_find_min(memc, memsize, FINE_MIN_START,
					dqs_fine_min_limit, shift, dqs_dly_tmp);

		if (test_dqs == FINE_MIN_START + 1) {
			dqs_coarse_min++;
			dqs_fine_min = 0;
		} else {
			dqs_fine_min = test_dqs;
		}

		/*
		 * Calculate the central DQS coarse/fine value of the passing
		 * window. An odd coarse sum contributes an extra 4 to the
		 * fine value; if the fine value overflows its 4-bit field,
		 * one more coarse step is taken and the fine value is
		 * reduced by 8.
		 */
		dqs_coarse_val = (dqs_coarse_max + dqs_coarse_min) >> 1;
		rem = (dqs_coarse_max + dqs_coarse_min) % 2;

		dqs_fine_val = (rem * 4) + ((dqs_fine_max + dqs_fine_min) >> 1);
		if (dqs_fine_val >= 0x10) {
			dqs_coarse_val++;
			dqs_fine_val -= 8;
		}

		/* Save current DQS value */
		dqs_dly |= ((dqs_coarse_val << 4) | dqs_fine_val) << shift;
	}

	/* Set final DQS value */
	writel(dqs_dly, memc + MEMCTL_DDR_DQS_DLY_REG);

	/* Restore DDR_CFG2 */
	writel(ddr_cfg2_reg, memc + MEMCTL_DDR_CFG2_REG);

	/* Enable Self-refresh */
	setbits_32(memc + MEMCTL_DDR_SELF_REFRESH_REG, SR_AUTO_EN);
}