ddr_cal.c 5.2 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206
  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Copyright (C) 2020 MediaTek Inc.
  4. *
  5. * Author: Weijie Gao <weijie.gao@mediatek.com>
  6. */
  7. #include <common.h>
  8. #include <asm/addrspace.h>
  9. #include <asm/cacheops.h>
  10. #include <asm/global_data.h>
  11. #include <linux/bitops.h>
  12. #include <linux/io.h>
  13. #include <mach/mc.h>
  14. DECLARE_GLOBAL_DATA_PTR;
/*
 * Start points for the DQS delay searches: the minimum-edge search walks
 * coarse-grain down from 6 and fine-grain down from 15; the maximum-edge
 * search walks coarse-grain up from 7 and fine-grain up from 0.
 */
#define COARSE_MIN_START 6
#define FINE_MIN_START 15
#define COARSE_MAX_START 7
#define FINE_MAX_START 0

/* Test pattern spans 128 cache lines so whole lines can be flushed/fetched */
#define NUM_OF_CACHELINE 128
#define TEST_PAT_SIZE (NUM_OF_CACHELINE * CONFIG_SYS_CACHELINE_SIZE)

/*
 * Reset value programmed into MEMCTL_DDR_DQS_DLY_REG between test runs:
 * coarse = 7, fine = 4 for both DQS0 and DQS1 byte lanes.
 */
#define INIT_DQS_VAL ((7 << DQS1_DELAY_COARSE_TUNING_S) | \
		      (4 << DQS1_DELAY_FINE_TUNING_S) | \
		      (7 << DQS0_DELAY_COARSE_TUNING_S) | \
		      (4 << DQS0_DELAY_FINE_TUNING_S))
/*
 * Issue a MIPS "pref" (prefetch) instruction with hint @op on @addr.
 * @op must be a compile-time constant (the "i" asm constraint); the caller
 * below uses hint 0, which requests a data load into the cache.
 */
static inline void pref_op(int op, const volatile void *addr)
{
	__asm__ __volatile__("pref %0, 0(%1)" : : "i" (op), "r" (addr));
}
/*
 * Check whether DRAM reads fail with a given DQS delay setting.
 *
 * For a set of windows spread across the whole DRAM (64 sample offsets,
 * each TEST_PAT_SIZE bytes):
 *  - with the known-good INIT_DQS_VAL programmed, fill the window through
 *    the cached alias (KSEG0) with dummy data, then write the real test
 *    pattern through the uncached alias (KSEG1) straight to DRAM;
 *  - program the DQS delay under test, invalidate the cached lines, and
 *    prefetch them back so the reads happen with the new delay;
 *  - compare the cached readback against the expected pattern.
 *
 * @memc:    memory controller MMIO base
 * @memsize: total DRAM size in bytes
 * @dqsval:  DQS delay register value under test
 * @bias:    offset mixed into the pattern so min/max searches use
 *           different data
 *
 * Returns true on the first mismatch (delay value is bad), false if every
 * sampled window read back correctly.
 */
static inline bool dqs_test_error(void __iomem *memc, u32 memsize, u32 dqsval,
				  u32 bias)
{
	u32 *nca, *ca;
	u32 off;
	int i;

	/* Sample 64 windows: step is memsize/64, stop before running off the end */
	for (off = 0; off < memsize - TEST_PAT_SIZE; off += (memsize >> 6)) {
		nca = (u32 *)KSEG1ADDR(off);	/* non-cached alias */
		ca = (u32 *)KSEG0ADDR(off);	/* cached alias */

		/* Use a safe DQS value while priming the window */
		writel(INIT_DQS_VAL, memc + MEMCTL_DDR_DQS_DLY_REG);
		wmb();

		/* Fill the cache lines with dummy data (cached writes) */
		for (i = 0; i < TEST_PAT_SIZE / sizeof(u32); i++)
			ca[i] = 0x1f1f1f1f;

		/* Write the real pattern directly to DRAM, bypassing the cache */
		for (i = 0; i < TEST_PAT_SIZE / sizeof(u32); i++)
			nca[i] = (u32)nca + i + bias;

		/* Switch to the DQS delay value being evaluated */
		writel(dqsval, memc + MEMCTL_DDR_DQS_DLY_REG);
		wmb();

		/* Drop the stale cached dummy data */
		for (i = 0; i < TEST_PAT_SIZE; i += CONFIG_SYS_CACHELINE_SIZE)
			mips_cache(HIT_INVALIDATE_D, (u8 *)ca + i);
		wmb();

		/* Re-fetch the lines from DRAM using the delay under test */
		for (i = 0; i < TEST_PAT_SIZE; i += CONFIG_SYS_CACHELINE_SIZE)
			pref_op(0, (u8 *)ca + i);

		/* Any mismatch means this DQS setting is unreliable */
		for (i = 0; i < TEST_PAT_SIZE / sizeof(u32); i++) {
			if (ca[i] != (u32)nca + i + bias)
				return true;
		}
	}

	return false;
}
  58. static inline int dqs_find_max(void __iomem *memc, u32 memsize, int initval,
  59. int maxval, int shift, u32 regval)
  60. {
  61. int fieldval;
  62. u32 dqsval;
  63. for (fieldval = initval; fieldval <= maxval; fieldval++) {
  64. dqsval = regval | (fieldval << shift);
  65. if (dqs_test_error(memc, memsize, dqsval, 3))
  66. return max(fieldval - 1, initval);
  67. }
  68. return maxval;
  69. }
  70. static inline int dqs_find_min(void __iomem *memc, u32 memsize, int initval,
  71. int minval, int shift, u32 regval)
  72. {
  73. int fieldval;
  74. u32 dqsval;
  75. for (fieldval = initval; fieldval >= minval; fieldval--) {
  76. dqsval = regval | (fieldval << shift);
  77. if (dqs_test_error(memc, memsize, dqsval, 1))
  78. return min(fieldval + 1, initval);
  79. }
  80. return minval;
  81. }
/*
 * Calibrate the DDR DQS read delays.
 *
 * For each active byte lane, search for the largest and smallest working
 * coarse/fine delay values, then program the midpoint of that window into
 * MEMCTL_DDR_DQS_DLY_REG.
 *
 * @memc:    memory controller MMIO base
 * @memsize: total DRAM size in bytes
 * @bw:      bus width indicator; also used as the number of byte lanes to
 *           calibrate (the loop runs @bw times) — NOTE(review): assumes
 *           IND_SDRAM_WIDTH_16BIT == 2 so that 16-bit parts tune both
 *           DQS0 and DQS1; confirm against mach/mc.h
 */
void ddr_calibrate(void __iomem *memc, u32 memsize, u32 bw)
{
	u32 dqs_coarse_min, dqs_coarse_max, dqs_coarse_val;
	u32 dqs_fine_min, dqs_fine_max, dqs_fine_val;
	u32 dqs_coarse_min_limit, dqs_fine_min_limit;
	u32 dlls, dqs_dll, ddr_cfg2_reg;
	u32 dqs_dly_tmp, dqs_dly, test_dqs, shift;
	u32 rem, mask;
	int i;

	/* Disable Self-refresh so test accesses hit DRAM predictably */
	clrbits_32(memc + MEMCTL_DDR_SELF_REFRESH_REG, SR_AUTO_EN);

	/* Save DDR_CFG2 and modify its DQS gating window */
	ddr_cfg2_reg = readl(memc + MEMCTL_DDR_CFG2_REG);
	mask = DQS0_GATING_WINDOW_M;
	if (bw == IND_SDRAM_WIDTH_16BIT)
		mask |= DQS1_GATING_WINDOW_M;
	clrbits_32(memc + MEMCTL_DDR_CFG2_REG, mask);

	/*
	 * Get minimum available DQS value: derive lower search limits from
	 * the master DLL delay readout (upper nibble -> coarse, lower
	 * nibble -> fine). Values >= 8 allow searching all the way to 0.
	 */
	dlls = readl(memc + MEMCTL_DLL_DBG_REG);
	dlls = (dlls & MST_DLY_SEL_M) >> MST_DLY_SEL_S;

	dqs_dll = dlls >> 4;
	if (dqs_dll <= 8)
		dqs_coarse_min_limit = 8 - dqs_dll;
	else
		dqs_coarse_min_limit = 0;

	dqs_dll = dlls & 0xf;
	if (dqs_dll <= 8)
		dqs_fine_min_limit = 8 - dqs_dll;
	else
		dqs_fine_min_limit = 0;

	/* Initial DQS register value */
	dqs_dly = INIT_DQS_VAL;

	/* Calibrate DQS0 and/or DQS1 (one 8-bit field per byte lane) */
	for (i = 0; i < bw; i++) {
		shift = i * 8;
		/* Clear this lane's coarse+fine fields before searching */
		dqs_dly &= ~(0xff << shift);

		/* Find maximum DQS coarse-grain (fine held at max, 0xf) */
		dqs_dly_tmp = dqs_dly | (0xf << shift);
		dqs_coarse_max = dqs_find_max(memc, memsize, COARSE_MAX_START,
					      0xf, 4 + shift, dqs_dly_tmp);

		/* Find maximum DQS fine-grain at that coarse setting */
		dqs_dly_tmp = dqs_dly | (dqs_coarse_max << (4 + shift));
		test_dqs = dqs_find_max(memc, memsize, FINE_MAX_START, 0xf,
					shift, dqs_dly_tmp);
		if (test_dqs == FINE_MAX_START) {
			/* Fine failed even at its start: back off one coarse step */
			dqs_coarse_max--;
			dqs_fine_max = 0xf;
		} else {
			dqs_fine_max = test_dqs - 1;
		}

		/* Find minimum DQS coarse-grain (fine fields left at 0) */
		dqs_dly_tmp = dqs_dly;
		dqs_coarse_min = dqs_find_min(memc, memsize, COARSE_MIN_START,
					      dqs_coarse_min_limit, 4 + shift,
					      dqs_dly_tmp);

		/* Find minimum DQS fine-grain at that coarse setting */
		dqs_dly_tmp = dqs_dly | (dqs_coarse_min << (4 + shift));
		test_dqs = dqs_find_min(memc, memsize, FINE_MIN_START,
					dqs_fine_min_limit, shift, dqs_dly_tmp);
		if (test_dqs == FINE_MIN_START + 1) {
			/* Fine failed immediately: advance one coarse step */
			dqs_coarse_min++;
			dqs_fine_min = 0;
		} else {
			dqs_fine_min = test_dqs;
		}

		/*
		 * Calculate central DQS coarse/fine value. A leftover half
		 * coarse step (rem) is folded into the fine value as 4 fine
		 * steps — NOTE(review): implies one coarse step == 8 fine
		 * steps, consistent with the -= 8 carry below.
		 */
		dqs_coarse_val = (dqs_coarse_max + dqs_coarse_min) >> 1;
		rem = (dqs_coarse_max + dqs_coarse_min) % 2;

		dqs_fine_val = (rem * 4) + ((dqs_fine_max + dqs_fine_min) >> 1);
		if (dqs_fine_val >= 0x10) {
			/* Fine overflowed its 4-bit field: carry into coarse */
			dqs_coarse_val++;
			dqs_fine_val -= 8;
		}

		/* Save current DQS value into this lane's fields */
		dqs_dly |= ((dqs_coarse_val << 4) | dqs_fine_val) << shift;
	}

	/* Set final DQS value */
	writel(dqs_dly, memc + MEMCTL_DDR_DQS_DLY_REG);

	/* Restore DDR_CFG2 */
	writel(ddr_cfg2_reg, memc + MEMCTL_DDR_CFG2_REG);

	/* Enable Self-refresh */
	setbits_32(memc + MEMCTL_DDR_SELF_REFRESH_REG, SR_AUTO_EN);
}