  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
  4. */
  5. #include <common.h>
  6. #include <ram.h>
  7. #include <asm/io.h>
  8. #include <asm/arch-rockchip/sdram.h>
  9. #include <asm/arch-rockchip/sdram_pctl_px30.h>
  10. #include <linux/delay.h>
/*
 * Issue a mode-register read through the uPCTL2 MR access interface.
 *
 * rank = 1: cs0
 * rank = 2: cs1
 */
void pctl_read_mr(void __iomem *pctl_base, u32 rank, u32 mr_num)
{
	/* target rank in MRCTRL0[5:4]; bit 0 = 1 selects an MR read */
	writel((rank << 4) | (1 << 0), pctl_base + DDR_PCTL2_MRCTRL0);
	/* MR number in MRCTRL1[15:8]; no operand needed for a read */
	writel((mr_num << 8), pctl_base + DDR_PCTL2_MRCTRL1);
	/* MRCTRL0 bit 31 triggers the transaction */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	/* wait for the controller to accept the request ... */
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	/* ... then for the MR access itself to finish */
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & PCTL2_MR_WR_BUSY)
		continue;
}
/*
 * Issue a mode-register write through the uPCTL2 MR access interface.
 *
 * rank = 1: cs0
 * rank = 2: cs1
 * rank = 3: cs0 & cs1
 * note: be careful to keep the original MR value for fields not
 * being changed
 */
int pctl_write_mr(void __iomem *pctl_base, u32 rank, u32 mr_num, u32 arg,
		  u32 dramtype)
{
	/* make sure no earlier MR transaction is still in flight */
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & PCTL2_MR_WR_BUSY)
		continue;
	if (dramtype == DDR3 || dramtype == DDR4) {
		/* DDR3/4: MR number in MRCTRL0[15:12]; bit 0 = 0 selects a write */
		writel((mr_num << 12) | (rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel(arg, pctl_base + DDR_PCTL2_MRCTRL1);
	} else {
		/* other DRAM types: MR number in MRCTRL1[15:8], 8-bit operand in [7:0] */
		writel((rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel((mr_num << 8) | (arg & 0xff),
		       pctl_base + DDR_PCTL2_MRCTRL1);
	}
	/* trigger the transaction (MRCTRL0 bit 31) and wait for completion */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & PCTL2_MR_WR_BUSY)
		continue;
	return 0;
}
/*
 * Run DDR4 VrefDQ (MR6) training for the given rank(s).
 *
 * rank : 1:cs0, 2:cs1, 3:cs0&cs1
 * vrefrate: VrefDQ in hundredths of a percent, e.g. 4500 = 45%
 *
 * Returns 0 on success, -1 if the DRAM is not DDR4 or vrefrate is
 * outside the supported 45.00%..92.00% window.
 */
int pctl_write_vrefdq(void __iomem *pctl_base, u32 rank, u32 vrefrate,
		      u32 dramtype)
{
	u32 tccd_l, value;
	u32 dis_auto_zq = 0;
	/* only DDR4 has the MR6 VrefDQ mechanism */
	if (dramtype != DDR4 || vrefrate < 4500 ||
	    vrefrate > 9200)
		return (-1);
	/* tCCD_L comes from DRAMTMG4[19:16]; MR6 encodes it as (tCCD_L - 4) in [12:10] */
	tccd_l = (readl(pctl_base + DDR_PCTL2_DRAMTMG4) >> 16) & 0xf;
	tccd_l = (tccd_l - 4) << 10;
	if (vrefrate > 7500) {
		/* range 1: 60.00% base, 0.65% per step */
		value = ((vrefrate - 6000) / 65) | tccd_l;
	} else {
		/* range 2: 45.00% base, 0.65% per step; MR6 bit 6 selects range 2 */
		value = ((vrefrate - 4500) / 65) | tccd_l | (1 << 6);
	}
	/* ZQ calibration and auto refresh must not run during VrefDQ training */
	dis_auto_zq = pctl_dis_zqcs_aref(pctl_base);
	/* enable vrefdq calibration: MR6 bit 7 = 1 enters training mode */
	pctl_write_mr(pctl_base, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvrefdqe */
	/* write vrefdq value (second write while still in training mode) */
	pctl_write_mr(pctl_base, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvref_time */
	/* exit training mode: MR6 bit 7 = 0, value is latched */
	pctl_write_mr(pctl_base, rank, 6, value | (0 << 7), dramtype);
	udelay(1);/* tvrefdqx */
	pctl_rest_zqcs_aref(pctl_base, dis_auto_zq);
	return 0;
}
/*
 * Toggle RFSHCTL3 bit 1 (refresh_update_level in the DW uPCTL2) so the
 * controller latches refresh settings changed in RFSHCTL3.
 * Always returns 0.
 */
static int upctl2_update_ref_reg(void __iomem *pctl_base)
{
	u32 ret;
	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
	return 0;
}
/*
 * Disable automatic ZQ calibration (ZQCS) and auto refresh.
 *
 * Returns 1 if auto ZQCS was enabled on entry and has now been
 * disabled (so pctl_rest_zqcs_aref() knows to re-enable it),
 * 0 if it was already disabled.
 */
u32 pctl_dis_zqcs_aref(void __iomem *pctl_base)
{
	u32 dis_auto_zq = 0;
	/* disable zqcs: ZQCTL0 bit 31 set means auto ZQ calibration off */
	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
	      (1ul << 31))) {
		dis_auto_zq = 1;
		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
	}
	/* disable auto refresh: RFSHCTL3 bit 0 */
	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
	/* toggle refresh_update_level so the RFSHCTL3 change takes effect */
	upctl2_update_ref_reg(pctl_base);
	return dis_auto_zq;
}
/*
 * Undo pctl_dis_zqcs_aref(): re-enable auto refresh, and re-enable
 * auto ZQ calibration only if @dis_auto_zq says we were the ones who
 * disabled it.
 */
void pctl_rest_zqcs_aref(void __iomem *pctl_base, u32 dis_auto_zq)
{
	/* restore zqcs */
	if (dis_auto_zq)
		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
	/* restore auto refresh */
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
	/* toggle refresh_update_level so the RFSHCTL3 change takes effect */
	upctl2_update_ref_reg(pctl_base);
}
  115. u32 pctl_remodify_sdram_params(struct ddr_pctl_regs *pctl_regs,
  116. struct sdram_cap_info *cap_info,
  117. u32 dram_type)
  118. {
  119. u32 tmp = 0, tmp_adr = 0, i;
  120. for (i = 0; pctl_regs->pctl[i][0] != 0xFFFFFFFF; i++) {
  121. if (pctl_regs->pctl[i][0] == 0) {
  122. tmp = pctl_regs->pctl[i][1];/* MSTR */
  123. tmp_adr = i;
  124. }
  125. }
  126. tmp &= ~((3ul << 30) | (3ul << 24) | (3ul << 12));
  127. switch (cap_info->dbw) {
  128. case 2:
  129. tmp |= (3ul << 30);
  130. break;
  131. case 1:
  132. tmp |= (2ul << 30);
  133. break;
  134. case 0:
  135. default:
  136. tmp |= (1ul << 30);
  137. break;
  138. }
  139. /*
  140. * If DDR3 or DDR4 MSTR.active_ranks=1,
  141. * it will gate memory clock when enter power down.
  142. * Force set active_ranks to 3 to workaround it.
  143. */
  144. if (cap_info->rank == 2 || dram_type == DDR3 ||
  145. dram_type == DDR4)
  146. tmp |= 3 << 24;
  147. else
  148. tmp |= 1 << 24;
  149. tmp |= (2 - cap_info->bw) << 12;
  150. pctl_regs->pctl[tmp_adr][1] = tmp;
  151. return 0;
  152. }
/*
 * Program the controller from a pre-computed register table and set
 * the low-power idle parameters.
 *
 * @pctl_regs: table of {offset, value} pairs terminated by 0xFFFFFFFF
 * @sr_idle:   self-refresh idle timeout, placed in PWRTMG[23:16]
 * @pd_idle:   power-down idle timeout, placed in PWRTMG[4:0]
 *
 * Always returns 0.
 */
int pctl_cfg(void __iomem *pctl_base, struct ddr_pctl_regs *pctl_regs,
	     u32 sr_idle, u32 pd_idle)
{
	u32 i;
	/* replay the {offset, value} table into the controller registers */
	for (i = 0; pctl_regs->pctl[i][0] != 0xFFFFFFFF; i++) {
		writel(pctl_regs->pctl[i][1],
		       pctl_base + pctl_regs->pctl[i][0]);
	}
	/* self-refresh idle in PWRTMG[23:16], power-down idle in PWRTMG[4:0] */
	clrsetbits_le32(pctl_base + DDR_PCTL2_PWRTMG,
			(0xff << 16) | 0x1f,
			((sr_idle & 0xff) << 16) | (pd_idle & 0x1f));
	/* hardware low-power idle window HWLPCTL[27:16] = 5 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_HWLPCTL,
			0xfff << 16,
			5 << 16);
	/* disable zqcs */
	setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);
	return 0;
}