stm32mp1_ddr.c
// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
 */

#define LOG_CATEGORY UCLASS_RAM

#include <common.h>
#include <clk.h>
#include <log.h>
#include <ram.h>
#include <reset.h>
#include <timer.h>
#include <asm/io.h>
#include <asm/arch/ddr.h>
#include <linux/bitops.h>
#include <linux/delay.h>
#include <linux/iopoll.h>

#include "stm32mp1_ddr.h"
#include "stm32mp1_ddr_regs.h"

#define RCC_DDRITFCR		0xD8

#define RCC_DDRITFCR_DDRCAPBRST		(BIT(14))
#define RCC_DDRITFCR_DDRCAXIRST		(BIT(15))
#define RCC_DDRITFCR_DDRCORERST		(BIT(16))
#define RCC_DDRITFCR_DPHYAPBRST		(BIT(17))
#define RCC_DDRITFCR_DPHYRST		(BIT(18))
#define RCC_DDRITFCR_DPHYCTLRST		(BIT(19))
#define RCC_DDRITFCR_DDRCKMOD_MASK	GENMASK(22, 20)
#define RCC_DDRITFCR_DDRCKMOD_ASR	BIT(20)

struct reg_desc {
	const char *name;
	u16 offset;	/* offset for base address */
	u8 par_offset;	/* offset for parameter array */
};

#define INVALID_OFFSET	0xFF

#define DDRCTL_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrctl, x),\
	 offsetof(struct y, x)}

#define DDRPHY_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrphy, x),\
	 offsetof(struct y, x)}

#define DDR_REG_DYN(x) \
	{#x,\
	 offsetof(struct stm32mp1_ddrctl, x),\
	 INVALID_OFFSET}

#define DDRPHY_REG_DYN(x) \
	{#x,\
	 offsetof(struct stm32mp1_ddrphy, x),\
	 INVALID_OFFSET}
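
/*
 * Each reg_desc entry pairs a controller/PHY register with the field of the
 * same name in the parameter structures filled from the device tree.  For
 * example, DDRCTL_REG(mstr, stm32mp1_ddrctrl_reg) expands to:
 *
 *	{"mstr",
 *	 offsetof(struct stm32mp1_ddrctl, mstr),
 *	 offsetof(struct stm32mp1_ddrctrl_reg, mstr)}
 *
 * so set_reg() below can copy each parameter to its register by offset.
 * The *_DYN variants have no parameter counterpart (INVALID_OFFSET).
 */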

/***********************************************************
 * PARAMETERS: values read from the device tree:
 * size / order must stay aligned with the binding,
 * modification NOT ALLOWED !!!
 ***********************************************************/
#define DDRCTL_REG_REG_SIZE	25	/* st,ctl-reg */
#define DDRCTL_REG_TIMING_SIZE	12	/* st,ctl-timing */
#define DDRCTL_REG_MAP_SIZE	9	/* st,ctl-map */
#define DDRCTL_REG_PERF_SIZE	17	/* st,ctl-perf */

#define DDRPHY_REG_REG_SIZE	11	/* st,phy-reg */
#define DDRPHY_REG_TIMING_SIZE	10	/* st,phy-timing */
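
/*
 * Illustrative note (derived from the sizes above, not a verbatim binding
 * excerpt): each device-tree property is an array of u32 cells whose length
 * and order match the tables below: st,ctl-reg carries 25 cells in the order
 * of ddr_reg[], st,ctl-timing 12 cells (ddr_timing[]), st,ctl-map 9 cells
 * (ddr_map[]), st,ctl-perf 17 cells (ddr_perf[]), st,phy-reg 11 cells
 * (ddrphy_reg[]) and st,phy-timing 10 cells (ddrphy_timing[]).
 */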

#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct reg_desc ddr_reg[DDRCTL_REG_REG_SIZE] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct reg_desc ddr_timing[DDRCTL_REG_TIMING_SIZE] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct reg_desc ddr_map[DDRCTL_REG_MAP_SIZE] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct reg_desc ddr_perf[DDRCTL_REG_PERF_SIZE] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};

#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct reg_desc ddrphy_reg[DDRPHY_REG_REG_SIZE] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct reg_desc ddrphy_timing[DDRPHY_REG_TIMING_SIZE] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

/**************************************************************
 * DYNAMIC REGISTERS: only used for debug purposes (read/modify)
 **************************************************************/
#ifdef CONFIG_STM32MP1_DDR_INTERACTIVE
static const struct reg_desc ddr_dyn[] = {
	DDR_REG_DYN(stat),
	DDR_REG_DYN(init0),
	DDR_REG_DYN(dfimisc),
	DDR_REG_DYN(dfistat),
	DDR_REG_DYN(swctl),
	DDR_REG_DYN(swstat),
	DDR_REG_DYN(pctrl_0),
	DDR_REG_DYN(pctrl_1),
};

#define DDR_REG_DYN_SIZE	ARRAY_SIZE(ddr_dyn)

static const struct reg_desc ddrphy_dyn[] = {
	DDRPHY_REG_DYN(pir),
	DDRPHY_REG_DYN(pgsr),
	DDRPHY_REG_DYN(zq0sr0),
	DDRPHY_REG_DYN(zq0sr1),
	DDRPHY_REG_DYN(dx0gsr0),
	DDRPHY_REG_DYN(dx0gsr1),
	DDRPHY_REG_DYN(dx0dllcr),
	DDRPHY_REG_DYN(dx0dqtr),
	DDRPHY_REG_DYN(dx0dqstr),
	DDRPHY_REG_DYN(dx1gsr0),
	DDRPHY_REG_DYN(dx1gsr1),
	DDRPHY_REG_DYN(dx1dllcr),
	DDRPHY_REG_DYN(dx1dqtr),
	DDRPHY_REG_DYN(dx1dqstr),
	DDRPHY_REG_DYN(dx2gsr0),
	DDRPHY_REG_DYN(dx2gsr1),
	DDRPHY_REG_DYN(dx2dllcr),
	DDRPHY_REG_DYN(dx2dqtr),
	DDRPHY_REG_DYN(dx2dqstr),
	DDRPHY_REG_DYN(dx3gsr0),
	DDRPHY_REG_DYN(dx3gsr1),
	DDRPHY_REG_DYN(dx3dllcr),
	DDRPHY_REG_DYN(dx3dqtr),
	DDRPHY_REG_DYN(dx3dqstr),
};

#define DDRPHY_REG_DYN_SIZE	ARRAY_SIZE(ddrphy_dyn)

#endif

/*****************************************************************
 * REGISTERS ARRAY: used to parse device tree and interactive mode
 *****************************************************************/
enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
#ifdef CONFIG_STM32MP1_DDR_INTERACTIVE
	/*
	 * dynamic registers => managed in driver or not changed,
	 * can be dumped in interactive mode
	 */
	REG_DYN,
	REGPHY_DYN,
#endif
	REG_TYPE_NB
};

enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	u8 size;
	enum base_type base;
};

const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		"static", ddr_reg, DDRCTL_REG_REG_SIZE, DDR_BASE},
	[REG_TIMING] = {
		"timing", ddr_timing, DDRCTL_REG_TIMING_SIZE, DDR_BASE},
	[REG_PERF] = {
		"perf", ddr_perf, DDRCTL_REG_PERF_SIZE, DDR_BASE},
	[REG_MAP] = {
		"map", ddr_map, DDRCTL_REG_MAP_SIZE, DDR_BASE},
	[REGPHY_REG] = {
		"static", ddrphy_reg, DDRPHY_REG_REG_SIZE, DDRPHY_BASE},
	[REGPHY_TIMING] = {
		"timing", ddrphy_timing, DDRPHY_REG_TIMING_SIZE, DDRPHY_BASE},
#ifdef CONFIG_STM32MP1_DDR_INTERACTIVE
	[REG_DYN] = {
		"dyn", ddr_dyn, DDR_REG_DYN_SIZE, DDR_BASE},
	[REGPHY_DYN] = {
		"dyn", ddrphy_dyn, DDRPHY_REG_DYN_SIZE, DDRPHY_BASE},
#endif
};

const char *base_name[] = {
	[DDR_BASE] = "ctl",
	[DDRPHY_BASE] = "phy",
};
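
/* Return the MMIO base of the DDR controller or of the PHY for the given base type. */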
static u32 get_base_addr(const struct ddr_info *priv, enum base_type base)
{
	if (base == DDRPHY_BASE)
		return (u32)priv->phy;
	else
		return (u32)priv->ctl;
}
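
/*
 * Program every register of the given group: for each descriptor, read the
 * value at par_offset inside the parameter structure (filled from the device
 * tree) and write it to the matching controller/PHY register.
 */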
static void set_reg(const struct ddr_info *priv,
		    enum reg_type type,
		    const void *param)
{
	unsigned int i;
	unsigned int *ptr, value;
	enum base_type base = ddr_registers[type].base;
	u32 base_addr = get_base_addr(priv, base);
	const struct reg_desc *desc = ddr_registers[type].desc;

	log_debug("init %s\n", ddr_registers[type].name);
	for (i = 0; i < ddr_registers[type].size; i++) {
		ptr = (unsigned int *)(base_addr + desc[i].offset);
		if (desc[i].par_offset == INVALID_OFFSET) {
			log_err("invalid parameter offset for %s", desc[i].name);
		} else {
			value = *((u32 *)((u32)param +
					  desc[i].par_offset));
			writel(value, ptr);
			log_debug("[0x%x] %s= 0x%08x\n",
				  (u32)ptr, desc[i].name, value);
		}
	}
}

#ifdef CONFIG_STM32MP1_DDR_INTERACTIVE
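/*
 * Interactive-mode dump helpers: print "<name>= <value>" for one register
 * read from the hardware, or for one parameter read from the configuration.
 */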
static void stm32mp1_dump_reg_desc(u32 base_addr, const struct reg_desc *desc)
{
	unsigned int *ptr;

	ptr = (unsigned int *)(base_addr + desc->offset);
	printf("%s= 0x%08x\n", desc->name, readl(ptr));
}

static void stm32mp1_dump_param_desc(u32 par_addr, const struct reg_desc *desc)
{
	unsigned int *ptr;

	ptr = (unsigned int *)(par_addr + desc->par_offset);
	printf("%s= 0x%08x\n", desc->name, readl(ptr));
}
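
/*
 * Look up a register by name across all register tables; on success return
 * its descriptor and store the matching table index in *type, otherwise
 * return NULL with *type set to REG_TYPE_NB.
 */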
static const struct reg_desc *found_reg(const char *name, enum reg_type *type)
{
	unsigned int i, j;
	const struct reg_desc *desc;

	for (i = 0; i < ARRAY_SIZE(ddr_registers); i++) {
		desc = ddr_registers[i].desc;
		for (j = 0; j < ddr_registers[i].size; j++) {
			if (strcmp(name, desc[j].name) == 0) {
				*type = i;
				return &desc[j];
			}
		}
	}
	*type = REG_TYPE_NB;
	return NULL;
}
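
/*
 * Dump registers from the hardware: with no name dump everything, with
 * "ctl", "phy" or a group name dump the matching tables, otherwise dump the
 * single register with that name.  Returns 0 if something was dumped,
 * -1 otherwise.
 */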
int stm32mp1_dump_reg(const struct ddr_info *priv,
		      const char *name)
{
	unsigned int i, j;
	const struct reg_desc *desc;
	u32 base_addr;
	enum base_type p_base;
	enum reg_type type;
	const char *p_name;
	enum base_type filter = NONE_BASE;
	int result = -1;

	if (name) {
		if (strcmp(name, base_name[DDR_BASE]) == 0)
			filter = DDR_BASE;
		else if (strcmp(name, base_name[DDRPHY_BASE]) == 0)
			filter = DDRPHY_BASE;
	}

	for (i = 0; i < ARRAY_SIZE(ddr_registers); i++) {
		p_base = ddr_registers[i].base;
		p_name = ddr_registers[i].name;
		if (!name || (filter == p_base || !strcmp(name, p_name))) {
			result = 0;
			desc = ddr_registers[i].desc;
			base_addr = get_base_addr(priv, p_base);
			printf("==%s.%s==\n", base_name[p_base], p_name);
			for (j = 0; j < ddr_registers[i].size; j++)
				stm32mp1_dump_reg_desc(base_addr, &desc[j]);
		}
	}
	if (result) {
		desc = found_reg(name, &type);
		if (desc) {
			p_base = ddr_registers[type].base;
			base_addr = get_base_addr(priv, p_base);
			stm32mp1_dump_reg_desc(base_addr, desc);
			result = 0;
		}
	}
	return result;
}
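
/* Write a hexadecimal value (given as a string) to the named register and echo it back. */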
void stm32mp1_edit_reg(const struct ddr_info *priv,
		       char *name, char *string)
{
	unsigned long *ptr, value;
	enum reg_type type;
	enum base_type base;
	const struct reg_desc *desc;
	u32 base_addr;

	desc = found_reg(name, &type);
	if (!desc) {
		printf("%s not found\n", name);
		return;
	}
	if (strict_strtoul(string, 16, &value) < 0) {
		printf("invalid value %s\n", string);
		return;
	}
	base = ddr_registers[type].base;
	base_addr = get_base_addr(priv, base);
	ptr = (unsigned long *)(base_addr + desc->offset);
	writel(value, ptr);
	printf("%s= 0x%08x\n", desc->name, readl(ptr));
}
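
/*
 * Return the address of the parameter sub-structure (inside the device-tree
 * configuration) backing the given register group, or 0 for the dynamic
 * groups which have no parameters.
 */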
static u32 get_par_addr(const struct stm32mp1_ddr_config *config,
			enum reg_type type)
{
	u32 par_addr = 0x0;

	switch (type) {
	case REG_REG:
		par_addr = (u32)&config->c_reg;
		break;
	case REG_TIMING:
		par_addr = (u32)&config->c_timing;
		break;
	case REG_PERF:
		par_addr = (u32)&config->c_perf;
		break;
	case REG_MAP:
		par_addr = (u32)&config->c_map;
		break;
	case REGPHY_REG:
		par_addr = (u32)&config->p_reg;
		break;
	case REGPHY_TIMING:
		par_addr = (u32)&config->p_timing;
		break;
	case REG_DYN:
	case REGPHY_DYN:
	case REG_TYPE_NB:
		par_addr = (u32)NULL;
		break;
	}

	return par_addr;
}
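
/*
 * Like stm32mp1_dump_reg(), but read the values from the configuration
 * parameters rather than from the hardware.  Returns 0 if something was
 * dumped, -EINVAL otherwise.
 */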
int stm32mp1_dump_param(const struct stm32mp1_ddr_config *config,
			const char *name)
{
	unsigned int i, j;
	const struct reg_desc *desc;
	u32 par_addr;
	enum base_type p_base;
	enum reg_type type;
	const char *p_name;
	enum base_type filter = NONE_BASE;
	int result = -EINVAL;

	if (name) {
		if (strcmp(name, base_name[DDR_BASE]) == 0)
			filter = DDR_BASE;
		else if (strcmp(name, base_name[DDRPHY_BASE]) == 0)
			filter = DDRPHY_BASE;
	}

	for (i = 0; i < ARRAY_SIZE(ddr_registers); i++) {
		par_addr = get_par_addr(config, i);
		if (!par_addr)
			continue;
		p_base = ddr_registers[i].base;
		p_name = ddr_registers[i].name;
		if (!name || (filter == p_base || !strcmp(name, p_name))) {
			result = 0;
			desc = ddr_registers[i].desc;
			printf("==%s.%s==\n", base_name[p_base], p_name);
			for (j = 0; j < ddr_registers[i].size; j++)
				stm32mp1_dump_param_desc(par_addr, &desc[j]);
		}
	}
	if (result) {
		desc = found_reg(name, &type);
		if (desc) {
			par_addr = get_par_addr(config, type);
			if (par_addr) {
				stm32mp1_dump_param_desc(par_addr, desc);
				result = 0;
			}
		}
	}
	return result;
}
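
/* Write a hexadecimal value (given as a string) to the named parameter in the configuration. */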
void stm32mp1_edit_param(const struct stm32mp1_ddr_config *config,
			 char *name, char *string)
{
	unsigned long *ptr, value;
	enum reg_type type;
	const struct reg_desc *desc;
	u32 par_addr;

	desc = found_reg(name, &type);
	if (!desc) {
		printf("%s not found\n", name);
		return;
	}
	if (strict_strtoul(string, 16, &value) < 0) {
		printf("invalid value %s\n", string);
		return;
	}
	par_addr = get_par_addr(config, type);
	if (!par_addr) {
		printf("no parameter %s\n", name);
		return;
	}
	ptr = (unsigned long *)(par_addr + desc->par_offset);
	writel(value, ptr);
	printf("%s= 0x%08x\n", desc->name, readl(ptr));
}
#endif
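
/*
 * Default stub: interactive mode is never entered unless another file
 * overrides this weak symbol (the interactive debug support, when
 * CONFIG_STM32MP1_DDR_INTERACTIVE is enabled).
 */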
__weak bool stm32mp1_ddr_interactive(void *priv,
				     enum stm32mp1_ddr_interact_step step,
				     const struct stm32mp1_ddr_config *config)
{
	return false;
}

#define INTERACTIVE(step)\
	stm32mp1_ddr_interactive(priv, step, config)
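
/*
 * Poll the PHY PGSR register until init done (IDONE) or one of the error
 * flags (DTERR, DTIERR, DFTERR, RVERR, RVEIRR) is raised, with a 1 s timeout.
 */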
static void ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
{
	u32 pgsr;
	int ret;

	ret = readl_poll_timeout(&phy->pgsr, pgsr,
				 pgsr & (DDRPHYC_PGSR_IDONE |
					 DDRPHYC_PGSR_DTERR |
					 DDRPHYC_PGSR_DTIERR |
					 DDRPHYC_PGSR_DFTERR |
					 DDRPHYC_PGSR_RVERR |
					 DDRPHYC_PGSR_RVEIRR),
				 1000000);
	log_debug("\n[0x%08x] pgsr = 0x%08x ret=%d\n",
		  (u32)&phy->pgsr, pgsr, ret);
}
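
/* Trigger the PHY initialization steps selected in pir (plus PIR.INIT) and wait for completion. */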
static void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, u32 pir)
{
	pir |= DDRPHYC_PIR_INIT;
	writel(pir, &phy->pir);
	log_debug("[0x%08x] pir = 0x%08x -> 0x%08x\n",
		  (u32)&phy->pir, pir, readl(&phy->pir));

	/* need to wait 10 configuration clock cycles before starting to poll */
	udelay(10);

	/* wait for DRAM initialization and Gate Training Evaluation to complete */
	ddrphy_idone_wait(phy);
}

/* start quasi dynamic register update */
static void start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	clrbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
}

/* wait for quasi dynamic register update acknowledge */
static void wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	int ret;
	u32 swstat;

	setbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);

	ret = readl_poll_timeout(&ctl->swstat, swstat,
				 swstat & DDRCTRL_SWSTAT_SW_DONE_ACK,
				 1000000);
	if (ret)
		panic("Timeout initialising DRAM : DDR->swstat = %x\n",
		      swstat);

	log_debug("[0x%08x] swstat = 0x%08x\n", (u32)&ctl->swstat, swstat);
}

/* wait for the controller to reach the requested operating mode */
static void wait_operating_mode(struct ddr_info *priv, int mode)
{
	u32 stat, val, mask, val2 = 0, mask2 = 0;
	int ret;

	mask = DDRCTRL_STAT_OPERATING_MODE_MASK;
	val = mode;
	/* self-refresh due to software => check also STAT.selfref_type */
	if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
		mask |= DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val |= DDRCTRL_STAT_SELFREF_TYPE_SR;
	} else if (mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) {
		/* normal mode: handle also automatic self refresh */
		mask2 = DDRCTRL_STAT_OPERATING_MODE_MASK |
			DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val2 = DDRCTRL_STAT_OPERATING_MODE_SR |
		       DDRCTRL_STAT_SELFREF_TYPE_ASR;
	}

	ret = readl_poll_timeout(&priv->ctl->stat, stat,
				 ((stat & mask) == val) ||
				 (mask2 && ((stat & mask2) == val2)),
				 1000000);

	if (ret)
		panic("Timeout DRAM : DDR->stat = %x\n", stat);

	log_debug("[0x%08x] stat = 0x%08x\n", (u32)&priv->ctl->stat, stat);
}

static void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	start_sw_done(ctl);
	/* quasi-dynamic register update */
	setbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	clrbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN |
				   DDRCTRL_PWRCTL_SELFREF_EN);
	clrbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}

static void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
				     u32 rfshctl3, u32 pwrctl)
{
	start_sw_done(ctl);
	if (!(rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH))
		clrbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	if (pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN)
		setbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	if ((pwrctl & DDRCTRL_PWRCTL_SELFREF_EN))
		setbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_SELFREF_EN);
	setbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}
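
/*
 * Enable the hardware low-power features permitted by the pwrctl
 * configuration: switch the RCC DDR clock mode to automatic self-refresh,
 * enable hardware low-power, and set the HSR (DRAM clock disable) and,
 * when allowed, ASR (self-refresh) bits in PWRCTL.
 */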
static void stm32mp1_asr_enable(struct ddr_info *priv, const u32 pwrctl)
{
	struct stm32mp1_ddrctl *ctl = priv->ctl;

	/* SSR is the best we can do. */
	if (!(pwrctl & DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE))
		return;

	clrsetbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCKMOD_MASK,
			RCC_DDRITFCR_DDRCKMOD_ASR);

	start_sw_done(ctl);

	setbits_le32(&ctl->hwlpctl, DDRCTRL_HWLPCTL_HW_LP_EN);
	writel(DDRCTRL_PWRTMG_POWERDOWN_TO_X32(0x10) |
	       DDRCTRL_PWRTMG_SELFREF_TO_X32(0x01),
	       &ctl->pwrtmg);

	/* HSR we can do. */
	setbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE);

	if (pwrctl & DDRCTRL_PWRCTL_SELFREF_EN)	/* ASR we can do. */
		setbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_SELFREF_EN);

	setbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);

	wait_sw_done_ack(ctl);
}

/* board-specific DDR power initializations. */
__weak int board_ddr_power_init(enum ddr_type ddr_type)
{
	return 0;
}
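
/*
 * Full controller + PHY bring-up: reset the DDR sub-system, program the
 * uMCTL2 and PUBL registers from the device-tree configuration, run DRAM
 * initialization and DQS training, then restore the refresh/low-power
 * settings and enable the AXI ports.  In interactive mode each INTERACTIVE()
 * checkpoint may loop back to "start" to replay the sequence with edited
 * parameters.
 */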
__maybe_unused
void stm32mp1_ddr_init(struct ddr_info *priv,
		       const struct stm32mp1_ddr_config *config)
{
	u32 pir;
	int ret = -EINVAL;
	char bus_width;

	switch (config->c_reg.mstr & DDRCTRL_MSTR_DATA_BUS_WIDTH_MASK) {
	case DDRCTRL_MSTR_DATA_BUS_WIDTH_QUARTER:
		bus_width = 8;
		break;
	case DDRCTRL_MSTR_DATA_BUS_WIDTH_HALF:
		bus_width = 16;
		break;
	default:
		bus_width = 32;
		break;
	}

	if (config->c_reg.mstr & DDRCTRL_MSTR_DDR3)
		ret = board_ddr_power_init(STM32MP_DDR3);
	else if (config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2) {
		if (bus_width == 32)
			ret = board_ddr_power_init(STM32MP_LPDDR2_32);
		else
			ret = board_ddr_power_init(STM32MP_LPDDR2_16);
	} else if (config->c_reg.mstr & DDRCTRL_MSTR_LPDDR3) {
		if (bus_width == 32)
			ret = board_ddr_power_init(STM32MP_LPDDR3_32);
		else
			ret = board_ddr_power_init(STM32MP_LPDDR3_16);
	}

	if (ret)
		panic("ddr power init failed\n");

start:
	log_debug("name = %s\n", config->info.name);
	log_debug("speed = %d kHz\n", config->info.speed);
	log_debug("size = 0x%x\n", config->info.size);

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 * 1.1 RESETS: presetn, core_ddrc_rstn, aresetn
	 */
	/* Assert all DDR part resets */
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed))
		panic("invalid DRAM clock : %d kHz\n",
		      config->info.speed);

	/* 1.3. deassert reset */
	/* de-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit
	 */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(2);
	/* for PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */

	if (INTERACTIVE(STEP_DDR_RESET))
		goto start;

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	clrbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	log_debug("[0x%08x] dfimisc = 0x%08x\n",
		  (u32)&priv->ctl->dfimisc, readl(&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);
	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* skip CTRL init, SDRAM init is done by PHY PUBL */
	clrsetbits_le32(&priv->ctl->init0,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);

	set_reg(priv, REG_PERF, &config->c_perf);

	if (INTERACTIVE(STEP_CTL_INIT))
		goto start;

	/* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);

	if (INTERACTIVE(STEP_PHY_INIT))
		goto start;

	/*
	 * 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 *    Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	ddrphy_idone_wait(priv->phy);

	/*
	 * 5. Indicate to PUBL that controller performs SDRAM initialization
	 *    by setting PIR.INIT and PIR.CTLDINIT and poll PGSR.IDONE
	 *    DRAM init is done by PHY, init0.skip_dram.init = 1
	 */
	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if (config->c_reg.mstr & DDRCTRL_MSTR_DDR3)
		pir |= DDRPHYC_PIR_DRAMRST; /* only for DDR3 */

	stm32mp1_ddrphy_init(priv->phy, pir);

	/* 6. SET DFIMISC.dfi_init_complete_en to 1 */
	/* Enable quasi-dynamic register programming */
	start_sw_done(priv->ctl);
	setbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(priv->ctl);

	/*
	 * 7. Wait for DWC_ddr_umctl2 to move to normal operation mode
	 *    by monitoring STAT.operating_mode signal
	 */
	/* wait uMCTL2 ready */
	wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	log_debug("DDR DQS training : ");

	/*
	 * 8. Disable Auto refresh and power down by setting
	 *    - RFSHCTL3.dis_auto_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfi_init_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 * 9. Program PUBL PGCR to enable refresh during training and rank to train
	 *    not done => keep the programmed value in PGCR
	 */

	/* 10. configure PUBL PIR register to specify which training step to run */
	/* RVTRN is executed only on LPDDR2/LPDDR3 */
	if (config->c_reg.mstr & DDRCTRL_MSTR_DDR3)
		pir = DDRPHYC_PIR_QSTRN;
	else
		pir = DDRPHYC_PIR_QSTRN | DDRPHYC_PIR_RVTRN;

	stm32mp1_ddrphy_init(priv->phy, pir);

	/* 11. monitor PUBL PGSR.IDONE to poll completion of training sequence */
	ddrphy_idone_wait(priv->phy);

	/* 12. set back registers in step 8 to the original values if desired */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* Enable auto-self-refresh, which saves a bit of power at runtime. */
	stm32mp1_asr_enable(priv, config->c_reg.pwrctl);

	/* enable uMCTL2 AXI port 0 and 1 */
	setbits_le32(&priv->ctl->pctrl_0, DDRCTRL_PCTRL_N_PORT_EN);
	setbits_le32(&priv->ctl->pctrl_1, DDRCTRL_PCTRL_N_PORT_EN);

	if (INTERACTIVE(STEP_DDR_READY))
		goto start;
}