/* ti-emif-sram-pm.S — low-level EMIF power-management code (runs from SRAM) */
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Low level PM code for TI EMIF
 *
 * Copyright (C) 2016-2017 Texas Instruments Incorporated - http://www.ti.com/
 *	Dave Gerlach
 */

#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/page.h>

#include "emif.h"
#include "ti-emif-asm-offsets.h"
/* EMIF_POWER_MANAGEMENT_CONTROL field values used below */
#define EMIF_POWER_MGMT_WAIT_SELF_REFRESH_8192_CYCLES	0x00a0
#define EMIF_POWER_MGMT_SR_TIMER_MASK			0x00f0
#define EMIF_POWER_MGMT_SELF_REFRESH_MODE		0x0200
#define EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK		0x0700

/*
 * SDRAM_CONFIG type field values; unparenthesized shift is fine here
 * because these are only ever used whole, as immediate operands.
 */
#define EMIF_SDCFG_TYPE_DDR2				0x2 << SDRAM_TYPE_SHIFT
#define EMIF_SDCFG_TYPE_DDR3				0x3 << SDRAM_TYPE_SHIFT
#define EMIF_STATUS_READY				0x4

/* Byte size of the AM43xx external PHY control register block (0x120/4 regs) */
#define AM43XX_EMIF_PHY_CTRL_REG_COUNT			0x120

#define EMIF_AM437X_REGISTERS				0x1

	.arm
	.align 3
	.arch armv7-a

/* Start marker for the SRAM-resident region; ti_emif_sram_sz measures from here */
ENTRY(ti_emif_sram)
/*
 * void ti_emif_save_context(void)
 *
 * Used during suspend to save the context of all required EMIF registers
 * to local memory if the EMIF is going to lose context during the sleep
 * transition. Operates on the VIRTUAL address of the EMIF.
 *
 * Register usage:
 *   r0 = EMIF base address (virtual)
 *   r2 = context save area in ti_emif_pm_sram_data (virtual)
 *   r1 = scratch for each register value
 */
ENTRY(ti_emif_save_context)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

	/* Save EMIF configuration */
	ldr	r1, [r0, #EMIF_SDRAM_CONFIG]
	str	r1, [r2, #EMIF_SDCFG_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_REFRESH_CONTROL]
	str	r1, [r2, #EMIF_REF_CTRL_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_1]
	str	r1, [r2, #EMIF_TIMING1_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_2]
	str	r1, [r2, #EMIF_TIMING2_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_3]
	str	r1, [r2, #EMIF_TIMING3_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]
	str	r1, [r2, #EMIF_PMCR_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CTRL_SHDW]
	str	r1, [r2, #EMIF_PMCR_SHDW_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]
	str	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_DDR_PHY_CTRL_1]
	str	r1, [r2, #EMIF_DDR_PHY_CTLR_1_OFFSET]

	ldr	r1, [r0, #EMIF_COS_CONFIG]
	str	r1, [r2, #EMIF_COS_CONFIG_OFFSET]

	ldr	r1, [r0, #EMIF_PRIORITY_TO_CLASS_OF_SERVICE_MAPPING]
	str	r1, [r2, #EMIF_PRIORITY_TO_COS_MAPPING_OFFSET]

	ldr	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_1_MAPPING]
	str	r1, [r2, #EMIF_CONNECT_ID_SERV_1_MAP_OFFSET]

	ldr	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_2_MAPPING]
	str	r1, [r2, #EMIF_CONNECT_ID_SERV_2_MAP_OFFSET]

	ldr	r1, [r0, #EMIF_OCP_CONFIG]
	str	r1, [r2, #EMIF_OCP_CONFIG_VAL_OFFSET]

	/*
	 * The AM43xx register layout has additional leveling, LPDDR2-NVM,
	 * DLL-calibration and external PHY control registers; skip them on
	 * other layouts.
	 */
	ldr	r5, [r4, #EMIF_PM_CONFIG_OFFSET]
	cmp	r5, #EMIF_SRAM_AM43_REG_LAYOUT
	bne	emif_skip_save_extra_regs

	ldr	r1, [r0, #EMIF_READ_WRITE_LEVELING_RAMP_CONTROL]
	str	r1, [r2, #EMIF_RD_WR_LEVEL_RAMP_CTRL_OFFSET]

	ldr	r1, [r0, #EMIF_READ_WRITE_EXECUTION_THRESHOLD]
	str	r1, [r2, #EMIF_RD_WR_EXEC_THRESH_OFFSET]

	ldr	r1, [r0, #EMIF_LPDDR2_NVM_TIMING]
	str	r1, [r2, #EMIF_LPDDR2_NVM_TIM_OFFSET]

	ldr	r1, [r0, #EMIF_LPDDR2_NVM_TIMING_SHDW]
	str	r1, [r2, #EMIF_LPDDR2_NVM_TIM_SHDW_OFFSET]

	ldr	r1, [r0, #EMIF_DLL_CALIB_CTRL]
	str	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_DLL_CALIB_CTRL_SHDW]
	str	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_SHDW_OFFSET]

	/* Loop and save entire block of emif phy regs */
	mov	r5, #0x0			@ r5 = byte offset into the block
	add	r4, r2, #EMIF_EXT_PHY_CTRL_VALS_OFFSET	@ r4 = save-area cursor base
	add	r3, r0, #EMIF_EXT_PHY_CTRL_1		@ r3 = first PHY ctrl register
ddr_phy_ctrl_save:
	ldr	r1, [r3, r5]
	str	r1, [r4, r5]
	add	r5, r5, #0x4
	cmp	r5, #AM43XX_EMIF_PHY_CTRL_REG_COUNT	@ 0x120 bytes = 72 registers
	bne	ddr_phy_ctrl_save

emif_skip_save_extra_regs:
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_save_context)
/*
 * void ti_emif_restore_context(void)
 *
 * Used during resume to restore the context of all required EMIF registers
 * from local memory after the EMIF has lost context during a sleep transition.
 * Operates on the PHYSICAL address of the EMIF (MMU is off / identity at
 * this point in resume).
 *
 * Register usage:
 *   r0 = EMIF base address (physical)
 *   r2 = context save area (physical)  — clobbered near the end
 *   r1 = scratch for each register value
 */
ENTRY(ti_emif_restore_context)
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_PHYS_OFFSET]

	/*
	 * Config EMIF Timings. Each value is written to both the active
	 * register and its shadow so it takes effect immediately and
	 * survives the next refresh-rate change.
	 */
	ldr	r1, [r2, #EMIF_DDR_PHY_CTLR_1_OFFSET]
	str	r1, [r0, #EMIF_DDR_PHY_CTRL_1]
	str	r1, [r0, #EMIF_DDR_PHY_CTRL_1_SHDW]

	ldr	r1, [r2, #EMIF_TIMING1_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_1]
	str	r1, [r0, #EMIF_SDRAM_TIMING_1_SHDW]

	ldr	r1, [r2, #EMIF_TIMING2_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_2]
	str	r1, [r0, #EMIF_SDRAM_TIMING_2_SHDW]

	ldr	r1, [r2, #EMIF_TIMING3_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_3]
	str	r1, [r0, #EMIF_SDRAM_TIMING_3_SHDW]

	ldr	r1, [r2, #EMIF_REF_CTRL_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_REFRESH_CONTROL]
	str	r1, [r0, #EMIF_SDRAM_REFRESH_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	ldr	r1, [r2, #EMIF_PMCR_SHDW_VAL_OFFSET]
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_COS_CONFIG_OFFSET]
	str	r1, [r0, #EMIF_COS_CONFIG]

	ldr	r1, [r2, #EMIF_PRIORITY_TO_COS_MAPPING_OFFSET]
	str	r1, [r0, #EMIF_PRIORITY_TO_CLASS_OF_SERVICE_MAPPING]

	ldr	r1, [r2, #EMIF_CONNECT_ID_SERV_1_MAP_OFFSET]
	str	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_1_MAPPING]

	ldr	r1, [r2, #EMIF_CONNECT_ID_SERV_2_MAP_OFFSET]
	str	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_2_MAPPING]

	ldr	r1, [r2, #EMIF_OCP_CONFIG_VAL_OFFSET]
	str	r1, [r0, #EMIF_OCP_CONFIG]

	/* Extra registers exist only on the AM43xx layout */
	ldr	r5, [r4, #EMIF_PM_CONFIG_OFFSET]
	cmp	r5, #EMIF_SRAM_AM43_REG_LAYOUT
	bne	emif_skip_restore_extra_regs

	ldr	r1, [r2, #EMIF_RD_WR_LEVEL_RAMP_CTRL_OFFSET]
	str	r1, [r0, #EMIF_READ_WRITE_LEVELING_RAMP_CONTROL]

	ldr	r1, [r2, #EMIF_RD_WR_EXEC_THRESH_OFFSET]
	str	r1, [r0, #EMIF_READ_WRITE_EXECUTION_THRESHOLD]

	ldr	r1, [r2, #EMIF_LPDDR2_NVM_TIM_OFFSET]
	str	r1, [r0, #EMIF_LPDDR2_NVM_TIMING]

	ldr	r1, [r2, #EMIF_LPDDR2_NVM_TIM_SHDW_OFFSET]
	str	r1, [r0, #EMIF_LPDDR2_NVM_TIMING_SHDW]

	ldr	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_OFFSET]
	str	r1, [r0, #EMIF_DLL_CALIB_CTRL]

	ldr	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_SHDW_OFFSET]
	str	r1, [r0, #EMIF_DLL_CALIB_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]

	/* Loop and restore entire block of emif phy regs */
	mov	r5, #0x0			@ r5 = byte offset into the block
	/* Load ti_emif_regs_amx3 + EMIF_EXT_PHY_CTRL_VALS_OFFSET for address
	 * to phy register save space
	 */
	add	r3, r2, #EMIF_EXT_PHY_CTRL_VALS_OFFSET
	add	r4, r0, #EMIF_EXT_PHY_CTRL_1
ddr_phy_ctrl_restore:
	ldr	r1, [r3, r5]
	str	r1, [r4, r5]
	add	r5, r5, #0x4
	cmp	r5, #AM43XX_EMIF_PHY_CTRL_REG_COUNT
	bne	ddr_phy_ctrl_restore

emif_skip_restore_extra_regs:
	/*
	 * Output impedence calib needed only for DDR3
	 * but since the initial state of this will be
	 * disabled for DDR2 no harm in restoring the
	 * old configuration
	 */
	ldr	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]

	/*
	 * Write to sdcfg last for DDR2 only; the write starts SDRAM
	 * initialization. (DDR3 goes through ti_emif_run_hw_leveling
	 * instead.) Note r2 no longer points at the save area after this.
	 */
	ldr	r1, [r2, #EMIF_SDCFG_VAL_OFFSET]
	and	r2, r1, #SDRAM_TYPE_MASK	@ extract SDRAM type field
	cmp	r2, #EMIF_SDCFG_TYPE_DDR2
	streq	r1, [r0, #EMIF_SDRAM_CONFIG]	@ only if DDR2

	mov	pc, lr
ENDPROC(ti_emif_restore_context)
/*
 * void ti_emif_run_hw_leveling(void)
 *
 * Used during resume to run hardware leveling again and restore the
 * configuration of the EMIF PHY, only for DDR3. On non-DDR3 parts this
 * is a no-op. Operates on the PHYSICAL address of the EMIF.
 */
ENTRY(ti_emif_run_hw_leveling)
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]

	/* Prepare leveling-control value with the full-leveling start bit */
	ldr	r3, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]
	orr	r3, r3, #RDWRLVLFULL_START

	/* Hardware leveling applies to DDR3 only */
	ldr	r2, [r0, #EMIF_SDRAM_CONFIG]
	and	r2, r2, #SDRAM_TYPE_MASK
	cmp	r2, #EMIF_SDCFG_TYPE_DDR3
	bne	skip_hwlvl

	str	r3, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]	@ kick off leveling

	/*
	 * If EMIF registers are touched during initial stage of HW
	 * leveling sequence there will be an L3 NOC timeout error issued
	 * as the EMIF will not respond, which is not fatal, but it is
	 * avoidable. This small wait loop is enough time for this condition
	 * to clear, even at worst case of CPU running at max speed of 1Ghz.
	 */
	mov	r2, #0x2000
1:
	subs	r2, r2, #0x1
	bne	1b

	/* Bit clears when operation is complete */
2:	ldr	r1, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]
	tst	r1, #RDWRLVLFULL_START
	bne	2b

skip_hwlvl:
	mov	pc, lr
ENDPROC(ti_emif_run_hw_leveling)
/*
 * void ti_emif_enter_sr(void)
 *
 * Programs the EMIF to tell the SDRAM to enter into self-refresh
 * mode during a sleep transition. Operates on the VIRTUAL address
 * of the EMIF.
 */
ENTRY(ti_emif_enter_sr)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

	/* Replace the LP mode field with self-refresh, keeping other bits */
	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	orr	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_enter_sr)
/*
 * void ti_emif_exit_sr(void)
 *
 * Programs the EMIF to tell the SDRAM to exit self-refresh mode
 * after a sleep transition. Operates on the PHYSICAL address of
 * the EMIF. Does not use the stack (called before it is usable);
 * returns via lr.
 */
ENTRY(ti_emif_exit_sr)
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_PHYS_OFFSET]

	/*
	 * Toggle EMIF to exit refresh mode:
	 * if EMIF lost context, PWR_MGT_CTRL is currently 0, writing disable
	 * (0x0), wont do diddly squat! so do a toggle from SR(0x2) to disable
	 * (0x0) here.
	 * *If* EMIF did not lose context, nothing broken as we write the same
	 * value(0x2) to reg before we write a disable (0x0).
	 */
	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]		@ saved PMCR value
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	orr	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]	@ first: SR mode
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]	@ then: LP disabled

	/* Wait for EMIF to become ready */
1:	ldr	r1, [r0, #EMIF_STATUS]
	tst	r1, #EMIF_STATUS_READY
	beq	1b

	mov	pc, lr
ENDPROC(ti_emif_exit_sr)
/*
 * void ti_emif_abort_sr(void)
 *
 * Disables self-refresh after a failed transition to a low-power
 * state so the kernel can jump back to DDR and follow abort path.
 * Operates on the VIRTUAL address of the EMIF.
 */
ENTRY(ti_emif_abort_sr)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

	/* Restore saved PMCR with the self-refresh mode field cleared */
	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	/* Wait for EMIF to become ready */
1:	ldr	r1, [r0, #EMIF_STATUS]
	tst	r1, #EMIF_STATUS_READY
	beq	1b

	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_abort_sr)
	.align 3
/*
 * Data area copied to SRAM along with the code above: holds the EMIF
 * base addresses (virt/phys), layout config word, and the register
 * context save space addressed by the EMIF_PM_*_OFFSET constants.
 */
ENTRY(ti_emif_pm_sram_data)
	.space EMIF_PM_DATA_SIZE

/* Total size of the SRAM-resident region, for the copy into SRAM */
ENTRY(ti_emif_sram_sz)
	.word	. - ti_emif_save_context