  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
  4. * Copyright (c) 2022 Edgeble AI Technologies Pvt. Ltd.
  5. */
  6. #include <common.h>
  7. #include <debug_uart.h>
  8. #include <dm.h>
  9. #include <ram.h>
  10. #include <syscon.h>
  11. #include <asm/io.h>
  12. #include <asm/arch-rockchip/clock.h>
  13. #include <asm/arch-rockchip/hardware.h>
  14. #include <asm/arch-rockchip/cru_rv1126.h>
  15. #include <asm/arch-rockchip/grf_rv1126.h>
  16. #include <asm/arch-rockchip/sdram_common.h>
  17. #include <asm/arch-rockchip/sdram_rv1126.h>
  18. #include <linux/delay.h>
/*
 * Training-step flags: bit mask selecting which PHY training stages to run.
 * FULL_TRAINING requests every stage at once.
 */
#define CA_TRAINING		(0x1 << 0)
#define READ_GATE_TRAINING	(0x1 << 1)
#define WRITE_LEVELING		(0x1 << 2)
#define WRITE_TRAINING		(0x1 << 3)
#define READ_TRAINING		(0x1 << 4)
#define FULL_TRAINING		(0xff)

/* signal-class selector passed to the de-skew helpers */
#define SKEW_RX_SIGNAL		(0)
#define SKEW_TX_SIGNAL		(1)
#define SKEW_CA_SIGNAL		(2)

/* de-skew modify mode: write an absolute value vs apply a signed delta */
#define DESKEW_MDF_ABS_VAL	(0)
#define DESKEW_MDF_DIFF_VAL	(1)
/*
 * Driver-private state.  The register-bank pointers and idle timers are only
 * needed while the DRAM is actually being brought up, i.e. in TPL (or in SPL
 * when no TPL exists); in other build stages only the probed RAM size
 * (info) and the PMU GRF pointer remain.
 */
struct dram_info {
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
	void __iomem *pctl;		/* DDR controller (uPCTL2) registers */
	void __iomem *phy;		/* DDR PHY registers */
	struct rv1126_cru *cru;		/* clock & reset unit */
	struct msch_regs *msch;		/* memory scheduler */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;		/* system general register file */
	u32 sr_idle;			/* self-refresh idle timeout */
	u32 pd_idle;			/* power-down idle timeout */
#endif
	struct ram_info info;		/* probed base/size reported to RAM uclass */
	struct rv1126_pmugrf *pmugrf;
};
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))

/* RV1126 physical base addresses of the blocks this driver touches */
#define GRF_BASE_ADDR		0xfe000000
#define PMU_GRF_BASE_ADDR	0xfe020000
#define DDR_GRF_BASE_ADDR	0xfe030000
#define BUS_SGRF_BASE_ADDR	0xfe0a0000
#define SERVER_MSCH_BASE_ADDR	0xfe800000
#define CRU_BASE_ADDR		0xff490000
#define DDR_PHY_BASE_ADDR	0xff4a0000
#define UPCTL2_BASE_ADDR	0xffa50000

/* secure GRF register offsets (SOC_CON13 carries the uPCTL2 reset bits) */
#define SGRF_SOC_CON2		0x8
#define SGRF_SOC_CON12		0x30
#define SGRF_SOC_CON13		0x34

/* single driver instance; SPL/TPL has no malloc-backed uclass priv here */
struct dram_info dram_info;
/*
 * Candidate SDRAM parameter sets, one per supported frequency, generated
 * offline and pulled in as .inc files.  The DRAM-type family (LPDDR4 vs
 * DDR3) is selected at build time.
 */
struct rv1126_sdram_params sdram_configs[] = {
#if defined(CONFIG_RAM_ROCKCHIP_LPDDR4)
# include "sdram-rv1126-lpddr4-detect-328.inc"
# include "sdram-rv1126-lpddr4-detect-396.inc"
# include "sdram-rv1126-lpddr4-detect-528.inc"
# include "sdram-rv1126-lpddr4-detect-664.inc"
# include "sdram-rv1126-lpddr4-detect-784.inc"
# include "sdram-rv1126-lpddr4-detect-924.inc"
# include "sdram-rv1126-lpddr4-detect-1056.inc"
#else
# include "sdram-rv1126-ddr3-detect-328.inc"
# include "sdram-rv1126-ddr3-detect-396.inc"
# include "sdram-rv1126-ddr3-detect-528.inc"
# include "sdram-rv1126-ddr3-detect-664.inc"
# include "sdram-rv1126-ddr3-detect-784.inc"
# include "sdram-rv1126-ddr3-detect-924.inc"
# include "sdram-rv1126-ddr3-detect-1056.inc"
#endif
};

/* loader parameter blob; parsed via sdram_head_info_index_v2 (see
 * rkclk_set_dpll for an example consumer)
 */
u32 common_info[] = {
#include "sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* read/write training results exported to the ddr test tool */
static struct rw_trn_result rw_trn_result;
#endif

/* per-frequency-setpoint parameters captured during init */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

static u8 lp3_odt_value;		/* LPDDR3 ODT setting cache */

/* write-leveling results: [rank][byte lane]; filled from PHY readback */
static s8 wrlvl_result[2][4];
/*
 * DDR configuration 0-9 (DDR3/LPDDR encoding, see calculate_ddrconfig):
 *   bit [8]   : rank - 1
 *   bits[7:5] : row - 13
 *   bit [4]   : special layout flag (matched only via the 2-rank fast path)
 *   bit [3]   : 1 when bank bits == 3 (8 banks)
 *   bits[2:0] : bw + col - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2)	 /* 9 */
};

/*
 * DDR configuration 10-21 (DDR4 encoding, see calculate_ddrconfig):
 *   bit [7]   : rank - 1
 *   bits[6:4] : row - 13
 *   bit [3]   : 1 for the 2-rank equal-size fast path
 *   bits[2:1] : bus width code
 *   bit [0]   : die bus width
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0)	 /* 21 */
};

/* DDR configuration 22-28; same field layout as ddr_cfg_2_rbc above */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3)	 /* 28 */
};
/*
 * {DDR4 ddrconfig, equivalent DDR3-style ddrconfig} pairs.
 * calculate_ddrconfig() translates a matched DDR4 index (10-21) into the
 * DDR3-style index actually programmed, and set_ctl_address_map() uses the
 * table in the reverse direction to find the addrmap row.
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
/*
 * addrmap[n] holds the nine ADDRMAP0..ADDRMAP8 register values for
 * ddrconfig n; set_ctl_address_map() copies the row verbatim into the
 * controller starting at DDR_PCTL2_ADDRMAP0.
 */
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f01}, /* 19 */
	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x0600}, /* 21 */

	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
	 0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */
	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}  /* 28 */
};
/*
 * Per-signal selector triples used by the de-skew code (consumer not
 * visible in this chunk).  NOTE(review): presumably {signal id, RX register
 * index, TX register index} — confirm against the skew-update helpers.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
/* address-group bases, indexed by {CS0,CS1} x {A,B} group */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

/*
 * PHY register offsets holding write-leveling results:
 * [rank][byte lane], mirroring the wrlvl_result[2][4] cache above.
 */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

/*
 * PHY register offsets of the DQS/DQ skew blocks, ordered RX then TX,
 * CS0 then CS1, DQS0..DQS3 (see per-entry comments).
 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
/*
 * Drive the DDR controller and PHY reset lines.
 *
 * The uPCTL2 core/AXI/APB reset requests live in the secure GRF
 * (BUS_SGRF SOC_CON13); the PHY core/APB reset requests live in CRU
 * softrst_con[12].  Each argument is the raw request value passed to the
 * corresponding *_REQ() field macro.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	/* note: the AXI reset follows the controller core reset value */
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
/*
 * Program the DPLL (cru pll[1]) to @hz and switch the DDR clock onto it.
 *
 * Sequence: park the mode mux on the 24 MHz crystal, pick post-dividers for
 * the requested band, write fbdiv/postdiv, optionally enable spread-spectrum
 * modulation (from the loader-params blob in common_info), then poll the
 * LOCK bit for up to ~1000 us before switching the mux back to the PLL.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;		/* lock-poll budget in 1 us steps */
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;			/* 1 = integer mode; cleared if SSMOD on */

	/* global_index.offset is in 32-bit words within common_info */
	gbl_info = (struct global_info *)((void *)common_info +
		   index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;

	/* choose post-dividers so the VCO stays in range for each band */
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	/* 24 MHz crystal reference: fout = 24 * fbdiv / (refdiv*pd1*pd2) */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the crystal while the PLL is reprogrammed */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);
	writel(0x1f000000, &dram->cru->clksel_con[64]);

	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);

	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;	/* fractional mode needed for modulation */
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}

	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* wait for PLL lock; falls through after the timeout expires */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
/*
 * Set the DDR clock for the selected parameter set.  The Inno PHY runs the
 * DRAM at twice the PLL output, so the PLL is programmed to freq / 2.
 * NOTE(review): this uses MHZ while rkclk_set_dpll divides by MHz —
 * confirm both macros resolve to 1000000 in the rockchip headers.
 */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}
/*
 * Map the detected DRAM geometry (rank/width/col/row/bank) to one of the
 * 29 supported ddrconfig indices (rows of the addrmap[] table).
 *
 * The geometry is packed into a bit pattern and matched against the
 * ddr_cfg_2_rbc / ddr4_cfg_2_rbc / ddr_cfg_2_rbc_p2 tables; equal-field
 * matches use ==, while row/rank fields may match a larger table entry
 * (<=) since unused row bits are masked off later by
 * set_ctl_address_map().  DDR4 matches are finally translated to their
 * DDR3-style index through d4_rbc_2_d3_rbc.
 *
 * Returns the ddrconfig index, or a value > 28 (from the initial -1) when
 * no table entry matches, which triggers the error print.
 */
static unsigned int
calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;	/* wraps to UINT_MAX: "no match" sentinel */
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* 2 equal-size ranks: try the dedicated configs 17-20 first */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* generic DDR4 match over configs 10-20 */
		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* 2 equal-size ranks, 8 banks: prefer configs 5-7 */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* generic match over configs 0-8, then 22-28 */
		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last-resort fallback for a common single-rank layout */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	/* translate a DDR4 index into the DDR3-style index used by addrmap */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
  422. static void sw_set_req(struct dram_info *dram)
  423. {
  424. void __iomem *pctl_base = dram->pctl;
  425. /* clear sw_done=0 */
  426. writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
  427. }
  428. static void sw_set_ack(struct dram_info *dram)
  429. {
  430. void __iomem *pctl_base = dram->pctl;
  431. /* set sw_done=1 */
  432. writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
  433. while (1) {
  434. /* wait programming done */
  435. if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
  436. PCTL2_SW_DONE_ACK)
  437. break;
  438. }
  439. }
/*
 * Program the controller ADDRMAP0..ADDRMAP8 registers for the chosen
 * ddrconfig, then mask off the row-address bits the part doesn't have.
 *
 * For DDR4 the stored ddrconfig is the DDR3-style alias, so it is mapped
 * back through d4_rbc_2_d3_rbc before indexing addrmap[].
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* unused row set to 0xf */
	/* row bits 17..row each occupy one byte in ADDRMAP6/7 (from bit 12) */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			     ((i - 12) * 8 / 32) * 4,
			     0xf << ((i - 12) * 8 % 32));

	/* LPDDR3 3/4-row parts flag the partial top row in ADDRMAP6[31] */
	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);

	/* half-bus-width DDR4: enable the relevant PCCFG mode bit */
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	/* single rank: disable the CS address bit (all 1s in ADDRMAP0[4:0]) */
	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}
/*
 * Configure the PHY PLL for @freq (Hz), or — when @wait is set — power the
 * PLL up and block until it reports lock.  Callers first program the
 * dividers (wait == 0), then call again with wait != 0 to start and wait.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		/* release power-down and spin on the lock flag (no timeout) */
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		/* per-band feedback/post dividers */
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		/* fbdiv is 9 bits: low byte in reg 0x50, bit 9 in reg 0x51 */
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
/*
 * DDR3 PHY driver-strength register code vs. output impedance in ohm,
 * ordered from weakest (455 ohm) to strongest (20 ohm) drive.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
  535. static u16 d3_phy_odt_2_ohm[][2] = {
  536. {PHY_DDR3_RTT_DISABLE, 0},
  537. {PHY_DDR3_RTT_561ohm, 561},
  538. {PHY_DDR3_RTT_282ohm, 282},
  539. {PHY_DDR3_RTT_188ohm, 188},
  540. {PHY_DDR3_RTT_141ohm, 141},
  541. {PHY_DDR3_RTT_113ohm, 113},
  542. {PHY_DDR3_RTT_94ohm, 94},
  543. {PHY_DDR3_RTT_81ohm, 81},
  544. {PHY_DDR3_RTT_72ohm, 72},
  545. {PHY_DDR3_RTT_64ohm, 64},
  546. {PHY_DDR3_RTT_58ohm, 58},
  547. {PHY_DDR3_RTT_52ohm, 52},
  548. {PHY_DDR3_RTT_48ohm, 48},
  549. {PHY_DDR3_RTT_44ohm, 44},
  550. {PHY_DDR3_RTT_41ohm, 41},
  551. {PHY_DDR3_RTT_38ohm, 38},
  552. {PHY_DDR3_RTT_37ohm, 37},
  553. {PHY_DDR3_RTT_34ohm, 34},
  554. {PHY_DDR3_RTT_32ohm, 32},
  555. {PHY_DDR3_RTT_31ohm, 31},
  556. {PHY_DDR3_RTT_29ohm, 29},
  557. {PHY_DDR3_RTT_28ohm, 28},
  558. {PHY_DDR3_RTT_27ohm, 27},
  559. {PHY_DDR3_RTT_25ohm, 25}
  560. };
  561. static u16 d4lp3_phy_drv_2_ohm[][2] = {
  562. {PHY_DDR4_LPDDR3_RON_482ohm, 482},
  563. {PHY_DDR4_LPDDR3_RON_244ohm, 244},
  564. {PHY_DDR4_LPDDR3_RON_162ohm, 162},
  565. {PHY_DDR4_LPDDR3_RON_122ohm, 122},
  566. {PHY_DDR4_LPDDR3_RON_97ohm, 97},
  567. {PHY_DDR4_LPDDR3_RON_81ohm, 81},
  568. {PHY_DDR4_LPDDR3_RON_69ohm, 69},
  569. {PHY_DDR4_LPDDR3_RON_61ohm, 61},
  570. {PHY_DDR4_LPDDR3_RON_54ohm, 54},
  571. {PHY_DDR4_LPDDR3_RON_48ohm, 48},
  572. {PHY_DDR4_LPDDR3_RON_44ohm, 44},
  573. {PHY_DDR4_LPDDR3_RON_40ohm, 40},
  574. {PHY_DDR4_LPDDR3_RON_37ohm, 37},
  575. {PHY_DDR4_LPDDR3_RON_34ohm, 34},
  576. {PHY_DDR4_LPDDR3_RON_32ohm, 32},
  577. {PHY_DDR4_LPDDR3_RON_30ohm, 30},
  578. {PHY_DDR4_LPDDR3_RON_28ohm, 28},
  579. {PHY_DDR4_LPDDR3_RON_27ohm, 27},
  580. {PHY_DDR4_LPDDR3_RON_25ohm, 25},
  581. {PHY_DDR4_LPDDR3_RON_24ohm, 24},
  582. {PHY_DDR4_LPDDR3_RON_23ohm, 23},
  583. {PHY_DDR4_LPDDR3_RON_22ohm, 22},
  584. {PHY_DDR4_LPDDR3_RON_21ohm, 21}
  585. };
  586. static u16 d4lp3_phy_odt_2_ohm[][2] = {
  587. {PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
  588. {PHY_DDR4_LPDDR3_RTT_586ohm, 586},
  589. {PHY_DDR4_LPDDR3_RTT_294ohm, 294},
  590. {PHY_DDR4_LPDDR3_RTT_196ohm, 196},
  591. {PHY_DDR4_LPDDR3_RTT_148ohm, 148},
  592. {PHY_DDR4_LPDDR3_RTT_118ohm, 118},
  593. {PHY_DDR4_LPDDR3_RTT_99ohm, 99},
  594. {PHY_DDR4_LPDDR3_RTT_85ohm, 58},
  595. {PHY_DDR4_LPDDR3_RTT_76ohm, 76},
  596. {PHY_DDR4_LPDDR3_RTT_67ohm, 67},
  597. {PHY_DDR4_LPDDR3_RTT_60ohm, 60},
  598. {PHY_DDR4_LPDDR3_RTT_55ohm, 55},
  599. {PHY_DDR4_LPDDR3_RTT_50ohm, 50},
  600. {PHY_DDR4_LPDDR3_RTT_46ohm, 46},
  601. {PHY_DDR4_LPDDR3_RTT_43ohm, 43},
  602. {PHY_DDR4_LPDDR3_RTT_40ohm, 40},
  603. {PHY_DDR4_LPDDR3_RTT_38ohm, 38},
  604. {PHY_DDR4_LPDDR3_RTT_36ohm, 36},
  605. {PHY_DDR4_LPDDR3_RTT_34ohm, 34},
  606. {PHY_DDR4_LPDDR3_RTT_32ohm, 32},
  607. {PHY_DDR4_LPDDR3_RTT_31ohm, 31},
  608. {PHY_DDR4_LPDDR3_RTT_29ohm, 29},
  609. {PHY_DDR4_LPDDR3_RTT_28ohm, 28},
  610. {PHY_DDR4_LPDDR3_RTT_27ohm, 27}
  611. };
  612. static u16 lp4_phy_drv_2_ohm[][2] = {
  613. {PHY_LPDDR4_RON_501ohm, 501},
  614. {PHY_LPDDR4_RON_253ohm, 253},
  615. {PHY_LPDDR4_RON_168ohm, 168},
  616. {PHY_LPDDR4_RON_126ohm, 126},
  617. {PHY_LPDDR4_RON_101ohm, 101},
  618. {PHY_LPDDR4_RON_84ohm, 84},
  619. {PHY_LPDDR4_RON_72ohm, 72},
  620. {PHY_LPDDR4_RON_63ohm, 63},
  621. {PHY_LPDDR4_RON_56ohm, 56},
  622. {PHY_LPDDR4_RON_50ohm, 50},
  623. {PHY_LPDDR4_RON_46ohm, 46},
  624. {PHY_LPDDR4_RON_42ohm, 42},
  625. {PHY_LPDDR4_RON_38ohm, 38},
  626. {PHY_LPDDR4_RON_36ohm, 36},
  627. {PHY_LPDDR4_RON_33ohm, 33},
  628. {PHY_LPDDR4_RON_31ohm, 31},
  629. {PHY_LPDDR4_RON_29ohm, 29},
  630. {PHY_LPDDR4_RON_28ohm, 28},
  631. {PHY_LPDDR4_RON_26ohm, 26},
  632. {PHY_LPDDR4_RON_25ohm, 25},
  633. {PHY_LPDDR4_RON_24ohm, 24},
  634. {PHY_LPDDR4_RON_23ohm, 23},
  635. {PHY_LPDDR4_RON_22ohm, 22}
  636. };
  637. static u16 lp4_phy_odt_2_ohm[][2] = {
  638. {PHY_LPDDR4_RTT_DISABLE, 0},
  639. {PHY_LPDDR4_RTT_604ohm, 604},
  640. {PHY_LPDDR4_RTT_303ohm, 303},
  641. {PHY_LPDDR4_RTT_202ohm, 202},
  642. {PHY_LPDDR4_RTT_152ohm, 152},
  643. {PHY_LPDDR4_RTT_122ohm, 122},
  644. {PHY_LPDDR4_RTT_101ohm, 101},
  645. {PHY_LPDDR4_RTT_87ohm, 87},
  646. {PHY_LPDDR4_RTT_78ohm, 78},
  647. {PHY_LPDDR4_RTT_69ohm, 69},
  648. {PHY_LPDDR4_RTT_62ohm, 62},
  649. {PHY_LPDDR4_RTT_56ohm, 56},
  650. {PHY_LPDDR4_RTT_52ohm, 52},
  651. {PHY_LPDDR4_RTT_48ohm, 48},
  652. {PHY_LPDDR4_RTT_44ohm, 44},
  653. {PHY_LPDDR4_RTT_41ohm, 41},
  654. {PHY_LPDDR4_RTT_39ohm, 39},
  655. {PHY_LPDDR4_RTT_37ohm, 37},
  656. {PHY_LPDDR4_RTT_35ohm, 35},
  657. {PHY_LPDDR4_RTT_33ohm, 33},
  658. {PHY_LPDDR4_RTT_32ohm, 32},
  659. {PHY_LPDDR4_RTT_30ohm, 30},
  660. {PHY_LPDDR4_RTT_29ohm, 29},
  661. {PHY_LPDDR4_RTT_27ohm, 27}
  662. };
  663. static u32 lp4_odt_calc(u32 odt_ohm)
  664. {
  665. u32 odt;
  666. if (odt_ohm == 0)
  667. odt = LPDDR4_DQODT_DIS;
  668. else if (odt_ohm <= 40)
  669. odt = LPDDR4_DQODT_40;
  670. else if (odt_ohm <= 48)
  671. odt = LPDDR4_DQODT_48;
  672. else if (odt_ohm <= 60)
  673. odt = LPDDR4_DQODT_60;
  674. else if (odt_ohm <= 80)
  675. odt = LPDDR4_DQODT_80;
  676. else if (odt_ohm <= 120)
  677. odt = LPDDR4_DQODT_120;
  678. else
  679. odt = LPDDR4_DQODT_240;
  680. return odt;
  681. }
  682. static void *get_ddr_drv_odt_info(u32 dramtype)
  683. {
  684. struct sdram_head_info_index_v2 *index =
  685. (struct sdram_head_info_index_v2 *)common_info;
  686. void *ddr_info = 0;
  687. if (dramtype == DDR4)
  688. ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
  689. else if (dramtype == DDR3)
  690. ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
  691. else if (dramtype == LPDDR3)
  692. ddr_info = (void *)common_info + index->lp3_index.offset * 4;
  693. else if (dramtype == LPDDR4)
  694. ddr_info = (void *)common_info + index->lp4_index.offset * 4;
  695. else
  696. printascii("unsupported dram type\n");
  697. return ddr_info;
  698. }
/*
 * Encode and program the LPDDR4/LPDDR4X CA (MR12) and DQ (MR14) VREF
 * values for frequency set-point @dst_fsp.
 *
 * The raw vref values (in what appear to be tenths of a percent of
 * VDDQ -- TODO confirm against the blob format) are taken from the
 * odt-on or odt-off field depending on whether @freq_mhz is above the
 * per-signal ODT enable frequency, then clamped and converted to the
 * mode-register "range | step" encoding.  LPDDR4 and LPDDR4X use
 * different ranges/divisors.  The INIT6/INIT7 updates are bracketed
 * by sw_set_req()/sw_set_ack() (quasi-dynamic register write).
 */
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	/* below the ODT enable frequency use the odt-off vref values */
	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		/* clamp to 100..420, then encode: bit6 selects the range */
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		/* LPDDR4X: rescale CA vref, clamp to 150..629, re-encode */
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	/* write MR12 (CA vref) and MR14 (DQ vref) into the INIT regs */
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}
/*
 * Program drive strength, ODT, slew rate and vref on both the PHY and
 * the DRAM for frequency set-point @dst_fsp.
 *
 * Target values (in ohm) come from the per-dram-type parameter blob in
 * common_info.  Ohm values are converted to PHY register codes through
 * the *_phy_drv_2_ohm / *_phy_odt_2_ohm lookup tables, and the
 * DRAM-side mode registers (MR1 for DDR3/4, MR3 for LPDDR2/3/4, plus
 * MR11/MR22 for LPDDR4) are patched into the controller INIT3/4/6/7
 * shadow registers for @dst_fsp.
 */
static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	/* lp4_info aliases the same blob; only used for LPDDR4 fields */
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* dram odt en freq control phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* phy odt en freq control dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4) {
		/* LPDDR4: phy ODT is pull-down only when enabled */
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	/* select the ohm -> register-code tables for this dram type */
	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

	/*
	 * The tables are sorted by descending ohm; scanning from the end
	 * picks the smallest table impedance >= the requested value.
	 * NOTE(review): the loop bound always uses the d3/d4lp3 table
	 * size -- this relies on all drv (and all odt) tables having the
	 * same number of entries, which they currently do.
	 */
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

	/* compute RX (vref_inner) and DRAM-side (vref_out) reference levels */
	if (dramtype != LPDDR4) {
		/* 0x80 = mid-level; otherwise derive from the divider formed
		 * by dram drive vs phy ODT (direction depends on pull-up/down)
		 */
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				     (phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 and lp4x*/
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}

	/* default ZQCALIB bypass mode */
	/* CA/CLK drive strength: regs 0x100-0x103, 0x107/0x108 */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	if (dramtype == LPDDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
	}
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	/* bit is active-low in the register, so invert the blob value */
	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	/* per-byte-lane ODT, DQ drive, vref and slew rate (4 lanes) */
	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		if (dramtype == LPDDR4)
			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);

	/* fetch the current MR1 (DDR3/4) or MR3 (LPDDRx) from INIT3/INIT4 */
	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	/* patch the DRAM drive-strength and termination fields per type */
	if (dramtype == DDR3) {
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR3_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR3_RTT_NOM_40;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR3_RTT_NOM_60;
		else
			mr1_mr3 |= DDR3_RTT_NOM_120;

	} else if (dramtype == DDR4) {
		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
		if (dram_drv_ohm == 48)
			mr1_mr3 |= DDR4_DS_48;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR4_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 34)
			mr1_mr3 |= DDR4_RTT_NOM_34;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR4_RTT_NOM_40;
		else if (dram_odt_ohm <= 48)
			mr1_mr3 |= DDR4_RTT_NOM_48;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR4_RTT_NOM_60;
		else
			mr1_mr3 |= DDR4_RTT_NOM_120;

	} else if (dramtype == LPDDR3) {
		if (dram_drv_ohm <= 34)
			mr1_mr3 |= LPDDR3_DS_34;
		else if (dram_drv_ohm <= 40)
			mr1_mr3 |= LPDDR3_DS_40;
		else if (dram_drv_ohm <= 48)
			mr1_mr3 |= LPDDR3_DS_48;
		else if (dram_drv_ohm <= 60)
			mr1_mr3 |= LPDDR3_DS_60;
		else if (dram_drv_ohm <= 80)
			mr1_mr3 |= LPDDR3_DS_80;

		/* LPDDR3 ODT goes to MR11 later, stash the code globally */
		if (dram_odt_ohm == 0)
			lp3_odt_value = LPDDR3_ODT_DIS;
		else if (dram_odt_ohm <= 60)
			lp3_odt_value = LPDDR3_ODT_60;
		else if (dram_odt_ohm <= 120)
			lp3_odt_value = LPDDR3_ODT_120;
		else
			lp3_odt_value = LPDDR3_ODT_240;
	} else {/* for lpddr4 and lpddr4x */
		/* MR3 for lp4 PU-CAL and PDDS */
		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
		mr1_mr3 |= lp4_pu_cal;
		tmp = lp4_odt_calc(dram_drv_ohm);
		if (!tmp)
			tmp = LPDDR4_PDDS_240;
		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);

		/* MR11 for lp4 ca odt, dq odt set */
		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;

		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);

		tmp = lp4_odt_calc(dram_odt_ohm);
		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
		tmp = lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	/* write the patched MR1/MR3 back into the INIT shadow register */
	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}
/*
 * Program the per-dram-type byte-lane map into PHY register 0x4f.
 *
 * map_info->byte_map[] packs one 8-bit map per dram type, four types
 * per u32; the byte for @dramtype is extracted and written.  Types
 * above LPDDR4 leave the register at its current value.
 * Always returns 0.
 */
static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* dq_map blob lives at a word offset inside common_info */
	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}
  1081. static void phy_cfg(struct dram_info *dram,
  1082. struct rv1126_sdram_params *sdram_params)
  1083. {
  1084. struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
  1085. void __iomem *phy_base = dram->phy;
  1086. u32 i, dq_map, tmp;
  1087. u32 byte1 = 0, byte0 = 0;
  1088. sdram_cmd_dq_path_remap(dram, sdram_params);
  1089. phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
  1090. for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
  1091. writel(sdram_params->phy_regs.phy[i][1],
  1092. phy_base + sdram_params->phy_regs.phy[i][0]);
  1093. }
  1094. clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
  1095. dq_map = readl(PHY_REG(phy_base, 0x4f));
  1096. for (i = 0; i < 4; i++) {
  1097. if (((dq_map >> (i * 2)) & 0x3) == 0)
  1098. byte0 = i;
  1099. if (((dq_map >> (i * 2)) & 0x3) == 1)
  1100. byte1 = i;
  1101. }
  1102. tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
  1103. if (cap_info->bw == 2)
  1104. tmp |= 0xf;
  1105. else if (cap_info->bw == 1)
  1106. tmp |= ((1 << byte0) | (1 << byte1));
  1107. else
  1108. tmp |= (1 << byte0);
  1109. writel(tmp, PHY_REG(phy_base, 0xf));
  1110. /* lpddr4 odt control by phy, enable cs0 odt */
  1111. if (sdram_params->base.dramtype == LPDDR4)
  1112. clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
  1113. (1 << 6) | (1 << 4));
  1114. /* for ca training ca vref choose range1 */
  1115. setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
  1116. setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
  1117. /* for wr training PHY_0x7c[5], choose range0 */
  1118. clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
  1119. }
  1120. static int update_refresh_reg(struct dram_info *dram)
  1121. {
  1122. void __iomem *pctl_base = dram->pctl;
  1123. u32 ret;
  1124. ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
  1125. writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
  1126. return 0;
  1127. }
  1128. /*
  1129. * rank = 1: cs0
  1130. * rank = 2: cs1
  1131. */
/*
 * Read DRAM mode register @mr_num from the given rank and return its
 * 8-bit value.
 *
 * For everything except LPDDR4 the raw byte from the GRF status
 * register is bit-shuffled back through the board's DQ map (4 bits
 * per DQ position, from the dq_map blob in common_info).  For LPDDR4
 * the value is taken from ddr_grf_status[1] without remapping.
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	u32 dqmap;

	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	/* select the DQ0-7 map for the dram type (LPDDR2 vs others) */
	if (dramtype == LPDDR2)
		dqmap = map_info->lp2_dq0_7_map;
	else
		dqmap = map_info->lp3_dq0_7_map;

	pctl_read_mr(pctl_base, rank, mr_num);

	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);

	if (dramtype != LPDDR4) {
		/* move each result bit to the position the board wired it to */
		temp = 0;
		for (i = 0; i < 8; i++) {
			temp = temp | (((ret >> i) & 0x1) <<
				       ((dqmap >> (i * 4)) & 0xf));
		}
	} else {
		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
	}

	return temp;
}
  1160. /* before call this function autorefresh should be disabled */
  1161. void send_a_refresh(struct dram_info *dram)
  1162. {
  1163. void __iomem *pctl_base = dram->pctl;
  1164. while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
  1165. continue;
  1166. writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
  1167. }
/*
 * Software-controlled self-refresh.
 * en != 0: request self-refresh and wait until the controller reports
 *          software-initiated (not automatic) self-refresh AND the
 *          operating mode is self-refresh.
 * en == 0: release the request and wait until self-refresh is exited.
 */
static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			/* both the SR type and the operating mode must match */
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
			PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}
/*
 * For each of the four byte groups, sample the left/right loop
 * inverse-delay-select values for every dq_sel[] entry and store them
 * into the group's registers.
 * dq_sel[i][0] is the selector written to +0x2c/+0x2d; the sampled
 * values read from +0x2e/+0x2f are written back at the destination
 * offsets dq_sel[i][1]/dq_sel[i][2].
 */
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));
			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}
/*
 * Apply the recorded RX per-bit de-skew values: clear bits 1 and 6 of
 * PHY reg 0x70 and pulse bit 4 (presumably the update strobe — the
 * bit is raised, held 1us, then dropped).
 */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
/*
 * Apply the recorded TX per-bit de-skew values: clear 0x7a[1], set
 * 0x2[3], then pulse 0xc[6] (held 1us before clearing).
 */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
/*
 * Apply the CA per-bit de-skew values: clear 0x25[2], then pulse
 * 0x22[6] (held 1us before clearing).
 */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
/*
 * dir: 0: de-skew = delta_*
 *      1: de-skew = current reg value + delta_* (deltas may be negative)
 * delta_dif: value for differential signals: clk/clk#
 * delta_sig: value for single-ended signals: ca/cmd
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	/* cs == 0 -> cs0 only, cs == 2 -> cs1 only, otherwise both */
	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	/*
	 * LPDDR4: temporarily force DFI low-power off (0x60[5]) while
	 * de-skew is updated; restored at the end if it was changed.
	 */
	if (dramtype == LPDDR4 &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	/* modify the CA delays with the DRAM held in self-refresh */
	enter_sr(dram, 1);

	/* 0x150..0x16f: per-CA-signal de-skew registers */
	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	/* 0x150+0x17/0x18: clock pair — compensate out delta_sig first */
	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		      delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4) {
		/* LPDDR4: mirror the clock value to 0x4/0xa as well */
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
}
  1284. static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
  1285. {
  1286. u32 i, j, offset = 0;
  1287. u32 min = 0x3f;
  1288. void __iomem *phy_base = dram->phy;
  1289. u32 byte_en;
  1290. if (signal == SKEW_TX_SIGNAL)
  1291. offset = 8;
  1292. if (signal == SKEW_CA_SIGNAL) {
  1293. for (i = 0; i < 0x20; i++)
  1294. min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
  1295. } else {
  1296. byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
  1297. for (j = offset; j < offset + rank * 4; j++) {
  1298. if (!((byte_en >> (j % 4)) & 1))
  1299. continue;
  1300. for (i = 0; i < 11; i++)
  1301. min = MIN(min,
  1302. readl(PHY_REG(phy_base,
  1303. dqs_dq_skew_adr[j] +
  1304. i)));
  1305. }
  1306. }
  1307. return min;
  1308. }
  1309. static u32 low_power_update(struct dram_info *dram, u32 en)
  1310. {
  1311. void __iomem *pctl_base = dram->pctl;
  1312. u32 lp_stat = 0;
  1313. if (en) {
  1314. setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
  1315. } else {
  1316. lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
  1317. clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
  1318. }
  1319. return lp_stat;
  1320. }
/*
 * signal: SKEW_RX_SIGNAL or SKEW_TX_SIGNAL
 * dir: 0: de-skew = delta_*
 *      1: de-skew = current reg value + delta_* (deltas may be negative)
 * delta_dif: value for differential signal: dqs
 * delta_sig: value for single signal: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	/* RX registers come first; TX are 8 entries further */
	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		/* skip byte lanes that are disabled in PHY reg 0xf */
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		/* entries 0..8: dq/dm single-ended de-skew */
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		/* entries 9/0xa: the dqs differential pair */
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
						dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	/* latch the new values into the datapath */
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}
/*
 * Run read-gate training for chip-select @cs.
 *
 * For non-LPDDR4 types the per-lane ODT is temporarily forced to a
 * fixed value (294 ohm pull-down, pull-up disabled) and restored
 * afterwards.  Auto ZQCS/auto-refresh are suspended for the duration.
 * Returns 0 on success; non-zero bits indicate lanes that failed
 * (training status from reg 0x91 XORed with the enabled-lane mask).
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	/* save the current lane-0 ODT codes so they can be restored */
	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* reduce the status to per-lane fail bits for the enabled lanes */
	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4) {
		/* restore the saved ODT codes on all four lanes */
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
/*
 * Write leveling for one chip select.
 *
 * The MR1 value is taken from the controller's INIT3 register for the
 * current frequency set point; for dual-rank DDR3/DDR4 the other rank's
 * output is disabled (MR1 bit 12) for the duration of the training.
 * On timeout an error is printed and the function hangs forever.
 * Returns 0.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* MR1 value as programmed in INIT3 of the active frequency slot */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until done flags (0x92) cover every enabled lane (0xf) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
/*
 * 32-byte 0xaa/0x55 data pattern — presumably used as a DQ test/training
 * pattern; confirm against the callers.
 * NOTE(review): initializers above 0x7f rely on implementation-defined
 * narrowing when plain char is signed; unsigned char would be cleaner,
 * but check for external users before changing the type.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
/*
 * Hardware read (eye) training for one chip select.
 *
 * For DDR3 with the default inner RX vref (0x80) the vref is temporarily
 * lowered by 0xa during training and restored afterwards.  Refresh timing
 * is mirrored from the controller into the PHY, the DDR4 DQ map is
 * programmed when needed, then the PHY's automatic read train is run and
 * the per-channel status registers are checked.
 * Returns 0 on success, -1 on bad cs / timeout / training error.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	/* DDR3 at default vref: train with the vref lowered by 0xa */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}
	map_info = (struct dq_map_info *)((void *)common_info +
					  index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);
	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the original DDR3 inner RX vref */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
/*
 * Hardware write (eye) training for one chip select.
 *
 * For LPDDR3 at <= 400 MHz the PHY CL/CWL registers are temporarily
 * forced to 8/4 and MR2 to 0x6, then restored after training.  The
 * training address is set to row/bank/column 0, refresh timing is
 * mirrored into the PHY and the PHY's automatic write train is run.
 * On timeout an error is printed and the function hangs forever.
 * For LPDDR4 the trained write vref is saved into fsp_param[dst_fsp]
 * for later DFS use.  Returns 0 on success, -1 on a training error.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	if (dramtype == LPDDR3 && mhz <= 400) {
		/* active PHY frequency slot selects the CL/CWL registers */
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	/* wait for the done flag in PHY 0x92[7] */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the saved CL/CWL and the INIT3 MR2 value */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
  1656. static int data_training(struct dram_info *dram, u32 cs,
  1657. struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
  1658. u32 training_flag)
  1659. {
  1660. u32 ret = 0;
  1661. if (training_flag == FULL_TRAINING)
  1662. training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
  1663. WRITE_TRAINING | READ_TRAINING;
  1664. if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
  1665. ret = data_training_wl(dram, cs,
  1666. sdram_params->base.dramtype,
  1667. sdram_params->ch.cap_info.rank);
  1668. if (ret != 0)
  1669. goto out;
  1670. }
  1671. if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
  1672. ret = data_training_rg(dram, cs,
  1673. sdram_params->base.dramtype);
  1674. if (ret != 0)
  1675. goto out;
  1676. }
  1677. if ((training_flag & READ_TRAINING) == READ_TRAINING) {
  1678. ret = data_training_rd(dram, cs,
  1679. sdram_params->base.dramtype,
  1680. sdram_params->base.ddr_freq);
  1681. if (ret != 0)
  1682. goto out;
  1683. }
  1684. if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
  1685. ret = data_training_wr(dram, cs,
  1686. sdram_params->base.dramtype,
  1687. sdram_params->base.ddr_freq, dst_fsp);
  1688. if (ret != 0)
  1689. goto out;
  1690. }
  1691. out:
  1692. return ret;
  1693. }
  1694. static int get_wrlvl_val(struct dram_info *dram,
  1695. struct rv1126_sdram_params *sdram_params)
  1696. {
  1697. int i, j, clk_skew;
  1698. void __iomem *phy_base = dram->phy;
  1699. u32 lp_stat;
  1700. int ret;
  1701. lp_stat = low_power_update(dram, 0);
  1702. clk_skew = 0x1f;
  1703. modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
  1704. sdram_params->base.dramtype);
  1705. ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
  1706. if (sdram_params->ch.cap_info.rank == 2)
  1707. ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
  1708. for (j = 0; j < 2; j++)
  1709. for (i = 0; i < 4; i++)
  1710. wrlvl_result[j][i] =
  1711. (readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
  1712. clk_skew;
  1713. low_power_update(dram, lp_stat);
  1714. return ret;
  1715. }
  1716. #if defined(CONFIG_CMD_DDR_TEST_TOOL)
  1717. static void init_rw_trn_result_struct(struct rw_trn_result *result,
  1718. void __iomem *phy_base, u8 cs_num)
  1719. {
  1720. int i;
  1721. result->cs_num = cs_num;
  1722. result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
  1723. PHY_DQ_WIDTH_MASK;
  1724. for (i = 0; i < FSP_NUM; i++)
  1725. result->fsp_mhz[i] = 0;
  1726. }
  1727. static void save_rw_trn_min_max(void __iomem *phy_base,
  1728. struct cs_rw_trn_result *rd_result,
  1729. struct cs_rw_trn_result *wr_result,
  1730. u8 byte_en)
  1731. {
  1732. u16 phy_ofs;
  1733. u8 dqs;
  1734. u8 dq;
  1735. for (dqs = 0; dqs < BYTE_NUM; dqs++) {
  1736. if ((byte_en & BIT(dqs)) == 0)
  1737. continue;
  1738. /* Channel A or B (low or high 16 bit) */
  1739. phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
  1740. /* low or high 8 bit */
  1741. phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
  1742. for (dq = 0; dq < 8; dq++) {
  1743. rd_result->dqs[dqs].dq_min[dq] =
  1744. readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
  1745. rd_result->dqs[dqs].dq_max[dq] =
  1746. readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
  1747. wr_result->dqs[dqs].dq_min[dq] =
  1748. readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
  1749. wr_result->dqs[dqs].dq_max[dq] =
  1750. readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
  1751. }
  1752. }
  1753. }
  1754. static void save_rw_trn_deskew(void __iomem *phy_base,
  1755. struct fsp_rw_trn_result *result, u8 cs_num,
  1756. int min_val, bool rw)
  1757. {
  1758. u16 phy_ofs;
  1759. u8 cs;
  1760. u8 dq;
  1761. result->min_val = min_val;
  1762. for (cs = 0; cs < cs_num; cs++) {
  1763. phy_ofs = cs == 0 ? 0x170 : 0x1a0;
  1764. phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
  1765. for (dq = 0; dq < 8; dq++) {
  1766. result->cs[cs].dqs[0].dq_deskew[dq] =
  1767. readb(PHY_REG(phy_base, phy_ofs + dq));
  1768. result->cs[cs].dqs[1].dq_deskew[dq] =
  1769. readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
  1770. result->cs[cs].dqs[2].dq_deskew[dq] =
  1771. readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
  1772. result->cs[cs].dqs[3].dq_deskew[dq] =
  1773. readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
  1774. }
  1775. result->cs[cs].dqs[0].dqs_deskew =
  1776. readb(PHY_REG(phy_base, phy_ofs + 0x8));
  1777. result->cs[cs].dqs[1].dqs_deskew =
  1778. readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
  1779. result->cs[cs].dqs[2].dqs_deskew =
  1780. readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
  1781. result->cs[cs].dqs[3].dqs_deskew =
  1782. readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
  1783. }
  1784. }
/*
 * Tag the collected training result with DDR_DQ_EYE_FLAG and copy it to
 * the fixed address RW_TRN_RESULT_ADDR for the ddr test tool to pick up.
 */
static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
{
	result->flag = DDR_DQ_EYE_FLAG;
	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
}
  1790. #endif
/*
 * Full training pass for the high-frequency set point @fsp.
 *
 * Derives clk/ca skew from the write-leveling results previously captured
 * in wrlvl_result[][], runs gate/read/write training per rank, then
 * normalizes the RX, TX and CA de-skew values (shifting all of them so
 * the minimum becomes 0) and finally re-runs gate training.
 * Returns 0 on success, non-zero on any training failure.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	u8 byte_en;
	int ret;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
	/* average write-leveling skew over all enabled lanes and ranks */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
			if ((byte_en & BIT(i)) != 0)
				dqs_skew += wrlvl_result[j][i];
		}
	}
	dqs_skew = dqs_skew /
		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));

	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4) {
		/* shift clk/ca just enough that no result goes negative */
		min_val = 0xff;
		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
			for (i = 0; i < sdram_params->ch.cap_info.bw; i++)
				min_val = MIN(wrlvl_result[j][i], min_val);

		if (min_val < 0) {
			clk_skew = -min_val;
			ca_skew = -min_val;
		} else {
			clk_skew = 0;
			ca_skew = 0;
		}
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* cs0 wr-train DQS defaults (bypass regs 0x233/0x237/0x2b3/0x2b7) */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	if (sdram_params->ch.cap_info.rank == 2) {
		/* same again for cs1 */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* normalize RX de-skew so the smallest value becomes 0 */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* normalize TX and CA de-skew by their common minimum */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);
	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* re-run gate training with the adjusted skews */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
/*
 * Program the memory scheduler address-mapping configuration and clear
 * grf noc_con0[1:0].
 */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->deviceconf);
	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
}
/*
 * Recompute the NoC timing fields that depend on data-bus width and
 * burst length, then program all msch timing registers from
 * sdram_params->ch.noc_timings.
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 bw, bl;

	/* data bus width in bits; MSTR[19:16] holds burst length / 2 */
	bw = 8 << sdram_params->ch.cap_info.bw;
	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;

	/* update the noc timing related to data bus width */
	if ((bw / 8 * bl) <= 16)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
	else if ((bw / 8 * bl) == 32)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
	else if ((bw / 8 * bl) == 64)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
	else
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;

	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;

	if (sdram_params->base.dramtype == LPDDR4) {
		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
			(bw == 16) ? 0x1 : 0x2;
		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
	}

	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
}
/*
 * Set up the DDR split function for asymmetric capacity, i.e. when the
 * high half of the data bus backs fewer rows than the low half
 * (cs*_high16bit_row < cs*_row).  Computes the capacity reachable at
 * full width, programs split mode/size in the DDR GRF and disables the
 * msch AXI bypass.  Returns without touching any register when the
 * capacity is symmetric.  Always returns 0.
 */
static int split_setup(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dramtype = sdram_params->base.dramtype;
	u32 split_size, split_mode;
	u64 cs_cap[2], cap;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
	/* only support the larger cap is in low 16bit */
	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
					 cap_info->cs0_high16bit_row));
	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
		   (cap_info->rank == 2)) {
		if (!cap_info->cs1_high16bit_row)
			cap = cs_cap[0];
		else
			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
							     cap_info->cs1_high16bit_row));
	} else {
		goto out;
	}
	/* split boundary in 16 MiB units (cap >> 24) */
	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
	if (cap_info->bw == 2)
		split_mode = SPLIT_MODE_32_L16_VALID;
	else
		split_mode = SPLIT_MODE_16_L8_VALID;

	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (split_mode << SPLIT_MODE_OFFSET) |
		     (0x0 << SPLIT_BYPASS_OFFSET) |
		     (split_size << SPLIT_SIZE_OFFSET));

	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);

out:
	return 0;
}
  1974. static void split_bypass(struct dram_info *dram)
  1975. {
  1976. if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
  1977. (1 << SPLIT_BYPASS_OFFSET)) != 0)
  1978. return;
  1979. /* bypass split */
  1980. rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
  1981. (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
  1982. (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
  1983. (0x1 << SPLIT_BYPASS_OFFSET) |
  1984. (0x0 << SPLIT_SIZE_OFFSET));
  1985. }
/*
 * Final system-level DRAM configuration: NoC ddrconfig, the pmugrf OS
 * registers describing the detected geometry, the msch devicesize
 * register (per-cs capacity in 64 MiB units) and the NoC timings.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/* cs address-bit position derived from ADDRMAP0[4:0] + 8 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			 6 + 2;
		if (cs_pst > 28)
			cs_cap[0] = 1llu << cs_pst;
	}

	/* devicesize: cs1 size in bits [15:8], cs0 in [7:0], 64 MiB units */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
	       (((cs_cap[0] >> 20) / 64) & 0xff),
	       &dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
/*
 * Enable the DRAM low-power features: the type-specific enable bit and
 * lpckdis_en in the DDR GRF low-power control, plus the controller
 * PWRCTL power-down/self-refresh enables, gated on the configured
 * dram->pd_idle / dram->sr_idle values.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 grf_lp_con;

	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);

	/* type-specific enable bit; upper half-word is the write mask */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);

	/* enable sr, pd */
	if (dram->pd_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (dram->sr_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
  2040. static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
  2041. {
  2042. u32 split;
  2043. if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
  2044. (1 << SPLIT_BYPASS_OFFSET)) != 0)
  2045. split = 0;
  2046. else
  2047. split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
  2048. SPLIT_SIZE_MASK;
  2049. sdram_print_ddr_info(&sdram_params->ch.cap_info,
  2050. &sdram_params->base, split);
  2051. }
/*
 * Core DRAM bring-up sequence at the target frequency.
 *
 * Order: clock config -> staged reset release with PHY then controller
 * configuration -> mode-register programming (LPDDR3/LPDDR4) ->
 * read-gate training (cs0, and cs1 when post_init) -> DDR4 vref ->
 * final address map / NoC / low-power setup.
 * @post_init: non-zero on the second (final) init pass; enables cs1
 *             training and error messages.
 * Returns 0 on success, -1 on a training or LPDDR4 MR14 readback failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2) {
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
		/* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* halve tREFI for extended temperature range */
	u32 tmp, trefi;

	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	/* wait for the controller to leave the init state */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4) {
		/* replay the INIT6/INIT7 mode-register values to the DRAM */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	/*
	 * Sanity check: LPDDR4 MR14 should read back 0x4d —
	 * NOTE(review): presumably validates the link; confirm intent.
	 */
	if (sdram_params->base.dramtype == LPDDR4) {
		mr_tmp = read_mr(dram, 1, 14, LPDDR4);

		if (mr_tmp != 0x4d)
			return -1;
	}

	if (sdram_params->base.dramtype == LPDDR4) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* PHY reg 0x105 holds the trained vref step; 39 = scale */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
  2146. static u64 dram_detect_cap(struct dram_info *dram,
  2147. struct rv1126_sdram_params *sdram_params,
  2148. unsigned char channel)
  2149. {
  2150. struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
  2151. void __iomem *pctl_base = dram->pctl;
  2152. void __iomem *phy_base = dram->phy;
  2153. u32 mr8;
  2154. u32 bktmp;
  2155. u32 coltmp;
  2156. u32 rowtmp;
  2157. u32 cs;
  2158. u32 dram_type = sdram_params->base.dramtype;
  2159. u32 pwrctl;
  2160. u32 i, dq_map;
  2161. u32 byte1 = 0, byte0 = 0;
  2162. u32 tmp, byte;
  2163. struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
  2164. struct dq_map_info *map_info = (struct dq_map_info *)
  2165. ((void *)common_info + index->dq_map_index.offset * 4);
  2166. cap_info->bw = dram_type == DDR3 ? 0 : 1;
  2167. if (dram_type != LPDDR4) {
  2168. if (dram_type != DDR4) {
  2169. coltmp = 12;
  2170. bktmp = 3;
  2171. if (dram_type == LPDDR2)
  2172. rowtmp = 15;
  2173. else
  2174. rowtmp = 16;
  2175. if (sdram_detect_col(cap_info, coltmp) != 0)
  2176. goto cap_err;
  2177. sdram_detect_bank(cap_info, coltmp, bktmp);
  2178. if (dram_type != LPDDR3)
  2179. sdram_detect_dbw(cap_info, dram_type);
  2180. } else {
  2181. coltmp = 10;
  2182. bktmp = 4;
  2183. rowtmp = 17;
  2184. cap_info->col = 10;
  2185. cap_info->bk = 2;
  2186. sdram_detect_bg(cap_info, coltmp);
  2187. }
  2188. if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
  2189. goto cap_err;
  2190. sdram_detect_row_3_4(cap_info, coltmp, bktmp);
  2191. } else {
  2192. cap_info->col = 10;
  2193. cap_info->bk = 3;
  2194. mr8 = read_mr(dram, 1, 8, dram_type);
  2195. cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
  2196. mr8 = (mr8 >> 2) & 0xf;
  2197. if (mr8 >= 0 && mr8 <= 6) {
  2198. cap_info->cs0_row = 14 + (mr8 + 1) / 2;
  2199. } else if (mr8 == 0xc) {
  2200. cap_info->cs0_row = 13;
  2201. } else {
  2202. printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
  2203. goto cap_err;
  2204. }
  2205. if (cap_info->dbw == 0)
  2206. cap_info->cs0_row++;
  2207. cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
  2208. if (cap_info->cs0_row >= 17) {
  2209. printascii("Cap ERR: ");
  2210. printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
  2211. goto cap_err;
  2212. // cap_info->cs0_row = 16;
  2213. // cap_info->row_3_4 = 0;
  2214. }
  2215. }
  2216. pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
  2217. writel(0, pctl_base + DDR_PCTL2_PWRCTL);
  2218. if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
  2219. cs = 1;
  2220. else
  2221. cs = 0;
  2222. cap_info->rank = cs + 1;
  2223. setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
  2224. tmp = data_training_rg(dram, 0, dram_type) & 0xf;
  2225. if (tmp == 0) {
  2226. cap_info->bw = 2;
  2227. } else {
  2228. if (dram_type == DDR3 || dram_type == DDR4) {
  2229. dq_map = 0;
  2230. byte = 0;
  2231. for (i = 0; i < 4; i++) {
  2232. if ((tmp & BIT(i)) == 0) {
  2233. dq_map |= byte << (i * 2);
  2234. byte++;
  2235. }
  2236. }
  2237. cap_info->bw = byte / 2;
  2238. for (i = 0; i < 4; i++) {
  2239. if ((tmp & BIT(i)) != 0) {
  2240. dq_map |= byte << (i * 2);
  2241. byte++;
  2242. }
  2243. }
  2244. clrsetbits_le32(&map_info->byte_map[0], 0xff << 24, dq_map << 24);
  2245. } else {
  2246. dq_map = readl(PHY_REG(phy_base, 0x4f));
  2247. for (i = 0; i < 4; i++) {
  2248. if (((dq_map >> (i * 2)) & 0x3) == 0)
  2249. byte0 = i;
  2250. if (((dq_map >> (i * 2)) & 0x3) == 1)
  2251. byte1 = i;
  2252. }
  2253. clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
  2254. BIT(byte0) | BIT(byte1));
  2255. if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
  2256. cap_info->bw = 1;
  2257. else
  2258. cap_info->bw = 0;
  2259. }
  2260. }
  2261. if (cap_info->bw > 0)
  2262. cap_info->dbw = 1;
  2263. writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
  2264. cap_info->cs0_high16bit_row = cap_info->cs0_row;
  2265. if (cs) {
  2266. cap_info->cs1_row = cap_info->cs0_row;
  2267. cap_info->cs1_high16bit_row = cap_info->cs0_row;
  2268. } else {
  2269. cap_info->cs1_row = 0;
  2270. cap_info->cs1_high16bit_row = 0;
  2271. }
  2272. if (dram_type == LPDDR3)
  2273. sdram_detect_dbw(cap_info, dram_type);
  2274. return 0;
  2275. cap_err:
  2276. return -1;
  2277. }
/*
 * Detect the row count of rank 1 (CS1) by walking candidate row counts
 * downward and pattern-testing the address where that top row bit would
 * alias back onto the rank-1 base.
 *
 * @dram:         driver state
 * @sdram_params: detected geometry (rank, bank, col, bw, cs0_row)
 * @channel:      unused
 *
 * Return: detected CS1 row count, or 0 when rank 1 is absent or no row
 * count between 13 and cs0_row verifies.
 */
static int dram_detect_cs1_row(struct dram_info *dram,
			       struct rv1126_sdram_params *sdram_params,
			       unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ret = 0;
	void __iomem *test_addr;
	u32 row, bktmp, coltmp, bw;
	u64 cs0_cap;
	u32 byte_mask;
	u32 cs_pst;
	u32 cs_add = 0;
	u32 max_row;

	if (cap_info->rank == 2) {
		/* CS bit position in the system address (ADDRMAP0 + fixed base) */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			 6 + 2;
		if (cs_pst < 28)
			cs_add = 1;
		cs0_cap = 1 << cs_pst;

		if (sdram_params->base.dramtype == DDR4) {
			/* fold bank-group bits into the bank count */
			if (cap_info->dbw == 0)
				bktmp = cap_info->bk + 2;
			else
				bktmp = cap_info->bk + 1;
		} else {
			bktmp = cap_info->bk;
		}
		bw = cap_info->bw;
		coltmp = cap_info->col;

		/* compare only the byte lanes that are actually wired */
		if (bw == 2)
			byte_mask = 0xFFFF;
		else
			byte_mask = 0xFF;

		/* clamp row so the test address stays inside the 32-bit window */
		max_row = (cs_pst == 31) ? 30 : 31;

		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;

		row = (cap_info->cs0_row > max_row) ? max_row :
			cap_info->cs0_row;

		for (; row > 12; row--) {
			test_addr = (void __iomem *)(CFG_SYS_SDRAM_BASE +
				    (u32)cs0_cap +
				    (1ul << (row + bktmp + coltmp +
					     cs_add + bw - 1ul)));

			writel(0, CFG_SYS_SDRAM_BASE + (u32)cs0_cap);
			writel(PATTERN, test_addr);

			/* pattern must stick and must not alias onto the base */
			if (((readl(test_addr) & byte_mask) ==
			     (PATTERN & byte_mask)) &&
			    ((readl(CFG_SYS_SDRAM_BASE + (u32)cs0_cap) &
			      byte_mask) == 0)) {
				ret = row;
				break;
			}
		}
	}

	return ret;
}
/*
 * Full init-plus-detection sequence: bring the controller up with default
 * parameters, probe the real geometry, then re-init with the detected
 * parameters and record CS1 row info in the PMU OS registers.
 *
 * return: 0 = success, other = fail
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	/* first init attempt; DDR3 gets one retry with an alternate byte map */
	if (sdram_init_(dram, sdram_params, 0)) {
		if (sdram_params->base.dramtype == DDR3) {
			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
					 (0x0 << 0)) << 24);
			if (sdram_init_(dram, sdram_params, 0))
				return -1;
		} else {
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR3) {
		/* sanity write/read to confirm the data bus actually works */
		writel(PATTERN, CFG_SYS_SDRAM_BASE);
		if (readl(CFG_SYS_SDRAM_BASE) != PATTERN)
			return -1;
	}

	split_bypass(dram);
	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	/* regenerate pctl settings for the geometry just detected */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	cap_info->cs1_row =
		dram_detect_cs1_row(dram, sdram_params, 0);
	if (cap_info->cs1_row) {
		/* publish CS1 row count in OS registers for later boot stages */
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	sdram_detect_high_row(cap_info);
	split_setup(dram, sdram_params);
out:
	return ret;
}
  2386. struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
  2387. {
  2388. u32 i;
  2389. u32 offset = 0;
  2390. struct ddr2_3_4_lp2_3_info *ddr_info;
  2391. if (!freq_mhz) {
  2392. ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
  2393. if (ddr_info)
  2394. freq_mhz =
  2395. (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
  2396. DDR_FREQ_MASK;
  2397. else
  2398. freq_mhz = 0;
  2399. }
  2400. for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
  2401. if (sdram_configs[i].base.ddr_freq == 0 ||
  2402. freq_mhz < sdram_configs[i].base.ddr_freq)
  2403. break;
  2404. }
  2405. offset = i == 0 ? 0 : i - 1;
  2406. return &sdram_configs[offset];
  2407. }
/*
 * Controller registers whose per-frequency values must be rewritten into
 * the destination FSP register copy on every rate change (see pre_set_rate).
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
/* phy timing registers (cl/cwl/al set) refreshed on every rate change */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
/*
 * Program the controller and phy timing set for the target frequency slot
 * ahead of the actual clock switch, and (for LPDDR4) mirror the mode
 * register values into both the DRAM and the phy shadow registers.
 *
 * @dram:         driver state
 * @sdram_params: timing tables for the target frequency
 * @dst_fsp:      target controller frequency set point (0..3)
 * @dst_fsp_lp4:  LPDDR4 FSP encoding folded into MR13
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/*
	 * pctl timing update: both the table and pctl_need_update_reg are
	 * ordered, so each search resumes from the previous hit ("find").
	 */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* halve tREFI (RFSHTMG[27:16]) for extended-temperature operation */
	u32 tmp, trefi;

	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/* phy timing update: non-zero FSPs live in a shadow register bank */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);

	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4) {
		/* mirror each MR write into the phy (regs 0x17..0x1d) */
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
		       ((0x2 << 6) >> dst_fsp_lp4),
		       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
/*
 * Snapshot the drive-strength/ODT/vref values and NoC timings that were
 * just programmed for @dst_fsp into fsp_param[dst_fsp], so the whole set
 * can later be handed to the OS (see copy_fsp_param_to_ddr).
 *
 * @dram:         driver state
 * @dst_fsp:      frequency set point the values belong to (0..3)
 * @sdram_params: timing set used for this frequency
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	if (sdram_params->base.dramtype == LPDDR4) {
		/* LPDDR4 read termination uses the pull-down path only */
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* read-ODT value comes from whichever direction is enabled */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	if (sdram_params->base.dramtype == DDR3) {
		/* DS/ODT are encoded in the MR1 image kept in INIT3 */
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/* vref_ca: midpoint of MAX/MIN pairs read from training regs */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		/* carry over bit 6 of phy reg 0x1e into both vref_ca values */
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* mark this FSP slot as valid */
	p_fsp_param->flag = FSP_FLAG;
}
/* Copy the saved per-FSP parameter array to FSP_PARAM_STORE_ADDR in DRAM */
static void copy_fsp_param_to_ddr(void)
{
	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
	       sizeof(fsp_param));
}
/*
 * Patch the tRFC-derived fields of the pctl register table in place for
 * the detected per-die capacity and the target frequency. The trfc_ns
 * figures follow the per-density refresh tables of the respective DRAM
 * standards.
 *
 * @pctl_regs: register/value table (0xffffffff-terminated), modified in place
 * @cap_info:  detected capacity, used to derive the per-die density
 * @dram_type: DDR3 / DDR4 / LPDDR3 / LPDDR4
 * @freq:      target frequency in MHz
 */
static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
			     struct sdram_cap_info *cap_info, u32 dram_type,
			     u32 freq)
{
	u64 cs0_cap;
	u32 die_cap;
	u32 trfc_ns, trfc4_ns;
	u32 trfc, txsnr;
	u32 txs_abort_fast = 0;
	u32 tmp;

	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
	/* per-die capacity in Mbit units, adjusted for bus vs die width */
	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));

	switch (dram_type) {
	case DDR3:
		if (die_cap <= DIE_CAP_512MBIT)
			trfc_ns = 90;
		else if (die_cap <= DIE_CAP_1GBIT)
			trfc_ns = 110;
		else if (die_cap <= DIE_CAP_2GBIT)
			trfc_ns = 160;
		else if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 260;
		else
			trfc_ns = 350;
		/* tXS = tRFC + 10ns, rounded up to clocks, min 5 */
		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
		break;
	case DDR4:
		if (die_cap <= DIE_CAP_2GBIT) {
			trfc_ns = 160;
			trfc4_ns = 90;
		} else if (die_cap <= DIE_CAP_4GBIT) {
			trfc_ns = 260;
			trfc4_ns = 110;
		} else if (die_cap <= DIE_CAP_8GBIT) {
			trfc_ns = 350;
			trfc4_ns = 160;
		} else {
			trfc_ns = 550;
			trfc4_ns = 260;
		}
		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
		break;
	case LPDDR3:
		if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 130;
		else
			trfc_ns = 210;
		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
		break;
	case LPDDR4:
		if (die_cap <= DIE_CAP_2GBIT)
			trfc_ns = 130;
		else if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 180;
		else if (die_cap <= DIE_CAP_8GBIT)
			trfc_ns = 280;
		else
			trfc_ns = 380;
		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
		break;
	default:
		return;
	}
	trfc = (trfc_ns * freq + 999) / 1000;

	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
		switch (pctl_regs->pctl[i][0]) {
		case DDR_PCTL2_RFSHTMG:
			tmp = pctl_regs->pctl[i][1];
			/* t_rfc_min */
			tmp &= ~((u32)0x3ff);
			tmp |= ((trfc + 1) / 2) & 0x3ff;
			pctl_regs->pctl[i][1] = tmp;
			break;
		case DDR_PCTL2_DRAMTMG8:
			if (dram_type == DDR3 || dram_type == DDR4) {
				tmp = pctl_regs->pctl[i][1];
				/* t_xs_x32 */
				tmp &= ~((u32)0x7f);
				tmp |= ((txsnr + 63) / 64) & 0x7f;

				if (dram_type == DDR4) {
					/* t_xs_abort_x32 */
					tmp &= ~((u32)(0x7f << 16));
					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16;
					/* t_xs_fast_x32 */
					tmp &= ~((u32)(0x7f << 24));
					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24;
				}

				pctl_regs->pctl[i][1] = tmp;
			}
			break;
		case DDR_PCTL2_DRAMTMG14:
			if (dram_type == LPDDR3 ||
			    dram_type == LPDDR4) {
				tmp = pctl_regs->pctl[i][1];
				/* t_xsr */
				tmp &= ~((u32)0xfff);
				tmp |= ((txsnr + 1) / 2) & 0xfff;
				pctl_regs->pctl[i][1] = tmp;
			}
			break;
		default:
			break;
		}
	}
}
/*
 * Switch the DDR subsystem to @freq via frequency set point @dst_fsp:
 * program the target timing set, enter self-refresh, reprogram the DPLL
 * and phy PLLs with clocks gated, switch the controller FSP, restore the
 * DRAM mode registers, then retrain at the new frequency.
 *
 * The statement order below follows the required controller/phy handshake
 * and must not be rearranged.
 *
 * @dram:         driver state
 * @sdram_params: current configuration; rank/bw are carried over into the
 *                canned parameter set chosen for @freq
 * @freq:         target frequency in MHz
 * @cur_freq:     previous frequency in MHz (not referenced in this body)
 * @dst_fsp:      target controller frequency set point (0..3)
 * @dst_fsp_lp4:  LPDDR4 FSP encoding for MR13
 * @training_en:  not referenced in this body; training always runs via
 *                high_freq_training()
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	lp_stat = low_power_update(dram, 0);
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pctl_modify_trfc(&sdram_params_new->pctl_regs,
			 &sdram_params->ch.cap_info, dramtype, freq);
	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* wait until the controller is out of self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
		PCTL2_OPERATING_MODE_MASK) ==
	       PCTL2_OPERATING_MODE_SR)
		continue;

	/* determine DLL-off state implied by the target MR1 (INIT3 bit 0) */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	if (!cur_dll_off) {
		/* disable the DRAM DLL via MR1 before entering self-refresh */
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);
	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* gate msch and controller clocks while the PLLs are reprogrammed */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
	       (0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
	       (0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* hold the phy in reset across the PLL change */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
	       &dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
	       (0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
	       (0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
		PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* select the new frequency set point in controller and phy */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* pulse phy reg 0x71 bit 5 to latch the new setting */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* restore mode registers from the destination FSP register images */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		if (!dest_dll_off) {
			/* DLL stays on: write MR0 with DLL reset, then clear */
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				       PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				      PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4) {
		/* MR13: point FSP-OP at the newly-programmed set */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
/*
 * Cycle the DDR through all four frequency set points (f1, f2, f3, then
 * the final operating frequency f0) so every FSP slot gets trained and
 * its parameters recorded in fsp_param[].
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
	u32 f1, f2, f3;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

	/* clear both the DRAM copy and the local FSP parameter array */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;

	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
		printascii("change to: ");
		printdec(f1);
		printascii("MHz\n");
	}
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
		printascii("change to: ");
		printdec(f2);
		printascii("MHz\n");
	}
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
		printascii("change to: ");
		printdec(f3);
		printascii("MHz\n");
	}
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
		printascii("change to: ");
		printdec(f0);
		printascii("MHz(final freq)\n");
	}
	/* finish on FSP 0 = the normal operating frequency */
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
}
  2973. int get_uart_config(void)
  2974. {
  2975. struct sdram_head_info_index_v2 *index =
  2976. (struct sdram_head_info_index_v2 *)common_info;
  2977. struct global_info *gbl_info;
  2978. gbl_info = (struct global_info *)((void *)common_info +
  2979. index->global_index.offset * 4);
  2980. return gbl_info->uart_info;
  2981. }
/*
 * Top-level DRAM bring-up for SPL/TPL: validate the common_info blob,
 * apply global options, run init-plus-detection, then sweep all FSPs.
 *
 * return: 0 = success, other = fail
 */
static int rv1126_dmc_init(struct udevice *dev)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	/* validate blob version/layout before dereferencing any sub-table */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0 ||
	    index->lp4x_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];

	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		/* bit 10 of the first pctl table value selects 2T timing */
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
				  (u8)sdram_params->ch.cap_info.rank);
#endif

	ddr_set_rate_for_fsp(&dram_info, sdram_params);
	copy_fsp_param_to_ddr();
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_result_to_ddr(&rw_trn_result);
#endif

	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG))
		printascii("out\n");
	return ret;
error:
	printascii("error\n");
	return (-1);
}
  3057. #endif
/*
 * Driver-model probe. In TPL (or SPL without TPL) this performs the full
 * DRAM init; in later stages it only reads back the size left in the
 * PMU GRF os_reg by the earlier boot stage.
 */
static int rv1126_dmc_probe(struct udevice *dev)
{
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
	/*
	 * NOTE(review): an init failure still returns 0 here, so probe
	 * never reports the error to the DM core — looks intentional
	 * (init already printed the failure), but worth confirming.
	 */
	if (rv1126_dmc_init(dev))
		return 0;
#else
	struct dram_info *priv = dev_get_priv(dev);

	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
	debug("%s: grf=%p\n", __func__, priv->pmugrf);
	priv->info.base = CFG_SYS_SDRAM_BASE;
	/* Decode capacity from the os_reg[2] value written during SPL/TPL */
	priv->info.size =
		rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]);
#endif
	return 0;
}
  3074. static int rv1126_dmc_get_info(struct udevice *dev, struct ram_info *info)
  3075. {
  3076. struct dram_info *priv = dev_get_priv(dev);
  3077. *info = priv->info;
  3078. return 0;
  3079. }
/* RAM uclass operations: only .get_info is needed for this controller */
static struct ram_ops rv1126_dmc_ops = {
	.get_info = rv1126_dmc_get_info,
};
/* Device-tree compatible strings bound to this driver */
static const struct udevice_id rv1126_dmc_ids[] = {
	{ .compatible = "rockchip,rv1126-dmc" },
	{ }
};
/* Register the RV1126 memory controller with the driver-model RAM uclass */
U_BOOT_DRIVER(dmc_rv1126) = {
	.name = "rockchip_rv1126_dmc",
	.id = UCLASS_RAM,
	.of_match = rv1126_dmc_ids,
	.ops = &rv1126_dmc_ops,
	.probe = rv1126_dmc_probe,
	/* DM allocates per-device private data of this size before probe */
	.priv_auto = sizeof(struct dram_info),
};