/*
 * (C) Copyright 2016-2017 Rockchip Inc.
 *
 * SPDX-License-Identifier: GPL-2.0
 *
 * Adapted from coreboot.
 */
#include <common.h>
#include <clk.h>
#include <dm.h>
#include <dt-structs.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/sdram_rk3399.h>
#include <asm/arch/cru_rk3399.h>
#include <asm/arch/grf_rk3399.h>
#include <asm/arch/hardware.h>
#include <linux/err.h>

DECLARE_GLOBAL_DATA_PTR;

struct chan_info {
	struct rk3399_ddr_pctl_regs *pctl;
	struct rk3399_ddr_pi_regs *pi;
	struct rk3399_ddr_publ_regs *publ;
	struct rk3399_msch_regs *msch;
};

struct dram_info {
#ifdef CONFIG_SPL_BUILD
	struct chan_info chan[2];
	struct clk ddr_clk;
	struct rk3399_cru *cru;
	struct rk3399_pmucru *pmucru;
	struct rk3399_pmusgrf_regs *pmusgrf;
	struct rk3399_ddr_cic_regs *cic;
#endif
	struct ram_info info;
	struct rk3399_pmugrf_regs *pmugrf;
};

/*
 * sys_reg bitfield struct
 * [31]		row_3_4_ch1
 * [30]		row_3_4_ch0
 * [29:28]	chinfo
 * [27]		rank_ch1
 * [26:25]	col_ch1
 * [24]		bk_ch1
 * [23:22]	cs0_row_ch1
 * [21:20]	cs1_row_ch1
 * [19:18]	bw_ch1
 * [17:16]	dbw_ch1
 * [15:13]	ddrtype
 * [12]		channelnum
 * [11]		rank_ch0
 * [10:9]	col_ch0
 * [8]		bk_ch0
 * [7:6]	cs0_row_ch0
 * [5:4]	cs1_row_ch0
 * [3:2]	bw_ch0
 * [1:0]	dbw_ch0
 */
#define SYS_REG_DDRTYPE_SHIFT		13
#define SYS_REG_DDRTYPE_MASK		7
#define SYS_REG_NUM_CH_SHIFT		12
#define SYS_REG_NUM_CH_MASK		1
#define SYS_REG_ROW_3_4_SHIFT(ch)	(30 + (ch))
#define SYS_REG_ROW_3_4_MASK		1
#define SYS_REG_CHINFO_SHIFT(ch)	(28 + (ch))
#define SYS_REG_RANK_SHIFT(ch)		(11 + (ch) * 16)
#define SYS_REG_RANK_MASK		1
#define SYS_REG_COL_SHIFT(ch)		(9 + (ch) * 16)
#define SYS_REG_COL_MASK		3
#define SYS_REG_BK_SHIFT(ch)		(8 + (ch) * 16)
#define SYS_REG_BK_MASK			1
#define SYS_REG_CS0_ROW_SHIFT(ch)	(6 + (ch) * 16)
#define SYS_REG_CS0_ROW_MASK		3
#define SYS_REG_CS1_ROW_SHIFT(ch)	(4 + (ch) * 16)
#define SYS_REG_CS1_ROW_MASK		3
#define SYS_REG_BW_SHIFT(ch)		(2 + (ch) * 16)
#define SYS_REG_BW_MASK			3
#define SYS_REG_DBW_SHIFT(ch)		((ch) * 16)
#define SYS_REG_DBW_MASK		3
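
/*
 * Illustrative use of the layout above: SPL stores this descriptor in
 * PMUGRF os_reg2 (see dram_all_config() and sdram_size_mb() below), so a
 * field such as the DRAM type can be recovered with e.g.
 *	type = (readl(&pmugrf->os_reg2) >> SYS_REG_DDRTYPE_SHIFT) &
 *	       SYS_REG_DDRTYPE_MASK;
 */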

#define PRESET_SGRF_HOLD(n)	((0x1 << (6 + 16)) | ((n) << 6))
#define PRESET_GPIO0_HOLD(n)	((0x1 << (7 + 16)) | ((n) << 7))
#define PRESET_GPIO1_HOLD(n)	((0x1 << (8 + 16)) | ((n) << 8))

#define PHY_DRV_ODT_Hi_Z	0x0
#define PHY_DRV_ODT_240		0x1
#define PHY_DRV_ODT_120		0x8
#define PHY_DRV_ODT_80		0x9
#define PHY_DRV_ODT_60		0xc
#define PHY_DRV_ODT_48		0xd
#define PHY_DRV_ODT_40		0xe
#define PHY_DRV_ODT_34_3	0xf

#ifdef CONFIG_SPL_BUILD

struct rockchip_dmc_plat {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3399_dmc dtplat;
#else
	struct rk3399_sdram_params sdram_params;
#endif
	struct regmap *map;
};

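/*
 * Copy 'n' bytes of register init values from 'src' into the memory-mapped
 * registers at 'dest', one 32-bit word at a time.
 */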
static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

static void phy_dll_bypass_set(struct rk3399_ddr_publ_regs *ddr_publ_regs,
			       u32 freq)
{
	u32 *denali_phy = ddr_publ_regs->denali_phy;

	/* Per the IP spec, only frequencies of 125MHz or lower can enter DLL bypass mode */
	if (freq <= 125) {
		/* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
		setbits_le32(&denali_phy[86], (0x3 << 2) << 8);
		setbits_le32(&denali_phy[214], (0x3 << 2) << 8);
		setbits_le32(&denali_phy[342], (0x3 << 2) << 8);
		setbits_le32(&denali_phy[470], (0x3 << 2) << 8);

		/* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
		setbits_le32(&denali_phy[547], (0x3 << 2) << 16);
		setbits_le32(&denali_phy[675], (0x3 << 2) << 16);
		setbits_le32(&denali_phy[803], (0x3 << 2) << 16);
	} else {
		/* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
		clrbits_le32(&denali_phy[86], (0x3 << 2) << 8);
		clrbits_le32(&denali_phy[214], (0x3 << 2) << 8);
		clrbits_le32(&denali_phy[342], (0x3 << 2) << 8);
		clrbits_le32(&denali_phy[470], (0x3 << 2) << 8);

		/* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
		clrbits_le32(&denali_phy[547], (0x3 << 2) << 16);
		clrbits_le32(&denali_phy[675], (0x3 << 2) << 16);
		clrbits_le32(&denali_phy[803], (0x3 << 2) << 16);
	}
}

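/*
 * Program the address map for one channel. The controller/PI "DIFF" fields
 * hold the difference from the maximum supported geometry (12 column bits,
 * 3 bank bits, 16 row bits), which is why the values written below are
 * (12 - col), (3 - bk) and (16 - row).
 */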
static void set_memory_map(const struct chan_info *chan, u32 channel,
			   const struct rk3399_sdram_params *sdram_params)
{
	const struct rk3399_sdram_channel *sdram_ch =
			&sdram_params->ch[channel];
	u32 *denali_ctl = chan->pctl->denali_ctl;
	u32 *denali_pi = chan->pi->denali_pi;
	u32 cs_map;
	u32 reduc;
	u32 row;

	/* Get row number from ddrconfig setting */
	if (sdram_ch->ddrconfig < 2 || sdram_ch->ddrconfig == 4)
		row = 16;
	else if (sdram_ch->ddrconfig == 3)
		row = 14;
	else
		row = 15;

	cs_map = (sdram_ch->rank > 1) ? 3 : 1;
	reduc = (sdram_ch->bw == 2) ? 0 : 1;

	/* Set the dram configuration to ctrl */
	clrsetbits_le32(&denali_ctl[191], 0xF, (12 - sdram_ch->col));
	clrsetbits_le32(&denali_ctl[190], (0x3 << 16) | (0x7 << 24),
			((3 - sdram_ch->bk) << 16) |
			((16 - row) << 24));

	clrsetbits_le32(&denali_ctl[196], 0x3 | (1 << 16),
			cs_map | (reduc << 16));

	/* PI_199 PI_COL_DIFF:RW:0:4 */
	clrsetbits_le32(&denali_pi[199], 0xF, (12 - sdram_ch->col));

	/* PI_155 PI_ROW_DIFF:RW:24:3 PI_BANK_DIFF:RW:16:2 */
	clrsetbits_le32(&denali_pi[155], (0x3 << 16) | (0x7 << 24),
			((3 - sdram_ch->bk) << 16) |
			((16 - row) << 24));
	/* PI_41 PI_CS_MAP:RW:24:4 */
	clrsetbits_le32(&denali_pi[41], 0xf << 24, cs_map << 24);
	if ((sdram_ch->rank == 1) && (sdram_params->base.dramtype == DDR3))
		writel(0x2EC7FFFF, &denali_pi[34]);
}

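/*
 * Select pad drive strength and on-die termination for the PHY. The
 * tsel_*_select values chosen below pack an N-side and a P-side code (one
 * nibble each) per read, write and idle state; the *_en flags decide
 * whether termination is enabled at all for that state.
 */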
static void set_ds_odt(const struct chan_info *chan,
		       const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_phy = chan->publ->denali_phy;

	u32 tsel_idle_en, tsel_wr_en, tsel_rd_en;
	u32 tsel_idle_select_p, tsel_wr_select_p, tsel_rd_select_p;
	u32 ca_tsel_wr_select_p, ca_tsel_wr_select_n;
	u32 tsel_idle_select_n, tsel_wr_select_n, tsel_rd_select_n;
	u32 reg_value;

	if (sdram_params->base.dramtype == LPDDR4) {
		tsel_rd_select_p = PHY_DRV_ODT_Hi_Z;
		tsel_wr_select_p = PHY_DRV_ODT_40;
		ca_tsel_wr_select_p = PHY_DRV_ODT_40;
		tsel_idle_select_p = PHY_DRV_ODT_Hi_Z;

		tsel_rd_select_n = PHY_DRV_ODT_240;
		tsel_wr_select_n = PHY_DRV_ODT_40;
		ca_tsel_wr_select_n = PHY_DRV_ODT_40;
		tsel_idle_select_n = PHY_DRV_ODT_240;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		tsel_rd_select_p = PHY_DRV_ODT_240;
		tsel_wr_select_p = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_p = PHY_DRV_ODT_48;
		tsel_idle_select_p = PHY_DRV_ODT_240;

		tsel_rd_select_n = PHY_DRV_ODT_Hi_Z;
		tsel_wr_select_n = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_n = PHY_DRV_ODT_48;
		tsel_idle_select_n = PHY_DRV_ODT_Hi_Z;
	} else {
		tsel_rd_select_p = PHY_DRV_ODT_240;
		tsel_wr_select_p = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_p = PHY_DRV_ODT_34_3;
		tsel_idle_select_p = PHY_DRV_ODT_240;

		tsel_rd_select_n = PHY_DRV_ODT_240;
		tsel_wr_select_n = PHY_DRV_ODT_34_3;
		ca_tsel_wr_select_n = PHY_DRV_ODT_34_3;
		tsel_idle_select_n = PHY_DRV_ODT_240;
	}

	if (sdram_params->base.odt == 1)
		tsel_rd_en = 1;
	else
		tsel_rd_en = 0;

	tsel_wr_en = 0;
	tsel_idle_en = 0;

	/*
	 * phy_dq_tsel_select_X 24bits DENALI_PHY_6/134/262/390 offset_0
	 * sets termination values for read/idle cycles and drive strength
	 * for write cycles for DQ/DM
	 */
	reg_value = tsel_rd_select_n | (tsel_rd_select_p << 0x4) |
		    (tsel_wr_select_n << 8) | (tsel_wr_select_p << 12) |
		    (tsel_idle_select_n << 16) | (tsel_idle_select_p << 20);
	clrsetbits_le32(&denali_phy[6], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[134], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[262], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[390], 0xffffff, reg_value);

	/*
	 * phy_dqs_tsel_select_X 24bits DENALI_PHY_7/135/263/391 offset_0
	 * sets termination values for read/idle cycles and drive strength
	 * for write cycles for DQS
	 */
	clrsetbits_le32(&denali_phy[7], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[135], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[263], 0xffffff, reg_value);
	clrsetbits_le32(&denali_phy[391], 0xffffff, reg_value);

	/* phy_adr_tsel_select_ 8bits DENALI_PHY_544/672/800 offset_0 */
	reg_value = ca_tsel_wr_select_n | (ca_tsel_wr_select_p << 0x4);
	clrsetbits_le32(&denali_phy[544], 0xff, reg_value);
	clrsetbits_le32(&denali_phy[672], 0xff, reg_value);
	clrsetbits_le32(&denali_phy[800], 0xff, reg_value);

	/* phy_pad_addr_drive 8bits DENALI_PHY_928 offset_0 */
	clrsetbits_le32(&denali_phy[928], 0xff, reg_value);

	/* phy_pad_rst_drive 8bits DENALI_PHY_937 offset_0 */
	clrsetbits_le32(&denali_phy[937], 0xff, reg_value);

	/* phy_pad_cke_drive 8bits DENALI_PHY_935 offset_0 */
	clrsetbits_le32(&denali_phy[935], 0xff, reg_value);

	/* phy_pad_cs_drive 8bits DENALI_PHY_939 offset_0 */
	clrsetbits_le32(&denali_phy[939], 0xff, reg_value);

	/* phy_pad_clk_drive 8bits DENALI_PHY_929 offset_0 */
	clrsetbits_le32(&denali_phy[929], 0xff, reg_value);

	/* phy_pad_fdbk_drive 23bit DENALI_PHY_924/925 */
	clrsetbits_le32(&denali_phy[924], 0xff,
			tsel_wr_select_n | (tsel_wr_select_p << 4));
	clrsetbits_le32(&denali_phy[925], 0xff,
			tsel_rd_select_n | (tsel_rd_select_p << 4));

	/* phy_dq_tsel_enable_X 3bits DENALI_PHY_5/133/261/389 offset_16 */
	reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
		    << 16;
	clrsetbits_le32(&denali_phy[5], 0x7 << 16, reg_value);
	clrsetbits_le32(&denali_phy[133], 0x7 << 16, reg_value);
	clrsetbits_le32(&denali_phy[261], 0x7 << 16, reg_value);
	clrsetbits_le32(&denali_phy[389], 0x7 << 16, reg_value);

	/* phy_dqs_tsel_enable_X 3bits DENALI_PHY_6/134/262/390 offset_24 */
	reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
		    << 24;
	clrsetbits_le32(&denali_phy[6], 0x7 << 24, reg_value);
	clrsetbits_le32(&denali_phy[134], 0x7 << 24, reg_value);
	clrsetbits_le32(&denali_phy[262], 0x7 << 24, reg_value);
	clrsetbits_le32(&denali_phy[390], 0x7 << 24, reg_value);

	/* phy_adr_tsel_enable_ 1bit DENALI_PHY_518/646/774 offset_8 */
	reg_value = tsel_wr_en << 8;
	clrsetbits_le32(&denali_phy[518], 0x1 << 8, reg_value);
	clrsetbits_le32(&denali_phy[646], 0x1 << 8, reg_value);
	clrsetbits_le32(&denali_phy[774], 0x1 << 8, reg_value);

	/* phy_pad_addr_term tsel 1bit DENALI_PHY_933 offset_17 */
	reg_value = tsel_wr_en << 17;
	clrsetbits_le32(&denali_phy[933], 0x1 << 17, reg_value);
	/*
	 * pad_rst/cke/cs/clk_term tsel 1bits
	 * DENALI_PHY_938/936/940/934 offset_17
	 */
	clrsetbits_le32(&denali_phy[938], 0x1 << 17, reg_value);
	clrsetbits_le32(&denali_phy[936], 0x1 << 17, reg_value);
	clrsetbits_le32(&denali_phy[940], 0x1 << 17, reg_value);
	clrsetbits_le32(&denali_phy[934], 0x1 << 17, reg_value);

	/* phy_pad_fdbk_term 1bit DENALI_PHY_930 offset_17 */
	clrsetbits_le32(&denali_phy[930], 0x1 << 17, reg_value);
}

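/*
 * Configure pad vref, I/O mode and speed for the selected DRAM type. The
 * vref control words written below pack (mode << 9) | enable (bit 8) |
 * value, matching the PHY_PAD_VREF_CTRL_* register layout used here.
 */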
static int phy_io_config(const struct chan_info *chan,
			 const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_phy = chan->publ->denali_phy;
	u32 vref_mode_dq, vref_value_dq, vref_mode_ac, vref_value_ac;
	u32 mode_sel;
	u32 reg_value;
	u32 drv_value, odt_value;
	u32 speed;

	/* vref setting */
	if (sdram_params->base.dramtype == LPDDR4) {
		/* LPDDR4 */
		vref_mode_dq = 0x6;
		vref_value_dq = 0x1f;
		vref_mode_ac = 0x6;
		vref_value_ac = 0x1f;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		if (sdram_params->base.odt == 1) {
			vref_mode_dq = 0x5;	/* LPDDR3 ODT */
			drv_value = (readl(&denali_phy[6]) >> 12) & 0xf;
			odt_value = (readl(&denali_phy[6]) >> 4) & 0xf;
			if (drv_value == PHY_DRV_ODT_48) {
				switch (odt_value) {
				case PHY_DRV_ODT_240:
					vref_value_dq = 0x16;
					break;
				case PHY_DRV_ODT_120:
					vref_value_dq = 0x26;
					break;
				case PHY_DRV_ODT_60:
					vref_value_dq = 0x36;
					break;
				default:
					debug("Invalid ODT value.\n");
					return -EINVAL;
				}
			} else if (drv_value == PHY_DRV_ODT_40) {
				switch (odt_value) {
				case PHY_DRV_ODT_240:
					vref_value_dq = 0x19;
					break;
				case PHY_DRV_ODT_120:
					vref_value_dq = 0x23;
					break;
				case PHY_DRV_ODT_60:
					vref_value_dq = 0x31;
					break;
				default:
					debug("Invalid ODT value.\n");
					return -EINVAL;
				}
			} else if (drv_value == PHY_DRV_ODT_34_3) {
				switch (odt_value) {
				case PHY_DRV_ODT_240:
					vref_value_dq = 0x17;
					break;
				case PHY_DRV_ODT_120:
					vref_value_dq = 0x20;
					break;
				case PHY_DRV_ODT_60:
					vref_value_dq = 0x2e;
					break;
				default:
					debug("Invalid ODT value.\n");
					return -EINVAL;
				}
			} else {
				debug("Invalid DRV value.\n");
				return -EINVAL;
			}
		} else {
			vref_mode_dq = 0x2;	/* LPDDR3 */
			vref_value_dq = 0x1f;
		}
		vref_mode_ac = 0x2;
		vref_value_ac = 0x1f;
	} else if (sdram_params->base.dramtype == DDR3) {
		/* DDR3L */
		vref_mode_dq = 0x1;
		vref_value_dq = 0x1f;
		vref_mode_ac = 0x1;
		vref_value_ac = 0x1f;
	} else {
		debug("Unknown DRAM type.\n");
		return -EINVAL;
	}

	reg_value = (vref_mode_dq << 9) | (0x1 << 8) | vref_value_dq;

	/* PHY_913 PHY_PAD_VREF_CTRL_DQ_0 12bits offset_8 */
	clrsetbits_le32(&denali_phy[913], 0xfff << 8, reg_value << 8);
	/* PHY_914 PHY_PAD_VREF_CTRL_DQ_1 12bits offset_0 */
	clrsetbits_le32(&denali_phy[914], 0xfff, reg_value);
	/* PHY_914 PHY_PAD_VREF_CTRL_DQ_2 12bits offset_16 */
	clrsetbits_le32(&denali_phy[914], 0xfff << 16, reg_value << 16);
	/* PHY_915 PHY_PAD_VREF_CTRL_DQ_3 12bits offset_0 */
	clrsetbits_le32(&denali_phy[915], 0xfff, reg_value);

	reg_value = (vref_mode_ac << 9) | (0x1 << 8) | vref_value_ac;

	/* PHY_915 PHY_PAD_VREF_CTRL_AC 12bits offset_16 */
	clrsetbits_le32(&denali_phy[915], 0xfff << 16, reg_value << 16);

	if (sdram_params->base.dramtype == LPDDR4)
		mode_sel = 0x6;
	else if (sdram_params->base.dramtype == LPDDR3)
		mode_sel = 0x0;
	else if (sdram_params->base.dramtype == DDR3)
		mode_sel = 0x1;
	else
		return -EINVAL;

	/* PHY_924 PHY_PAD_FDBK_DRIVE */
	clrsetbits_le32(&denali_phy[924], 0x7 << 15, mode_sel << 15);
	/* PHY_926 PHY_PAD_DATA_DRIVE */
	clrsetbits_le32(&denali_phy[926], 0x7 << 6, mode_sel << 6);
	/* PHY_927 PHY_PAD_DQS_DRIVE */
	clrsetbits_le32(&denali_phy[927], 0x7 << 6, mode_sel << 6);
	/* PHY_928 PHY_PAD_ADDR_DRIVE */
	clrsetbits_le32(&denali_phy[928], 0x7 << 14, mode_sel << 14);
	/* PHY_929 PHY_PAD_CLK_DRIVE */
	clrsetbits_le32(&denali_phy[929], 0x7 << 14, mode_sel << 14);
	/* PHY_935 PHY_PAD_CKE_DRIVE */
	clrsetbits_le32(&denali_phy[935], 0x7 << 14, mode_sel << 14);
	/* PHY_937 PHY_PAD_RST_DRIVE */
	clrsetbits_le32(&denali_phy[937], 0x7 << 14, mode_sel << 14);
	/* PHY_939 PHY_PAD_CS_DRIVE */
	clrsetbits_le32(&denali_phy[939], 0x7 << 14, mode_sel << 14);

	/* speed setting */
	if (sdram_params->base.ddr_freq < 400)
		speed = 0x0;
	else if (sdram_params->base.ddr_freq < 800)
		speed = 0x1;
	else if (sdram_params->base.ddr_freq < 1200)
		speed = 0x2;
	else
		speed = 0x3;

	/* PHY_924 PHY_PAD_FDBK_DRIVE */
	clrsetbits_le32(&denali_phy[924], 0x3 << 21, speed << 21);
	/* PHY_926 PHY_PAD_DATA_DRIVE */
	clrsetbits_le32(&denali_phy[926], 0x3 << 9, speed << 9);
	/* PHY_927 PHY_PAD_DQS_DRIVE */
	clrsetbits_le32(&denali_phy[927], 0x3 << 9, speed << 9);
	/* PHY_928 PHY_PAD_ADDR_DRIVE */
	clrsetbits_le32(&denali_phy[928], 0x3 << 17, speed << 17);
	/* PHY_929 PHY_PAD_CLK_DRIVE */
	clrsetbits_le32(&denali_phy[929], 0x3 << 17, speed << 17);
	/* PHY_935 PHY_PAD_CKE_DRIVE */
	clrsetbits_le32(&denali_phy[935], 0x3 << 17, speed << 17);
	/* PHY_937 PHY_PAD_RST_DRIVE */
	clrsetbits_le32(&denali_phy[937], 0x3 << 17, speed << 17);
	/* PHY_939 PHY_PAD_CS_DRIVE */
	clrsetbits_le32(&denali_phy[939], 0x3 << 17, speed << 17);

	return 0;
}

static int pctl_cfg(const struct chan_info *chan, u32 channel,
		    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_ctl = chan->pctl->denali_ctl;
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	const u32 *params_ctl = sdram_params->pctl_regs.denali_ctl;
	const u32 *params_phy = sdram_params->phy_regs.denali_phy;
	u32 tmp, tmp1, tmp2;
	u32 pwrup_srefresh_exit;
	int ret;

	/*
	 * work around controller bug:
	 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
	 */
	copy_to_reg(&denali_ctl[1], &params_ctl[1],
		    sizeof(struct rk3399_ddr_pctl_regs) - 4);
	writel(params_ctl[0], &denali_ctl[0]);
	copy_to_reg(denali_pi, &sdram_params->pi_regs.denali_pi[0],
		    sizeof(struct rk3399_ddr_pi_regs));
	/* The rank count needs to be set before starting init */
	set_memory_map(chan, channel, sdram_params);

	writel(sdram_params->phy_regs.denali_phy[910], &denali_phy[910]);
	writel(sdram_params->phy_regs.denali_phy[911], &denali_phy[911]);
	writel(sdram_params->phy_regs.denali_phy[912], &denali_phy[912]);

	pwrup_srefresh_exit = readl(&denali_ctl[68]) & PWRUP_SREFRESH_EXIT;
	clrbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT);

	/* PHY_DLL_RST_EN */
	clrsetbits_le32(&denali_phy[957], 0x3 << 24, 1 << 24);

	setbits_le32(&denali_pi[0], START);
	setbits_le32(&denali_ctl[0], START);

	/* Wait for the PHY DLLs to lock */
	while (1) {
		tmp = readl(&denali_phy[920]);
		tmp1 = readl(&denali_phy[921]);
		tmp2 = readl(&denali_phy[922]);
		if ((((tmp >> 16) & 0x1) == 0x1) &&
		    (((tmp1 >> 16) & 0x1) == 0x1) &&
		    (((tmp1 >> 0) & 0x1) == 0x1) &&
		    (((tmp2 >> 0) & 0x1) == 0x1))
			break;
	}

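	/*
	 * The PHY register file appears to be organized in banks of 128
	 * registers: one per data slice (0/128/256/384), one per
	 * address/control slice (512/640/768) and a common block starting
	 * at 896. Only the ranges actually used by the init data are copied.
	 */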
	copy_to_reg(&denali_phy[896], &params_phy[896], (958 - 895) * 4);
	copy_to_reg(&denali_phy[0], &params_phy[0], (90 - 0 + 1) * 4);
	copy_to_reg(&denali_phy[128], &params_phy[128], (218 - 128 + 1) * 4);
	copy_to_reg(&denali_phy[256], &params_phy[256], (346 - 256 + 1) * 4);
	copy_to_reg(&denali_phy[384], &params_phy[384], (474 - 384 + 1) * 4);
	copy_to_reg(&denali_phy[512], &params_phy[512], (549 - 512 + 1) * 4);
	copy_to_reg(&denali_phy[640], &params_phy[640], (677 - 640 + 1) * 4);
	copy_to_reg(&denali_phy[768], &params_phy[768], (805 - 768 + 1) * 4);
	set_ds_odt(chan, sdram_params);

	/*
	 * phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_84/212/340/468 offset_8
	 * dqs_tsel_wr_end[7:4] add Half cycle
	 */
	tmp = (readl(&denali_phy[84]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[84], 0xff << 8, (tmp + 0x10) << 8);
	tmp = (readl(&denali_phy[212]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[212], 0xff << 8, (tmp + 0x10) << 8);
	tmp = (readl(&denali_phy[340]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[340], 0xff << 8, (tmp + 0x10) << 8);
	tmp = (readl(&denali_phy[468]) >> 8) & 0xff;
	clrsetbits_le32(&denali_phy[468], 0xff << 8, (tmp + 0x10) << 8);

	/*
	 * phy_dq_tsel_wr_timing_X 8bits DENALI_PHY_83/211/339/467 offset_16
	 * dq_tsel_wr_end[7:4] add Half cycle
	 */
	tmp = (readl(&denali_phy[83]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[83], 0xff << 16, (tmp + 0x10) << 16);
	tmp = (readl(&denali_phy[211]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[211], 0xff << 16, (tmp + 0x10) << 16);
	tmp = (readl(&denali_phy[339]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[339], 0xff << 16, (tmp + 0x10) << 16);
	tmp = (readl(&denali_phy[467]) >> 16) & 0xff;
	clrsetbits_le32(&denali_phy[467], 0xff << 16, (tmp + 0x10) << 16);

	ret = phy_io_config(chan, sdram_params);
	if (ret)
		return ret;

	/* PHY_DLL_RST_EN */
	clrsetbits_le32(&denali_phy[957], 0x3 << 24, 0x2 << 24);

	/* Wait for PHY and DRAM init to complete */
	tmp = 0;
	while (!(readl(&denali_ctl[203]) & (1 << 3))) {
		mdelay(10);
		tmp++;
		if (tmp > 10)
			return -ETIME;
	}

	clrsetbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT,
			pwrup_srefresh_exit);
	return 0;
}

static void select_per_cs_training_index(const struct chan_info *chan,
					 u32 rank)
{
	u32 *denali_phy = chan->publ->denali_phy;

	/* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
	if ((readl(&denali_phy[84]) >> 16) & 1) {
		/*
		 * PHY_8/136/264/392
		 * phy_per_cs_training_index_X 1bit offset_24
		 */
		clrsetbits_le32(&denali_phy[8], 0x1 << 24, rank << 24);
		clrsetbits_le32(&denali_phy[136], 0x1 << 24, rank << 24);
		clrsetbits_le32(&denali_phy[264], 0x1 << 24, rank << 24);
		clrsetbits_le32(&denali_phy[392], 0x1 << 24, rank << 24);
	}
}

static void override_write_leveling_value(const struct chan_info *chan)
{
	u32 *denali_ctl = chan->pctl->denali_ctl;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 byte;

	/* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
	setbits_le32(&denali_phy[896], 1);

	/*
	 * PHY_8/136/264/392
	 * phy_per_cs_training_multicast_en_X 1bit offset_16
	 */
	clrsetbits_le32(&denali_phy[8], 0x1 << 16, 1 << 16);
	clrsetbits_le32(&denali_phy[136], 0x1 << 16, 1 << 16);
	clrsetbits_le32(&denali_phy[264], 0x1 << 16, 1 << 16);
	clrsetbits_le32(&denali_phy[392], 0x1 << 16, 1 << 16);

	for (byte = 0; byte < 4; byte++)
		clrsetbits_le32(&denali_phy[63 + (128 * byte)], 0xffff << 16,
				0x200 << 16);

	/* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
	clrbits_le32(&denali_phy[896], 1);

	/* CTL_200 ctrlupd_req 1bit offset_8 */
	clrsetbits_le32(&denali_ctl[200], 0x1 << 8, 0x1 << 8);
}

static int data_training_ca(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 i, tmp;
	u32 obs_0, obs_1, obs_2, obs_err = 0;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_100 PI_CALVL_EN:RW:8:2 */
		clrsetbits_le32(&denali_pi[100], 0x3 << 8, 0x2 << 8);
		/* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
		clrsetbits_le32(&denali_pi[92],
				(0x1 << 16) | (0x3 << 24),
				(0x1 << 16) | (i << 24));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;
			/*
			 * check status obs
			 * PHY_532/660/788 phy_adr_calvl_obs1_:0:32
			 */
			obs_0 = readl(&denali_phy[532]);
			obs_1 = readl(&denali_phy[660]);
			obs_2 = readl(&denali_phy[788]);
			if (((obs_0 >> 30) & 0x3) ||
			    ((obs_1 >> 30) & 0x3) ||
			    ((obs_2 >> 30) & 0x3))
				obs_err = 1;
			if ((((tmp >> 11) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 5) & 0x1) == 0x0) &&
			    (obs_err == 0))
				break;
			else if ((((tmp >> 5) & 0x1) == 0x1) ||
				 (obs_err == 1))
				return -EIO;
		}
		/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[100], 0x3 << 8);

	return 0;
}

static int data_training_wl(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 i, tmp;
	u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_60 PI_WRLVL_EN:RW:8:2 */
		clrsetbits_le32(&denali_pi[60], 0x3 << 8, 0x2 << 8);
		/* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
		clrsetbits_le32(&denali_pi[59],
				(0x1 << 8) | (0x3 << 16),
				(0x1 << 8) | (i << 16));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;

			/*
			 * check status obs, if error maybe can not
			 * get leveling done PHY_40/168/296/424
			 * phy_wrlvl_status_obs_X:0:13
			 */
			obs_0 = readl(&denali_phy[40]);
			obs_1 = readl(&denali_phy[168]);
			obs_2 = readl(&denali_phy[296]);
			obs_3 = readl(&denali_phy[424]);
			if (((obs_0 >> 12) & 0x1) ||
			    ((obs_1 >> 12) & 0x1) ||
			    ((obs_2 >> 12) & 0x1) ||
			    ((obs_3 >> 12) & 0x1))
				obs_err = 1;
			if ((((tmp >> 10) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 4) & 0x1) == 0x0) &&
			    (obs_err == 0))
				break;
			else if ((((tmp >> 4) & 0x1) == 0x1) ||
				 (obs_err == 1))
				return -EIO;
		}
		/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}

	override_write_leveling_value(chan);
	clrbits_le32(&denali_pi[60], 0x3 << 8);

	return 0;
}

static int data_training_rg(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 *denali_phy = chan->publ->denali_phy;
	u32 i, tmp;
	u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
		clrsetbits_le32(&denali_pi[80], 0x3 << 24, 0x2 << 24);
		/*
		 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
		 * PI_RDLVL_CS:RW:24:2
		 */
		clrsetbits_le32(&denali_pi[74],
				(0x1 << 16) | (0x3 << 24),
				(0x1 << 16) | (i << 24));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;

			/*
			 * check status obs
			 * PHY_43/171/299/427
			 * PHY_GTLVL_STATUS_OBS_x:16:8
			 */
			obs_0 = readl(&denali_phy[43]);
			obs_1 = readl(&denali_phy[171]);
			obs_2 = readl(&denali_phy[299]);
			obs_3 = readl(&denali_phy[427]);
			if (((obs_0 >> (16 + 6)) & 0x3) ||
			    ((obs_1 >> (16 + 6)) & 0x3) ||
			    ((obs_2 >> (16 + 6)) & 0x3) ||
			    ((obs_3 >> (16 + 6)) & 0x3))
				obs_err = 1;
			if ((((tmp >> 9) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 3) & 0x1) == 0x0) &&
			    (obs_err == 0))
				break;
			else if ((((tmp >> 3) & 0x1) == 0x1) ||
				 (obs_err == 1))
				return -EIO;
		}
		/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[80], 0x3 << 24);

	return 0;
}

static int data_training_rl(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 i, tmp;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/* PI_80 PI_RDLVL_EN:RW:16:2 */
		clrsetbits_le32(&denali_pi[80], 0x3 << 16, 0x2 << 16);
		/* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
		clrsetbits_le32(&denali_pi[74],
				(0x1 << 8) | (0x3 << 24),
				(0x1 << 8) | (i << 24));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;

			/*
			 * make sure status obs not report error bit
			 * PHY_46/174/302/430
			 * phy_rdlvl_status_obs_X:16:8
			 */
			if ((((tmp >> 8) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 2) & 0x1) == 0x0))
				break;
			else if (((tmp >> 2) & 0x1) == 0x1)
				return -EIO;
		}
		/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[80], 0x3 << 16);

	return 0;
}

static int data_training_wdql(const struct chan_info *chan, u32 channel,
			      const struct rk3399_sdram_params *sdram_params)
{
	u32 *denali_pi = chan->pi->denali_pi;
	u32 i, tmp;
	u32 rank = sdram_params->ch[channel].rank;

	for (i = 0; i < rank; i++) {
		select_per_cs_training_index(chan, i);
		/*
		 * disable PI_WDQLVL_VREF_EN before wdq leveling?
		 * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
		 */
		clrbits_le32(&denali_pi[181], 0x1 << 8);
		/* PI_124 PI_WDQLVL_EN:RW:16:2 */
		clrsetbits_le32(&denali_pi[124], 0x3 << 16, 0x2 << 16);
		/* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
		clrsetbits_le32(&denali_pi[121],
				(0x1 << 8) | (0x3 << 16),
				(0x1 << 8) | (i << 16));

		/* Waiting for training complete */
		while (1) {
			/* PI_174 PI_INT_STATUS:RD:8:18 */
			tmp = readl(&denali_pi[174]) >> 8;
			if ((((tmp >> 12) & 0x1) == 0x1) &&
			    (((tmp >> 13) & 0x1) == 0x1) &&
			    (((tmp >> 6) & 0x1) == 0x0))
				break;
			else if (((tmp >> 6) & 0x1) == 0x1)
				return -EIO;
		}
		/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
		writel(0x00003f7c, (&denali_pi[175]));
	}
	clrbits_le32(&denali_pi[124], 0x3 << 16);

	return 0;
}

static int data_training(const struct chan_info *chan, u32 channel,
			 const struct rk3399_sdram_params *sdram_params,
			 u32 training_flag)
{
	u32 *denali_phy = chan->publ->denali_phy;

	/* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
	setbits_le32(&denali_phy[927], (1 << 22));

	if (training_flag == PI_FULL_TRAINING) {
		if (sdram_params->base.dramtype == LPDDR4) {
			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING | PI_WDQ_LEVELING;
		} else if (sdram_params->base.dramtype == LPDDR3) {
			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING;
		} else if (sdram_params->base.dramtype == DDR3) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING;
		}
	}

	/* ca training(LPDDR4,LPDDR3 support) */
	if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING)
		data_training_ca(chan, channel, sdram_params);

	/* write leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING)
		data_training_wl(chan, channel, sdram_params);

	/* read gate training(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING)
		data_training_rg(chan, channel, sdram_params);

	/* read leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING)
		data_training_rl(chan, channel, sdram_params);

	/* wdq leveling(LPDDR4 support) */
	if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING)
		data_training_wdql(chan, channel, sdram_params);

	/* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
	clrbits_le32(&denali_phy[927], (1 << 22));

	return 0;
}

static void set_ddrconfig(const struct chan_info *chan,
			  const struct rk3399_sdram_params *sdram_params,
			  unsigned char channel, u32 ddrconfig)
{
	/* only need to set ddrconfig */
	struct rk3399_msch_regs *ddr_msch_regs = chan->msch;
	unsigned int cs0_cap = 0;
	unsigned int cs1_cap = 0;

	cs0_cap = (1 << (sdram_params->ch[channel].cs0_row
			+ sdram_params->ch[channel].col
			+ sdram_params->ch[channel].bk
			+ sdram_params->ch[channel].bw - 20));
	if (sdram_params->ch[channel].rank > 1)
		cs1_cap = cs0_cap >> (sdram_params->ch[channel].cs0_row
				- sdram_params->ch[channel].cs1_row);
	if (sdram_params->ch[channel].row_3_4) {
		cs0_cap = cs0_cap * 3 / 4;
		cs1_cap = cs1_cap * 3 / 4;
	}

	writel(ddrconfig | (ddrconfig << 8), &ddr_msch_regs->ddrconf);
	writel(((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8),
	       &ddr_msch_regs->ddrsize);
}

static void dram_all_config(struct dram_info *dram,
			    const struct rk3399_sdram_params *sdram_params)
{
	u32 sys_reg = 0;
	unsigned int channel, idx;

	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
	sys_reg |= (sdram_params->base.num_channels - 1)
		   << SYS_REG_NUM_CH_SHIFT;
	for (channel = 0, idx = 0;
	     (idx < sdram_params->base.num_channels) && (channel < 2);
	     channel++) {
		const struct rk3399_sdram_channel *info =
				&sdram_params->ch[channel];
		struct rk3399_msch_regs *ddr_msch_regs;
		const struct rk3399_msch_timings *noc_timing;

		if (sdram_params->ch[channel].col == 0)
			continue;
		idx++;
		sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(channel);
		sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(channel);
		sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(channel);
		sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(channel);
		sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(channel);
		sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(channel);
		sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(channel);
		sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(channel);
		sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(channel);

		ddr_msch_regs = dram->chan[channel].msch;
		noc_timing = &sdram_params->ch[channel].noc_timings;
		writel(noc_timing->ddrtiminga0,
		       &ddr_msch_regs->ddrtiminga0);
		writel(noc_timing->ddrtimingb0,
		       &ddr_msch_regs->ddrtimingb0);
		writel(noc_timing->ddrtimingc0,
		       &ddr_msch_regs->ddrtimingc0);
		writel(noc_timing->devtodev0,
		       &ddr_msch_regs->devtodev0);
		writel(noc_timing->ddrmode,
		       &ddr_msch_regs->ddrmode);

		/* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
		if (sdram_params->ch[channel].rank == 1)
			setbits_le32(&dram->chan[channel].pctl->denali_ctl[276],
				     1 << 17);
	}

	writel(sys_reg, &dram->pmugrf->os_reg2);
	rk_clrsetreg(&dram->pmusgrf->soc_con4, 0x1f << 10,
		     sdram_params->base.stride << 10);

	/* reboot hold register set */
	writel(PRESET_SGRF_HOLD(0) | PRESET_GPIO0_HOLD(1) |
	       PRESET_GPIO1_HOLD(1),
	       &dram->pmucru->pmucru_rstnhold_con[1]);
	clrsetbits_le32(&dram->cru->glb_rst_con, 0x3, 0x3);
}

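/*
 * Ask the CIC to switch the controller and PHY to frequency index 1, then
 * retrain each channel at the new frequency.
 */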
static int switch_to_phy_index1(struct dram_info *dram,
				const struct rk3399_sdram_params *sdram_params)
{
	u32 channel;
	u32 *denali_phy;
	u32 ch_count = sdram_params->base.num_channels;
	int ret;
	int i = 0;

	writel(RK_CLRSETBITS(0x03 << 4 | 1 << 2 | 1,
			     1 << 4 | 1 << 2 | 1),
	       &dram->cic->cic_ctrl0);
	while (!(readl(&dram->cic->cic_status0) & (1 << 2))) {
		mdelay(10);
		i++;
		if (i > 10) {
			debug("index1 frequency change overtime\n");
			return -ETIME;
		}
	}

	i = 0;
	writel(RK_CLRSETBITS(1 << 1, 1 << 1), &dram->cic->cic_ctrl0);
	while (!(readl(&dram->cic->cic_status0) & (1 << 0))) {
		mdelay(10);
		i++;
		if (i > 10) {
1057 debug("index1 frequency done overtime\n");
1058 return -ETIME;
1059 }
1060 }
1061
1062 for (channel = 0; channel < ch_count; channel++) {
1063 denali_phy = dram->chan[channel].publ->denali_phy;
1064 clrsetbits_le32(&denali_phy[896], (0x3 << 8) | 1, 1 << 8);
1065 ret = data_training(&dram->chan[channel], channel,
1066 sdram_params, PI_FULL_TRAINING);
1067 if (ret) {
1068 debug("index1 training failed\n");
1069 return ret;
1070 }
1071 }
1072
1073 return 0;
1074}
1075
static int sdram_init(struct dram_info *dram,
		      const struct rk3399_sdram_params *sdram_params)
{
	unsigned char dramtype = sdram_params->base.dramtype;
	unsigned int ddr_freq = sdram_params->base.ddr_freq;
	int channel;

	debug("Starting SDRAM initialization...\n");

	if ((dramtype == DDR3 && ddr_freq > 800) ||
	    (dramtype == LPDDR3 && ddr_freq > 933) ||
	    (dramtype == LPDDR4 && ddr_freq > 800)) {
1088 debug("SDRAM frequency is to high!");
		return -E2BIG;
	}

	for (channel = 0; channel < 2; channel++) {
		const struct chan_info *chan = &dram->chan[channel];
		struct rk3399_ddr_publ_regs *publ = chan->publ;

		phy_dll_bypass_set(publ, ddr_freq);

		if (channel >= sdram_params->base.num_channels)
			continue;

		if (pctl_cfg(chan, channel, sdram_params) != 0) {
			printf("pctl_cfg fail, reset\n");
			return -EIO;
		}

		/* LPDDR2/LPDDR3 need to wait for DAI complete, max 10us */
		if (dramtype == LPDDR3)
			udelay(10);

		if (data_training(chan, channel,
				  sdram_params, PI_FULL_TRAINING)) {
			printf("SDRAM initialization failed, reset\n");
			return -EIO;
		}

		set_ddrconfig(chan, sdram_params, channel,
			      sdram_params->ch[channel].ddrconfig);
	}
	dram_all_config(dram, sdram_params);
	switch_to_phy_index1(dram, sdram_params);

1122 debug("Finish SDRAM initialization...\n");
	return 0;
}

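/*
 * The "rockchip,sdram-params" property is expected to contain exactly
 * sizeof(struct rk3399_sdram_params) / sizeof(u32) cells; the values are
 * board specific and are normally generated rather than written by hand.
 */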
static int rk3399_dmc_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	const void *blob = gd->fdt_blob;
	int node = dev->of_offset;
	int ret;

	ret = fdtdec_get_int_array(blob, node, "rockchip,sdram-params",
				   (u32 *)&plat->sdram_params,
				   sizeof(plat->sdram_params) / sizeof(u32));
	if (ret) {
		printf("%s: Cannot read rockchip,sdram-params %d\n",
		       __func__, ret);
		return ret;
	}
	ret = regmap_init_mem(dev, &plat->map);
	if (ret)
		printf("%s: regmap failed %d\n", __func__, ret);

#endif
	return 0;
}

#if CONFIG_IS_ENABLED(OF_PLATDATA)
static int conv_of_platdata(struct udevice *dev)
{
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
	int ret;

	ret = regmap_init_mem_platdata(dev, dtplat->reg,
				       ARRAY_SIZE(dtplat->reg) / 4,
				       &plat->map);
	if (ret)
		return ret;

	return 0;
}
#endif

static int rk3399_dmc_init(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	int ret;
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_sdram_params *params = &plat->sdram_params;
#else
	struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
	struct rk3399_sdram_params *params =
			(void *)dtplat->rockchip_sdram_params;

	ret = conv_of_platdata(dev);
	if (ret)
		return ret;
#endif

	priv->cic = syscon_get_first_range(ROCKCHIP_SYSCON_CIC);
	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
	priv->pmusgrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUSGRF);
	priv->pmucru = rockchip_get_pmucru();
	priv->cru = rockchip_get_cru();
	priv->chan[0].pctl = regmap_get_range(plat->map, 0);
	priv->chan[0].pi = regmap_get_range(plat->map, 1);
	priv->chan[0].publ = regmap_get_range(plat->map, 2);
	priv->chan[0].msch = regmap_get_range(plat->map, 3);
	priv->chan[1].pctl = regmap_get_range(plat->map, 4);
	priv->chan[1].pi = regmap_get_range(plat->map, 5);
	priv->chan[1].publ = regmap_get_range(plat->map, 6);
	priv->chan[1].msch = regmap_get_range(plat->map, 7);

	debug("con reg %p %p %p %p %p %p %p %p\n",
	      priv->chan[0].pctl, priv->chan[0].pi,
	      priv->chan[0].publ, priv->chan[0].msch,
	      priv->chan[1].pctl, priv->chan[1].pi,
	      priv->chan[1].publ, priv->chan[1].msch);
	debug("cru %p, cic %p, grf %p, sgrf %p, pmucru %p\n", priv->cru,
	      priv->cic, priv->pmugrf, priv->pmusgrf, priv->pmucru);
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	ret = clk_get_by_index_platdata(dev, 0, dtplat->clocks, &priv->ddr_clk);
#else
	ret = clk_get_by_index(dev, 0, &priv->ddr_clk);
#endif
	if (ret) {
		printf("%s clk get failed %d\n", __func__, ret);
		return ret;
	}
	ret = clk_set_rate(&priv->ddr_clk, params->base.ddr_freq * MHz);
	if (ret < 0) {
		printf("%s clk set failed %d\n", __func__, ret);
		return ret;
	}
	ret = sdram_init(priv, params);
	if (ret < 0) {
1221 printf("%s DRAM init failed%d\n", __func__, ret);
		return ret;
	}

	return 0;
}
#endif

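/*
 * Decode the geometry stored in PMUGRF os_reg2 and return the usable DRAM
 * size in MiB. For example, a channel with col = 10, bk = 3, bw = 2
 * (32-bit) and cs0_row = 15 contributes 1 << (15 + 10 + 3 + 2 - 20) =
 * 1024 MiB per rank.
 */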
size_t sdram_size_mb(struct dram_info *dram)
{
	u32 rank, col, bk, cs0_row, cs1_row, bw, row_3_4;
	size_t chipsize_mb = 0;
	size_t size_mb = 0;
	u32 ch;

	u32 sys_reg = readl(&dram->pmugrf->os_reg2);
	u32 ch_num = 1 + ((sys_reg >> SYS_REG_NUM_CH_SHIFT)
			  & SYS_REG_NUM_CH_MASK);

	for (ch = 0; ch < ch_num; ch++) {
		rank = 1 + (sys_reg >> SYS_REG_RANK_SHIFT(ch) &
			    SYS_REG_RANK_MASK);
		col = 9 + (sys_reg >> SYS_REG_COL_SHIFT(ch) & SYS_REG_COL_MASK);
		bk = 3 - ((sys_reg >> SYS_REG_BK_SHIFT(ch)) & SYS_REG_BK_MASK);
		cs0_row = 13 + (sys_reg >> SYS_REG_CS0_ROW_SHIFT(ch) &
				SYS_REG_CS0_ROW_MASK);
		cs1_row = 13 + (sys_reg >> SYS_REG_CS1_ROW_SHIFT(ch) &
				SYS_REG_CS1_ROW_MASK);
		bw = (2 >> ((sys_reg >> SYS_REG_BW_SHIFT(ch)) &
			    SYS_REG_BW_MASK));
		row_3_4 = sys_reg >> SYS_REG_ROW_3_4_SHIFT(ch) &
			  SYS_REG_ROW_3_4_MASK;

		chipsize_mb = (1 << (cs0_row + col + bk + bw - 20));

		if (rank > 1)
			chipsize_mb += chipsize_mb >> (cs0_row - cs1_row);
		if (row_3_4)
			chipsize_mb = chipsize_mb * 3 / 4;
		size_mb += chipsize_mb;
	}

	/*
	 * we use the 0x00000000~0xf7ffffff space,
	 * since 0xf8000000~0xffffffff is soc register space
	 * that we must reserve
	 */
	size_mb = min_t(size_t, size_mb, 0xf8000000 / (1 << 20));

	return size_mb;
}

static int rk3399_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_SPL_BUILD
	if (rk3399_dmc_init(dev))
		return 0;
#else
	struct dram_info *priv = dev_get_priv(dev);

	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
	debug("%s: pmugrf=%p\n", __func__, priv->pmugrf);
	priv->info.base = 0;
	priv->info.size = sdram_size_mb(priv) << 20;
#endif
	return 0;
}

static int rk3399_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops rk3399_dmc_ops = {
	.get_info = rk3399_dmc_get_info,
};

static const struct udevice_id rk3399_dmc_ids[] = {
	{ .compatible = "rockchip,rk3399-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk3399) = {
	.name = "rockchip_rk3399_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3399_dmc_ids,
	.ops = &rk3399_dmc_ops,
#ifdef CONFIG_SPL_BUILD
	.ofdata_to_platdata = rk3399_dmc_ofdata_to_platdata,
#endif
	.probe = rk3399_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_SPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),
#endif
};