// SPDX-License-Identifier: GPL-2.0+
/*
 * Sun8i a33 platform dram controller init.
 *
 * (C) Copyright 2007-2015 Allwinner Technology Co.
 *                         Jerry Wang <wangflord@allwinnertech.com>
 * (C) Copyright 2015 Vishnu Patekar <vishnupatekar0510@gmail.com>
 * (C) Copyright 2015 Hans de Goede <hdegoede@redhat.com>
 */
#include <common.h>
#include <errno.h>
#include <init.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/dram.h>
#include <asm/arch/prcm.h>
#include <linux/delay.h>

/* PLL runs at 2x dram-clk, controller runs at PLL / 4 (dram-clk / 2) */
#define DRAM_CLK_MUL 2
#define DRAM_CLK_DIV 4
#define DRAM_SIGMA_DELTA_ENABLE 1

struct dram_para {
	u8 cs1;
	u8 seq;
	u8 bank;
	u8 rank;
	u8 rows;
	u8 bus_width;
	u16 page_size;
};

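/*
 * Program the MCTL_COM work-mode register (cr) from the geometry held in
 * struct dram_para: DDR3 type, rank/bank/row/page-size encoding and the
 * 8- or 16-bit bus width.
 */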
static void mctl_set_cr(struct dram_para *para)
{
	struct sunxi_mctl_com_reg * const mctl_com =
			(struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;

	writel(MCTL_CR_CS1_CONTROL(para->cs1) | MCTL_CR_UNKNOWN |
	       MCTL_CR_CHANNEL(1) | MCTL_CR_DDR3 |
	       (para->seq ? MCTL_CR_SEQUENCE : 0) |
	       ((para->bus_width == 16) ? MCTL_CR_BUSW16 : MCTL_CR_BUSW8) |
	       MCTL_CR_PAGE_SIZE(para->page_size) | MCTL_CR_ROW(para->rows) |
	       MCTL_CR_BANK(para->bank) | MCTL_CR_RANK(para->rank),
	       &mctl_com->cr);
}

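/*
 * Size detection relies on address wrap-around: the controller is first
 * configured for the maximum row count and mctl_mem_matches() reports at
 * which offset accesses alias back to offset 0, which gives the real
 * number of row bits; the same trick is then repeated for the column
 * (page size) detection.
 */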
static void auto_detect_dram_size(struct dram_para *para)
{
	u8 orig_rank = para->rank;
	int rows, columns;

	/* Row detect */
	para->page_size = 512;
	para->seq = 1;
	para->rows = 16;
	para->rank = 1;
	mctl_set_cr(para);
	for (rows = 11 ; rows < 16 ; rows++) {
		if (mctl_mem_matches(1 << (rows + 9))) /* row-column */
			break;
	}

	/* Column (page size) detect */
	para->rows = 11;
	para->page_size = 8192;
	mctl_set_cr(para);
	for (columns = 9 ; columns < 13 ; columns++) {
		if (mctl_mem_matches(1 << columns))
			break;
	}

	para->seq = 0;
	para->rank = orig_rank;
	para->rows = rows;
	para->page_size = 1 << columns;
	mctl_set_cr(para);
}

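/*
 * Convert a delay in nanoseconds into DRAM controller clock cycles,
 * rounding up. The controller runs at CONFIG_DRAM_CLK / 2 (see the
 * DRAM_CLK_MUL / DRAM_CLK_DIV comment above).
 */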
static inline int ns_to_t(int nanoseconds)
{
	const unsigned int ctrl_freq =
		CONFIG_DRAM_CLK * DRAM_CLK_MUL / DRAM_CLK_DIV;

	return (ctrl_freq * nanoseconds + 999) / 1000;
}

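/*
 * Fill in the DDR3 timing registers. The JEDEC-style parameters (tRCD,
 * tRC, tRAS, ...) are converted from nanoseconds to controller clock
 * cycles with ns_to_t() and packed into the dramtmg0-5, pitmg0, ptr3/4
 * and rfshtmg registers.
 */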
static void auto_set_timing_para(struct dram_para *para)
{
	struct sunxi_mctl_ctl_reg * const mctl_ctl =
			(struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;
	u32 reg_val;

	u8 tccd = 2;
	u8 tfaw = ns_to_t(50);
	u8 trrd = max(ns_to_t(10), 4);
	u8 trcd = ns_to_t(15);
	u8 trc = ns_to_t(53);
	u8 txp = max(ns_to_t(8), 3);
	u8 twtr = max(ns_to_t(8), 4);
	u8 trtp = max(ns_to_t(8), 4);
	u8 twr = max(ns_to_t(15), 3);
	u8 trp = ns_to_t(15);
	u8 tras = ns_to_t(38);

	u16 trefi = ns_to_t(7800) / 32;
	u16 trfc = ns_to_t(350);

	/* Fixed timing parameters */
	u8 tmrw = 0;
	u8 tmrd = 4;
	u8 tmod = 12;
	u8 tcke = 3;
	u8 tcksrx = 5;
	u8 tcksre = 5;
	u8 tckesr = 4;
	u8 trasmax = 24;
	u8 tcl = 6; /* CL 12 */
	u8 tcwl = 4; /* CWL 8 */
	u8 t_rdata_en = 4;
	u8 wr_latency = 2;

	u32 tdinit0 = (500 * CONFIG_DRAM_CLK) + 1; /* 500us */
	u32 tdinit1 = (360 * CONFIG_DRAM_CLK) / 1000 + 1; /* 360ns */
	u32 tdinit2 = (200 * CONFIG_DRAM_CLK) + 1; /* 200us */
	u32 tdinit3 = (1 * CONFIG_DRAM_CLK) + 1; /* 1us */

	u8 twtp = tcwl + 2 + twr; /* WL + BL / 2 + tWR */
	u8 twr2rd = tcwl + 2 + twtr; /* WL + BL / 2 + tWTR */
	u8 trd2wr = tcl + 2 + 1 - tcwl; /* RL + BL / 2 + 2 - WL */

	/* Set work mode register */
	mctl_set_cr(para);
	/* Set mode register */
	writel(MCTL_MR0, &mctl_ctl->mr0);
	writel(MCTL_MR1, &mctl_ctl->mr1);
	writel(MCTL_MR2, &mctl_ctl->mr2);
	writel(MCTL_MR3, &mctl_ctl->mr3);
	/* Set dram timing */
	reg_val = (twtp << 24) | (tfaw << 16) | (trasmax << 8) | (tras << 0);
	writel(reg_val, &mctl_ctl->dramtmg0);
	reg_val = (txp << 16) | (trtp << 8) | (trc << 0);
	writel(reg_val, &mctl_ctl->dramtmg1);
	reg_val = (tcwl << 24) | (tcl << 16) | (trd2wr << 8) | (twr2rd << 0);
	writel(reg_val, &mctl_ctl->dramtmg2);
	reg_val = (tmrw << 16) | (tmrd << 12) | (tmod << 0);
	writel(reg_val, &mctl_ctl->dramtmg3);
	reg_val = (trcd << 24) | (tccd << 16) | (trrd << 8) | (trp << 0);
	writel(reg_val, &mctl_ctl->dramtmg4);
	reg_val = (tcksrx << 24) | (tcksre << 16) | (tckesr << 8) | (tcke << 0);
	writel(reg_val, &mctl_ctl->dramtmg5);
	/* Set two rank timing and exit self-refresh timing */
	reg_val = readl(&mctl_ctl->dramtmg8);
	reg_val &= ~(0xff << 8);
	reg_val &= ~(0xff << 0);
	reg_val |= (0x33 << 8);
	reg_val |= (0x10 << 0);
	writel(reg_val, &mctl_ctl->dramtmg8);
	/* Set phy interface time */
	reg_val = (0x2 << 24) | (t_rdata_en << 16) | (0x1 << 8)
			| (wr_latency << 0);
	/* PHY interface write latency and read latency configure */
	writel(reg_val, &mctl_ctl->pitmg0);
	/* Set phy time PTR0-2 use default */
	writel(((tdinit0 << 0) | (tdinit1 << 20)), &mctl_ctl->ptr3);
	writel(((tdinit2 << 0) | (tdinit3 << 20)), &mctl_ctl->ptr4);
	/* Set refresh timing */
	reg_val = (trefi << 16) | (trfc << 0);
	writel(reg_val, &mctl_ctl->rfshtmg);
}

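/*
 * Kick off a PHY init/training sequence by writing the PHY init register
 * (pir) and wait for bit 0 of pgsr0 (presumably the init-done flag) to be
 * set.
 */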
static void mctl_set_pir(u32 val)
{
	struct sunxi_mctl_ctl_reg * const mctl_ctl =
			(struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;

	writel(val, &mctl_ctl->pir);
	mctl_await_completion(&mctl_ctl->pgsr0, 0x1, 0x1);
}

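/*
 * Configure the data-training control register (dtcr); the bits at
 * position 24/25 appear to select which ranks the training logic
 * exercises (both for a 2-rank setup, only the first otherwise).
 */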
static void mctl_data_train_cfg(struct dram_para *para)
{
	struct sunxi_mctl_ctl_reg * const mctl_ctl =
			(struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;

	if (para->rank == 2)
		clrsetbits_le32(&mctl_ctl->dtcr, 0x3 << 24, 0x3 << 24);
	else
		clrsetbits_le32(&mctl_ctl->dtcr, 0x3 << 24, 0x1 << 24);
}

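/*
 * Run DQS gate training for the currently configured geometry and report
 * failure if any of the error flags in pgsr0[27:20] are set afterwards.
 */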
static int mctl_train_dram(struct dram_para *para)
{
	struct sunxi_mctl_ctl_reg * const mctl_ctl =
			(struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;

	mctl_data_train_cfg(para);
	mctl_set_pir(0x5f3);

	return ((readl(&mctl_ctl->pgsr0) >> 20) & 0xff) ? -EIO : 0;
}

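/*
 * Bring up the DRAM channel: program timings, ODT and PLL settings, run
 * ZQ/CA calibration, then use the DQS gate training results to
 * auto-detect the actual rank count and bus width (see the fallback
 * logic below).
 */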
static int mctl_channel_init(struct dram_para *para)
{
	struct sunxi_mctl_ctl_reg * const mctl_ctl =
			(struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;
	struct sunxi_mctl_com_reg * const mctl_com =
			(struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;
	u32 low_data_lines_status;  /* Training status of datalines 0 - 7 */
	u32 high_data_lines_status; /* Training status of datalines 8 - 15 */

	auto_set_timing_para(para);

	/* Disable dram VTC */
	clrbits_le32(&mctl_ctl->pgcr0, 0x3f << 0);

	/* Set ODT */
	if ((CONFIG_DRAM_CLK > 400) && IS_ENABLED(CONFIG_DRAM_ODT_EN)) {
		setbits_le32(DXnGCR0(0), 0x3 << 9);
		setbits_le32(DXnGCR0(1), 0x3 << 9);
	} else {
		clrbits_le32(DXnGCR0(0), 0x3 << 9);
		clrbits_le32(DXnGCR0(1), 0x3 << 9);
	}

	/* set PLL configuration */
	if (CONFIG_DRAM_CLK >= 480)
		setbits_le32(&mctl_ctl->pllgcr, 0x1 << 18);
	else
		setbits_le32(&mctl_ctl->pllgcr, 0x3 << 18);

	/* Auto detect dram config, set 2 rank and 16bit bus-width */
	para->cs1 = 0;
	para->rank = 2;
	para->bus_width = 16;
	mctl_set_cr(para);

	/* Open DQS gating */
	clrbits_le32(&mctl_ctl->pgcr2, (0x3 << 6));
	clrbits_le32(&mctl_ctl->dqsgmr, (0x1 << 8) | (0x7));

	mctl_data_train_cfg(para);

	/* ZQ calibration */
	writel(CONFIG_DRAM_ZQ & 0xff, &mctl_ctl->zqcr1);
	/* CA calibration */
	mctl_set_pir(0x00000003);
	/* More ZQ calibration */
	writel(readl(&mctl_ctl->zqsr0) | 0x10000000, &mctl_ctl->zqcr2);
	writel((CONFIG_DRAM_ZQ >> 8) & 0xff, &mctl_ctl->zqcr1);

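	/*
	 * Start from the 2-rank, 16-bit configuration set above. If training
	 * fails, the per-byte-lane gating status in DXnGSR0 decides the
	 * fallback: both status bits set on the low lanes means no usable
	 * DRAM, matching low/high status means a single rank on a 16-bit
	 * bus, otherwise retry with CS1 asserted or with an 8-bit bus.
	 */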
	/* DQS gate training */
	if (mctl_train_dram(para) != 0) {
		low_data_lines_status = (readl(DXnGSR0(0)) >> 24) & 0x03;
		high_data_lines_status = (readl(DXnGSR0(1)) >> 24) & 0x03;

		if (low_data_lines_status == 0x3)
			return -EIO;

		/* DRAM has only one rank */
		para->rank = 1;
		mctl_set_cr(para);

		if (low_data_lines_status == high_data_lines_status)
			goto done; /* 16 bit bus, 1 rank */

		if (!(low_data_lines_status & high_data_lines_status)) {
			/* Retry 16 bit bus-width with CS1 set */
			para->cs1 = 1;
			mctl_set_cr(para);
			if (mctl_train_dram(para) == 0)
				goto done;
		}

		/* Try 8 bit bus-width */
		writel(0x0, DXnGCR0(1)); /* Disable high DQ */
		para->cs1 = 0;
		para->bus_width = 8;
		mctl_set_cr(para);
		if (mctl_train_dram(para) != 0)
			return -EIO;
	}
done:
	/* Check the dramc status */
	mctl_await_completion(&mctl_ctl->statr, 0x1, 0x1);

	/* Close DQS gating */
	setbits_le32(&mctl_ctl->pgcr2, 0x3 << 6);

	/* Enable master access */
	writel(0xffffffff, &mctl_com->maer);

	return 0;
}

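/*
 * Clock and reset setup: source the DRAM clock from PLL11 running at
 * 2 * CONFIG_DRAM_CLK, divide it down for the controller, then release
 * the MCTL/MBUS resets and open the bus clock gates.
 */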
static void mctl_sys_init(struct dram_para *para)
{
	struct sunxi_ccm_reg * const ccm =
			(struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
	struct sunxi_mctl_ctl_reg * const mctl_ctl =
			(struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;
	struct sunxi_mctl_com_reg * const mctl_com =
			(struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;

	clrsetbits_le32(&ccm->dram_pll_cfg, CCM_DRAMPLL_CFG_SRC_MASK,
			CCM_DRAMPLL_CFG_SRC_PLL11);

	clock_set_pll11(CONFIG_DRAM_CLK * 1000000 * DRAM_CLK_MUL,
			DRAM_SIGMA_DELTA_ENABLE);

	clrsetbits_le32(&ccm->dram_clk_cfg, CCM_DRAMCLK_CFG_DIV_MASK,
			CCM_DRAMCLK_CFG_DIV(DRAM_CLK_DIV) |
			CCM_DRAMCLK_CFG_RST | CCM_DRAMCLK_CFG_UPD);
	mctl_await_completion(&ccm->dram_clk_cfg, CCM_DRAMCLK_CFG_UPD, 0);

	setbits_le32(&ccm->ahb_reset0_cfg, 1 << AHB_RESET_OFFSET_MCTL);
	setbits_le32(&ccm->ahb_gate0, 1 << AHB_GATE_OFFSET_MCTL);
	setbits_le32(&ccm->mbus_reset, CCM_MBUS_RESET_RESET);
	setbits_le32(&ccm->mbus0_clk_cfg, MBUS_CLK_GATE);

	/* Set dram master access priority */
	writel(0x0, &mctl_com->mapr);
	writel(0x0f802f01, &mctl_ctl->sched);
	writel(0x0000400f, &mctl_ctl->clken); /* normal */

	udelay(250);
}

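/*
 * Top-level DRAM init: set up clocks and the controller, auto-detect the
 * DRAM geometry and return the detected DRAM size in bytes (0 on
 * failure).
 */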
unsigned long sunxi_dram_init(void)
{
	struct sunxi_mctl_com_reg * const mctl_com =
			(struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;
	struct sunxi_mctl_ctl_reg * const mctl_ctl =
			(struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;

	struct dram_para para = {
		.cs1 = 0,
		.bank = 1,
		.rank = 2,
		.rows = 15,
		.bus_width = 16,
		.page_size = 2048,
	};

	mctl_sys_init(&para);

	if (mctl_channel_init(&para) != 0)
		return 0;

	auto_detect_dram_size(&para);

	/* Enable master software clk */
	writel(readl(&mctl_com->swonr) | 0x3ffff, &mctl_com->swonr);

	/* Set DRAM ODT MAP */
	if (para.rank == 2)
		writel(0x00000303, &mctl_ctl->odtmap);
	else
		writel(0x00000201, &mctl_ctl->odtmap);

	return para.page_size * (para.bus_width / 8) *
	       (1 << (para.bank + para.rank + para.rows));
}