/* SPDX-License-Identifier: (GPL-2.0+ OR MIT) */
/*
 * Copyright (c) 2018 Microsemi Corporation
 */

#ifndef __ASM_MACH_DDR_H
#define __ASM_MACH_DDR_H

#include <asm/cacheops.h>
#include <asm/io.h>
#include <asm/reboot.h>
#include <mach/common.h>

#define MIPS_VCOREIII_MEMORY_DDR3
#define MIPS_VCOREIII_DDR_SIZE CONFIG_SYS_SDRAM_SIZE

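/*
 * Each supported DDR device below gets one block of VC3_MPAR_* macros,
 * selected by the board's CONFIG_DDRTYPE_* option.  The values are the
 * device timing parameters expressed in memory-controller clock cycles;
 * the "@ 3.20ns" / "@ 4.80ns" notes give the clock period they were
 * derived for.
 */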
#if defined(CONFIG_DDRTYPE_H5TQ1G63BFA) /* Serval1 Refboard */

/* Hynix H5TQ1G63BFA (1Gbit DDR3, x16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 13
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 35
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 38
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41J128M16HA) /* Validation board */

/* Micron MT41J128M16HA-15E:D (2Gbit DDR3, x16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 5
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 50
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 54
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41K256M16) /* JR2 Validation board */

/* Micron MT41K256M16 (4Gbit, DDR3L-800, 256Mbitx16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 15
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 5
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_H5TQ4G63MFR) /* JR2 Reference board */

/* Hynix H5TQ4G63MFR-PBC (4Gbit, DDR3-800, 256Mbitx16) - 2kb pages @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 15
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 17
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41K128M16JT)

/* Micron MT41K128M16JT-125 (2Gbit DDR3L, 128Mbitx16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT47H128M8HQ) /* Luton10/26 Refboards */

/* Micron 1Gb MT47H128M8-3 16Meg x 8 x 8 banks, DDR-533@CL4 @ 4.80ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 1625
#define VC3_MPAR_tRAS_min 9
#define VC3_MPAR_CL 4
#define VC3_MPAR_tWTR 2
#define VC3_MPAR_tRC 12
#define VC3_MPAR_tFAW 8
#define VC3_MPAR_tRP 4
#define VC3_MPAR_tRRD 2
#define VC3_MPAR_tRCD 4

#define VC3_MPAR_tRPA 4
#define VC3_MPAR_tRP 4

#define VC3_MPAR_tMRD 2
#define VC3_MPAR_tRFC 27

#define VC3_MPAR__400_ns_dly 84

#define VC3_MPAR_tWR 4
#undef MIPS_VCOREIII_MEMORY_DDR3
#else

#error Unknown DDR system configuration - please add!

#endif

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
#define MIPS_VCOREIII_MEMORY_16BIT 1
#endif

#define MIPS_VCOREIII_MEMORY_SSTL_ODT 7
#define MIPS_VCOREIII_MEMORY_SSTL_DRIVE 7
#define VCOREIII_DDR_DQS_MODE_CALIBRATE

#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_16BIT 1
#else
#define VC3_MPAR_16BIT 0
#endif

#ifdef MIPS_VCOREIII_MEMORY_DDR3
#define VC3_MPAR_DDR3_MODE 1 /* DDR3 */
#define VC3_MPAR_BURST_LENGTH 8 /* Always 8 (1) for DDR3 */
#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_BURST_SIZE 1 /* Always 1 for DDR3/16bit */
#else
#define VC3_MPAR_BURST_SIZE 0
#endif
#else
#define VC3_MPAR_DDR3_MODE 0 /* DDR2 */
#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_BURST_LENGTH 4 /* in DDR2 16-bit mode, use burstlen 4 */
#else
#define VC3_MPAR_BURST_LENGTH 8 /* For 8-bit IF we must run burst-8 */
#endif
#define VC3_MPAR_BURST_SIZE 0 /* Always 0 for DDR2 */
#endif

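/*
 * The definitions below derive the controller-facing parameters from the
 * raw device timings above: read latency (RL), write latency (WL),
 * mode-set (MD), init (ID) and DLL-lock/self-refresh (SD) delays, ODT
 * write/read windows (OW/OR), precharge (RP), FAW and burst length (BL),
 * with separate derivations for DDR2 and DDR3.
 */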
#define VC3_MPAR_RL VC3_MPAR_CL
#if !defined(MIPS_VCOREIII_MEMORY_DDR3)
#define VC3_MPAR_WL (VC3_MPAR_RL - 1)
#define VC3_MPAR_MD VC3_MPAR_tMRD
#define VC3_MPAR_ID VC3_MPAR__400_ns_dly
#define VC3_MPAR_SD VC3_MPAR_tXSRD
#define VC3_MPAR_OW (VC3_MPAR_WL - 2)
#define VC3_MPAR_OR (VC3_MPAR_WL - 3)
#define VC3_MPAR_RP (VC3_MPAR_bank_addr_cnt < 3 ? VC3_MPAR_tRP : VC3_MPAR_tRPA)
#define VC3_MPAR_FAW (VC3_MPAR_bank_addr_cnt < 3 ? 1 : VC3_MPAR_tFAW)
#define VC3_MPAR_BL (VC3_MPAR_BURST_LENGTH == 4 ? 2 : 4)
#define MSCC_MEMPARM_MR0 \
	(VC3_MPAR_BURST_LENGTH == 8 ? 3 : 2) | (VC3_MPAR_CL << 4) | \
	((VC3_MPAR_tWR - 1) << 9)
/* DLL-on, Full-OD, AL=0, RTT=off, nDQS-on, RDQS-off, out-en */
#define MSCC_MEMPARM_MR1 0x382
#define MSCC_MEMPARM_MR2 0
#define MSCC_MEMPARM_MR3 0
#else
#define VC3_MPAR_WL VC3_MPAR_CWL
#define VC3_MPAR_MD VC3_MPAR_tMOD
#define VC3_MPAR_ID VC3_MPAR_tXPR
#define VC3_MPAR_SD VC3_MPAR_tDLLK
#define VC3_MPAR_OW 2
#define VC3_MPAR_OR 2
#define VC3_MPAR_RP VC3_MPAR_tRP
#define VC3_MPAR_FAW VC3_MPAR_tFAW
#define VC3_MPAR_BL 4
#define MSCC_MEMPARM_MR0 ((VC3_MPAR_RL - 4) << 4) | ((VC3_MPAR_tWR - 4) << 9)
/* ODT_RTT: "0x0040" for 120ohm, and "0x0004" for 60ohm. */
#define MSCC_MEMPARM_MR1 0x0040
#define MSCC_MEMPARM_MR2 ((VC3_MPAR_WL - 5) << 3)
#define MSCC_MEMPARM_MR3 0
#endif /* MIPS_VCOREIII_MEMORY_DDR3 */
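/*
 * MSCC_MEMPARM_MR0..MR3 are the raw SDRAM mode register values; they are
 * written verbatim into ICPU_MEMCTRL_MR[0-3]_VAL by
 * hal_vcoreiii_init_memctl() below.
 */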

#define MSCC_MEMPARM_MEMCFG \
	((MIPS_VCOREIII_DDR_SIZE > SZ_512M) ? \
	 ICPU_MEMCTRL_CFG_DDR_512MBYTE_PLUS : 0) | \
	(VC3_MPAR_16BIT ? ICPU_MEMCTRL_CFG_DDR_WIDTH : 0) | \
	(VC3_MPAR_DDR3_MODE ? ICPU_MEMCTRL_CFG_DDR_MODE : 0) | \
	(VC3_MPAR_BURST_SIZE ? ICPU_MEMCTRL_CFG_BURST_SIZE : 0) | \
	(VC3_MPAR_BURST_LENGTH == 8 ? ICPU_MEMCTRL_CFG_BURST_LEN : 0) | \
	(VC3_MPAR_bank_addr_cnt == 3 ? ICPU_MEMCTRL_CFG_BANK_CNT : 0) | \
	ICPU_MEMCTRL_CFG_MSB_ROW_ADDR(VC3_MPAR_row_addr_cnt - 1) | \
	ICPU_MEMCTRL_CFG_MSB_COL_ADDR(VC3_MPAR_col_addr_cnt - 1)

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
#define MSCC_MEMPARM_PERIOD \
	ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(8) | \
	ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0 \
	ICPU_MEMCTRL_TIMING0_RD_TO_WR_DLY(VC3_MPAR_RL + VC3_MPAR_BL + 1 - \
					  VC3_MPAR_WL) | \
	ICPU_MEMCTRL_TIMING0_WR_CS_CHANGE_DLY(VC3_MPAR_BL - 1) | \
	ICPU_MEMCTRL_TIMING0_RD_CS_CHANGE_DLY(VC3_MPAR_BL) | \
	ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) | \
	ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_WL + \
					     VC3_MPAR_BL + \
					     VC3_MPAR_tWR - 1) | \
	ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BL - 1) | \
	ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_WL - 1) | \
	ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_RL - 3)

#define MSCC_MEMPARM_TIMING1 \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
	ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_FAW - 1) | \
	ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_RP - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) | \
	ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_WL + \
					  VC3_MPAR_BL + \
					  VC3_MPAR_tWTR - 1)

#define MSCC_MEMPARM_TIMING2 \
	ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_RP - 1) | \
	ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_MD - 1) | \
	ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) | \
	ICPU_MEMCTRL_TIMING2_INIT_DLY(VC3_MPAR_ID - 1)

#define MSCC_MEMPARM_TIMING3 \
	ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_WL + \
						    VC3_MPAR_tWTR - 1) |\
	ICPU_MEMCTRL_TIMING3_ODT_RD_DLY(VC3_MPAR_OR - 1) | \
	ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_OW - 1) | \
	ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_RL - 3)

#else
#define MSCC_MEMPARM_PERIOD \
	ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(1) | \
	ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0 \
	ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) | \
	ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_CL + \
					     (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
					     VC3_MPAR_tWR) | \
	ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BURST_LENGTH == 8 ? 3 : 1) | \
	ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_CL - 3) | \
	ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_CL - 3)

#define MSCC_MEMPARM_TIMING1 \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
	ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_tFAW - 1) | \
	ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_tRP - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) | \
	ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_CL + \
					  (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
					  VC3_MPAR_tWTR)
#define MSCC_MEMPARM_TIMING2 \
	ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_tRPA - 1) | \
	ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_tMRD - 1) | \
	ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) | \
	ICPU_MEMCTRL_TIMING2_FOUR_HUNDRED_NS_DLY(VC3_MPAR__400_ns_dly)

#define MSCC_MEMPARM_TIMING3 \
	ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_CL - 1) | \
	ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_CL - 1) | \
	ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_CL - 1)

#endif

enum {
	DDR_TRAIN_OK,
	DDR_TRAIN_CONTINUE,
	DDR_TRAIN_ERROR,
};

/*
 * We actually have very few 'pause' possibilities apart from
 * these assembly nops (at this very early stage).
 */
#define PAUSE() asm volatile("nop; nop; nop; nop; nop; nop; nop; nop")

/* NB: Assumes inlining as no stack is available! */
static inline void set_dly(u32 bytelane, u32 dly)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));

	r &= ~ICPU_MEMCTRL_DQS_DLY_DQS_DLY_M;
	r |= ICPU_MEMCTRL_DQS_DLY_DQS_DLY(dly);
	writel(r, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
}

static inline bool incr_dly(u32 bytelane)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));

	if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
		writel(r + 1, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
		return true;
	}

	return false;
}

static inline bool adjust_dly(int adjust)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));

	if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
		writel(r + adjust, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));
		return true;
	}

	return false;
}

/* NB: Assumes inlining as no stack is available! */
static inline void center_dly(u32 bytelane, u32 start)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane)) - start;

	writel(start + (r >> 1), BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
}

static inline void memphy_soft_reset(void)
{
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
	PAUSE();
	clrbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
	PAUSE();
}

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
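/*
 * Training pattern: hal_vcoreiii_wait_memctl() below writes words to the
 * start of DDR whose low 16 bits repeat each of these bytes, so that
 * look_for()/look_past(), which read byte offsets 0 and 1 of each word,
 * can check byte lane 0 and byte lane 1 independently (this assumes the
 * low half-word lands at the lowest byte addresses, i.e. a little-endian
 * configuration).
 */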
static u8 training_data[] = { 0xfe, 0x11, 0x33, 0x55, 0x77, 0x99, 0xbb, 0xdd };

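/*
 * Busy-wait for roughly val * 100 ns using SoC timer 0; e.g.
 * sleep_100ns(10000) waits about 1 ms.
 */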
static inline void sleep_100ns(u32 val)
{
	/* Set the timer tick generator to 100 ns */
	writel(VCOREIII_TIMER_DIVIDER - 1, BASE_CFG + ICPU_TIMER_TICK_DIV);

	/* Set the timer value */
	writel(val, BASE_CFG + ICPU_TIMER_VALUE(0));

	/* Enable timer 0 for one-shot */
	writel(ICPU_TIMER_CTRL_ONE_SHOT_ENA | ICPU_TIMER_CTRL_TIMER_ENA,
	       BASE_CFG + ICPU_TIMER_CTRL(0));

	/* Wait for timer 0 to reach 0 */
	while (readl(BASE_CFG + ICPU_TIMER_VALUE(0)) != 0)
		;
}

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_SERVAL)
/*
 * DDR memory sanity checking failed, tally and do hard reset
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_failed(void)
{
	register u32 reset;

#if defined(CONFIG_SOC_OCELOT)
	writel(readl(BASE_CFG + ICPU_GPR(6)) + 1, BASE_CFG + ICPU_GPR(6));

	clrbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
#endif

	/* We have to execute the reset function from cache: the reboot
	 * workaround in _machine_restart() switches the SPI NOR
	 * controller into SW bitbang mode.
	 *
	 * This renders the CPU unable to execute directly from the
	 * NOR, which is why the reset instructions are prefetched into
	 * the I-cache.
	 *
	 * When the DDR initialization fails we are still executing
	 * from NOR.
	 *
	 * The last instruction in _machine_restart() resets the MIPS
	 * CPU (and the cache), and the CPU starts executing from the
	 * reset vector.
	 */
	reset = KSEG0ADDR(_machine_restart);
	icache_lock((void *)reset, 128);
	asm volatile ("jr %0"::"r" (reset));
}
#else /* JR2 || ServalT */
static inline void hal_vcoreiii_ddr_failed(void)
{
	writel(0, BASE_CFG + ICPU_RESET);
	writel(PERF_SOFT_RST_SOFT_CHIP_RST, BASE_CFG + PERF_SOFT_RST);
}
#endif

#if defined(CONFIG_SOC_OCELOT)
static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	/* The DDR reset pin is on GPIO 19; toggle it low-to-high to release */
	setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
	writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_CLR);
	sleep_100ns(10000);
}

static inline void hal_vcoreiii_ddr_reset_release(void)
{
	/* The DDR reset pin is on GPIO 19; toggle it low-to-high to release */
	setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
	writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_SET);
	sleep_100ns(10000);
}

#else /* JR2 || ServalT || Serval */
static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	/* Ensure the memory controller physical iface is forced reset */
	writel(readl(BASE_CFG + ICPU_MEMPHY_CFG) |
	       ICPU_MEMPHY_CFG_PHY_RST, BASE_CFG + ICPU_MEMPHY_CFG);

	/* Ensure the memory controller is forced reset */
	writel(readl(BASE_CFG + ICPU_RESET) |
	       ICPU_RESET_MEM_RST_FORCE, BASE_CFG + ICPU_RESET);
}
#endif /* JR2 || ServalT || Serval */

/*
 * DDR memory sanity checking done, possibly enable ECC.
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_verified(void)
{
#ifdef MIPS_VCOREIII_MEMORY_ECC
	/* Finally, enable ECC */
	register u32 val = readl(BASE_CFG + ICPU_MEMCTRL_CFG);

	val |= ICPU_MEMCTRL_CFG_DDR_ECC_ERR_ENA;
	val &= ~ICPU_MEMCTRL_CFG_BURST_SIZE;

	writel(val, BASE_CFG + ICPU_MEMCTRL_CFG);
#endif

	/* Reset Status register - sticky bits */
	writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT), BASE_CFG + ICPU_MEMCTRL_STAT);
}

/* NB: Assumes inlining as no stack is available! */
static inline int look_for(u32 bytelane)
{
	register u32 i;

	/* Reset FIFO in case any previous access failed */
	for (i = 0; i < sizeof(training_data); i++) {
		register u32 byte;

		memphy_soft_reset();
		/* Reset sticky bits */
		writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
		       BASE_CFG + ICPU_MEMCTRL_STAT);
		/* Read data */
		byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
				   (i * 4));

		/*
		 * Prevent the compiler from reordering the instructions
		 * so that the read of RAM happens before the check of
		 * the error bits.
		 */
		rmb();
		if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
		    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
		     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
			/* Noise on the line */
			goto read_error;
		}
		/* If mismatch, increment DQS - if possible */
		if (byte != training_data[i]) {
read_error:
			if (!incr_dly(bytelane))
				return DDR_TRAIN_ERROR;
			return DDR_TRAIN_CONTINUE;
		}
	}
	return DDR_TRAIN_OK;
}

/* NB: Assumes inlining as no stack is available! */
static inline int look_past(u32 bytelane)
{
	register u32 i;

	/* Reset FIFO in case any previous access failed */
	for (i = 0; i < sizeof(training_data); i++) {
		register u32 byte;

		memphy_soft_reset();
		/* Ack sticky bits */
		writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
		       BASE_CFG + ICPU_MEMCTRL_STAT);
		byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
				   (i * 4));
		/*
		 * Prevent the compiler from reordering the instructions
		 * so that the read of RAM happens before the check of
		 * the error bits.
		 */
		rmb();
		if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
		    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
		     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
			/* Noise on the line */
			goto read_error;
		}
		/* Bail out when we see first mismatch */
		if (byte != training_data[i]) {
read_error:
			return DDR_TRAIN_OK;
		}
	}
	/* All data compares OK, increase DQS and retry */
	if (!incr_dly(bytelane))
		return DDR_TRAIN_ERROR;

	return DDR_TRAIN_CONTINUE;
}

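/*
 * Per-bytelane DQS training: starting from delay 0, look_for() steps the
 * DQS delay until the training pattern reads back correctly (start of the
 * data-valid window), look_past() keeps stepping until reads fail again
 * (end of the window), and center_dly() then places the delay in the
 * middle of that window.
 */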
static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
{
	register int res;
	register u32 dqs_s;

	set_dly(bytelane, 0); /* Start training at DQS=0 */
	while ((res = look_for(bytelane)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;

	dqs_s = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
	while ((res = look_past(bytelane)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;
	/* Reset FIFO - for good measure */
	memphy_soft_reset();
	/* Adjust to center [dqs_s;cur] */
	center_dly(bytelane, dqs_s);
	return DDR_TRAIN_OK;
}

/* This algorithm is converted from the TCL training algorithm used
 * during silicon simulation.
 * NB: Assumes inlining as no stack is available!
 */
static inline int hal_vcoreiii_init_dqs(void)
{
#define MAX_DQS 32
	register u32 i, j;

	for (i = 0; i < MAX_DQS; i++) {
		set_dly(0, i); /* Byte-lane 0 */
		for (j = 0; j < MAX_DQS; j++) {
			__maybe_unused register u32 byte;

			set_dly(1, j); /* Byte-lane 1 */
			/* Reset FIFO in case any previous access failed */
			memphy_soft_reset();
			writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
			       BASE_CFG + ICPU_MEMCTRL_STAT);
			byte = __raw_readb((void __iomem *)MSCC_DDR_TO);
			byte = __raw_readb((void __iomem *)(MSCC_DDR_TO + 1));
			if (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
			      (ICPU_MEMCTRL_STAT_RDATA_MASKED |
			       ICPU_MEMCTRL_STAT_RDATA_DUMMY)))
				return 0;
		}
	}
	return -1;
}

static inline int dram_check(void)
{
	register u32 i;

	for (i = 0; i < 8; i++) {
		__raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));
		if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
			return 1;
	}
	return 0;
}
#else /* Luton */

static inline void sleep_100ns(u32 val)
{
}

static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_RST);
	setbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);
}

static inline void hal_vcoreiii_ddr_reset_release(void)
{
}

static inline void hal_vcoreiii_ddr_failed(void)
{
	register u32 memphy_cfg = readl(BASE_CFG + ICPU_MEMPHY_CFG);

	/* Do a fifo reset and start over */
	writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
	writel(memphy_cfg & ~ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
	writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
}

static inline void hal_vcoreiii_ddr_verified(void)
{
}

static inline int look_for(u32 data)
{
	register u32 byte = __raw_readb((void __iomem *)MSCC_DDR_TO);

	if (data != byte) {
		if (!incr_dly(0))
			return DDR_TRAIN_ERROR;
		return DDR_TRAIN_CONTINUE;
	}

	return DDR_TRAIN_OK;
}

/* This algorithm is converted from the TCL training algorithm used
 * during silicon simulation.
 * NB: Assumes inlining as no stack is available!
 */
static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
{
	register int res;

	set_dly(bytelane, 0); /* Start training at DQS=0 */
	while ((res = look_for(0xff)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;

	set_dly(bytelane, 0); /* Start training at DQS=0 */
	while ((res = look_for(0x00)) == DDR_TRAIN_CONTINUE)
		;

	if (res != DDR_TRAIN_OK)
		return res;

	adjust_dly(-3);

	return DDR_TRAIN_OK;
}

static inline int hal_vcoreiii_init_dqs(void)
{
	return 0;
}

static inline int dram_check(void)
{
	register u32 i;

	for (i = 0; i < 8; i++) {
		__raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));

		if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
			return 1;
	}

	return 0;
}
#endif

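/*
 * Rough sketch of how a board's early init is expected to use these
 * helpers (the exact caller logic lives in the SoC code and may differ):
 *
 *	hal_vcoreiii_init_memctl();
 *	hal_vcoreiii_wait_memctl();
 *	if (hal_vcoreiii_init_dqs() ||
 *	    hal_vcoreiii_train_bytelane(0) ||
 *	    (VC3_MPAR_16BIT && hal_vcoreiii_train_bytelane(1)) ||
 *	    dram_check())
 *		hal_vcoreiii_ddr_failed();
 *	else
 *		hal_vcoreiii_ddr_verified();
 */
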
/*
 * NB: Called *early* to init memory controller - assumes inlining as
 * no stack is available!
 */
static inline void hal_vcoreiii_init_memctl(void)
{
	/* Ensure DDR is in reset */
	hal_vcoreiii_ddr_reset_assert();

	/* A wait here may not be needed, but ... */
	PAUSE();

	/* Drop sys ctl memory controller forced reset */
	clrbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);

	PAUSE();

	/* Drop Reset, enable SSTL */
	writel(ICPU_MEMPHY_CFG_PHY_SSTL_ENA, BASE_CFG + ICPU_MEMPHY_CFG);
	PAUSE();

	/* Start the automatic SSTL output and ODT drive-strength calibration */
	writel(ICPU_MEMPHY_ZCAL_ZCAL_PROG_ODT(MIPS_VCOREIII_MEMORY_SSTL_ODT) |
	       /* drive strength */
	       ICPU_MEMPHY_ZCAL_ZCAL_PROG(MIPS_VCOREIII_MEMORY_SSTL_DRIVE) |
	       /* Start calibration process */
	       ICPU_MEMPHY_ZCAL_ZCAL_ENA, BASE_CFG + ICPU_MEMPHY_ZCAL);

	/* Wait for ZCAL to clear */
	while (readl(BASE_CFG + ICPU_MEMPHY_ZCAL) & ICPU_MEMPHY_ZCAL_ZCAL_ENA)
		;
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT)
	/* Check no ZCAL_ERR */
	if (readl(BASE_CFG + ICPU_MEMPHY_ZCAL_STAT)
	    & ICPU_MEMPHY_ZCAL_STAT_ZCAL_ERR)
		hal_vcoreiii_ddr_failed();
#endif
	/* Drive CL, CK, ODT */
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_ODT_OE |
		     ICPU_MEMPHY_CFG_PHY_CK_OE | ICPU_MEMPHY_CFG_PHY_CL_OE);

	/* Initialize memory controller */
	writel(MSCC_MEMPARM_MEMCFG, BASE_CFG + ICPU_MEMCTRL_CFG);
	writel(MSCC_MEMPARM_PERIOD, BASE_CFG + ICPU_MEMCTRL_REF_PERIOD);

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
	writel(MSCC_MEMPARM_TIMING0, BASE_CFG + ICPU_MEMCTRL_TIMING0);
#else /* Luton */
	clrbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, ((1 << 20) - 1));
	setbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, MSCC_MEMPARM_TIMING0);
#endif

	writel(MSCC_MEMPARM_TIMING1, BASE_CFG + ICPU_MEMCTRL_TIMING1);
	writel(MSCC_MEMPARM_TIMING2, BASE_CFG + ICPU_MEMCTRL_TIMING2);
	writel(MSCC_MEMPARM_TIMING3, BASE_CFG + ICPU_MEMCTRL_TIMING3);
	writel(MSCC_MEMPARM_MR0, BASE_CFG + ICPU_MEMCTRL_MR0_VAL);
	writel(MSCC_MEMPARM_MR1, BASE_CFG + ICPU_MEMCTRL_MR1_VAL);
	writel(MSCC_MEMPARM_MR2, BASE_CFG + ICPU_MEMCTRL_MR2_VAL);
	writel(MSCC_MEMPARM_MR3, BASE_CFG + ICPU_MEMCTRL_MR3_VAL);

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_SERVAL)
	/* Termination setup - enable ODT */
	writel(ICPU_MEMCTRL_TERMRES_CTRL_LOCAL_ODT_RD_ENA |
	       /* Assert ODT0 for any write */
	       ICPU_MEMCTRL_TERMRES_CTRL_ODT_WR_ENA(3),
	       BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

	/* Release Reset from DDR */
#if defined(CONFIG_SOC_OCELOT)
	hal_vcoreiii_ddr_reset_release();
#endif

	writel(readl(BASE_CFG + ICPU_GPR(7)) + 1, BASE_CFG + ICPU_GPR(7));
#elif defined(CONFIG_SOC_JR2) || defined(CONFIG_SOC_SERVALT)
	writel(ICPU_MEMCTRL_TERMRES_CTRL_ODT_WR_ENA(3),
	       BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);
#else /* Luton */
	/* Termination setup - disable ODT */
	writel(0, BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

#endif
}

static inline void hal_vcoreiii_wait_memctl(void)
{
	/* Now, rip it! */
	writel(ICPU_MEMCTRL_CTRL_INITIALIZE, BASE_CFG + ICPU_MEMCTRL_CTRL);

	while (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT)
		 & ICPU_MEMCTRL_STAT_INIT_DONE))
		;

	/* Settle...? */
	sleep_100ns(10000);
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
	/* Establish data contents in DDR RAM for training */

	__raw_writel(0xcacafefe, ((void __iomem *)MSCC_DDR_TO));
	__raw_writel(0x22221111, ((void __iomem *)MSCC_DDR_TO + 0x4));
	__raw_writel(0x44443333, ((void __iomem *)MSCC_DDR_TO + 0x8));
	__raw_writel(0x66665555, ((void __iomem *)MSCC_DDR_TO + 0xC));
	__raw_writel(0x88887777, ((void __iomem *)MSCC_DDR_TO + 0x10));
	__raw_writel(0xaaaa9999, ((void __iomem *)MSCC_DDR_TO + 0x14));
	__raw_writel(0xccccbbbb, ((void __iomem *)MSCC_DDR_TO + 0x18));
	__raw_writel(0xeeeedddd, ((void __iomem *)MSCC_DDR_TO + 0x1C));
#else
	__raw_writel(0xff, ((void __iomem *)MSCC_DDR_TO));
#endif
}
#endif /* __ASM_MACH_DDR_H */