/*
 * (C) Copyright 2007
 * Nobuhiro Iwamatsu <iwamatsu@nigauri.org>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <common.h>
#include <command.h>
#include <asm/processor.h>
#include <asm/io.h>

/*
 * Jump to P2 area.
 * When handling TLB or caches, we need to do it from P2 area.
 */
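/*
 * On SH-4 the P1 segment (0x80000000-0x9FFFFFFF) is cached, while the
 * P2 segment (0xA0000000-0xBFFFFFFF) is the uncached mirror of the same
 * physical memory.  ORing a P1 address with 0x20000000 therefore yields
 * the matching P2 address, which is what the "or %1, %0" below does
 * before the jump.
 */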
#define jump_to_P2()					\
	do {						\
		unsigned long __dummy;			\
		__asm__ __volatile__(			\
			"mov.l	1f, %0\n\t"		\
			"or	%1, %0\n\t"		\
			"jmp	@%0\n\t"		\
			" nop\n\t"			\
			".balign 4\n"			\
			"1:	.long 2f\n"		\
			"2:"				\
			: "=&r" (__dummy)		\
			: "r" (0x20000000));		\
	} while (0)

/*
 * Back to P1 area.
 */
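/*
 * The run of nops below appears to give the preceding cache-control
 * writes issued from P2 time to take effect before execution returns
 * to the cached P1 segment; the SH manuals ask for several instructions
 * to execute between a CCR update and the next access to a cacheable
 * region.
 */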
#define back_to_P1()					\
	do {						\
		unsigned long __dummy;			\
		__asm__ __volatile__(			\
			"nop;nop;nop;nop;nop;nop;nop\n\t" \
			"mov.l	1f, %0\n\t"		\
			"jmp	@%0\n\t"		\
			" nop\n\t"			\
			".balign 4\n"			\
			"1:	.long 2f\n"		\
			"2:"				\
			: "=&r" (__dummy));		\
	} while (0)

#define CACHE_VALID	1
#define CACHE_UPDATED	2

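/*
 * Write back every dirty line in the operand cache.  The loops walk the
 * memory-mapped OC address array, one tag word per way/entry; for each
 * word with the U ("updated", i.e. dirty) bit set, clearing that bit and
 * writing the word back makes the controller write the line out to
 * memory.  This has to run from P2, hence the jump_to_P2()/back_to_P1()
 * bracket.
 */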
static inline void cache_wback_all(void)
{
	unsigned long addr, data, i, j;

	jump_to_P2();
	for (i = 0; i < CACHE_OC_NUM_ENTRIES; i++) {
		for (j = 0; j < CACHE_OC_NUM_WAYS; j++) {
			addr = CACHE_OC_ADDRESS_ARRAY | (j << CACHE_OC_WAY_SHIFT)
				| (i << CACHE_OC_ENTRY_SHIFT);
			data = inl(addr);
			if (data & CACHE_UPDATED) {
				data &= ~CACHE_UPDATED;
				outl(data, addr);
			}
		}
	}
	back_to_P1();
}


#define CACHE_ENABLE	0
#define CACHE_DISABLE	1

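/*
 * Enable or disable the operand cache; cmd is CACHE_ENABLE or
 * CACHE_DISABLE.  If the cache is currently enabled, dirty lines are
 * written back first so no data is lost, and CCR itself is programmed
 * from the uncached P2 segment as the hardware requires.
 */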
int cache_control(unsigned int cmd)
{
	unsigned long ccr;

	jump_to_P2();
	ccr = inl(CCR);

	if (ccr & CCR_CACHE_ENABLE)
		cache_wback_all();

	if (cmd == CACHE_DISABLE)
		outl(CCR_CACHE_STOP, CCR);
	else
		outl(CCR_CACHE_INIT, CCR);
	back_to_P1();

	return 0;
}

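/*
 * Write back and invalidate the data cache lines covering [start, end).
 * "ocbp" (operand cache block purge) writes a line back to memory if it
 * is dirty and then invalidates it.  start is rounded down to a
 * cache-line boundary; end is treated as exclusive.
 */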
void flush_dcache_range(unsigned long start, unsigned long end)
{
	u32 v;

	start &= ~(L1_CACHE_BYTES - 1);
	for (v = start; v < end; v += L1_CACHE_BYTES) {
		asm volatile ("ocbp	%0" : /* no output */
			      : "m" (__m(v)));
	}
}

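/*
 * Invalidate the data cache lines covering [start, end) without writing
 * them back.  "ocbi" (operand cache block invalidate) simply drops each
 * line, so any dirty data in the range is discarded; use
 * flush_dcache_range() instead if the contents must reach memory.
 */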
void invalidate_dcache_range(unsigned long start, unsigned long end)
{
	u32 v;

	start &= ~(L1_CACHE_BYTES - 1);
	for (v = start; v < end; v += L1_CACHE_BYTES) {
		asm volatile ("ocbi	%0" : /* no output */
			      : "m" (__m(v)));
	}
}