// SPDX-License-Identifier: GPL-2.0+
/*
* (C) Copyright 2002
* Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 */

#include <common.h>
#include <cpu_func.h>
#include <asm/immap.h>
#include <asm/cache.h>
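
/*
 * Cache state is tracked through two status words rather than ordinary
 * globals. ICACHE_STATUS and DCACHE_STATUS come from the board
 * configuration and typically point into init RAM, so they are writable
 * even before relocation.
 */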
volatile int *cf_icache_status = (int *)ICACHE_STATUS;
volatile int *cf_dcache_status = (int *)DCACHE_STATUS;

void flush_cache(ulong start_addr, ulong size)
{
	/* Must be implemented for all M68k processors with copy-back data cache */
}

int icache_status(void)
{
	return *cf_icache_status;
}

int dcache_status(void)
{
	return *cf_dcache_status;
}
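
/*
 * Enable the instruction cache: invalidate any stale contents, record
 * the new state, program the access control registers with the board's
 * cacheable regions (ACR2/ACR3 on V4 cores, ACR0/ACR1 otherwise, plus
 * ACR6/ACR7 on V4e), then set the enable bits in the CACR.
 */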
void icache_enable(void)
{
	icache_invalid();

	*cf_icache_status = 1;

#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr2"::"r"(CONFIG_SYS_CACHE_ACR2));
	__asm__ __volatile__("movec %0, %%acr3"::"r"(CONFIG_SYS_CACHE_ACR3));
#if defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr6"::"r"(CONFIG_SYS_CACHE_ACR6));
	__asm__ __volatile__("movec %0, %%acr7"::"r"(CONFIG_SYS_CACHE_ACR7));
#endif
#else
	__asm__ __volatile__("movec %0, %%acr0"::"r"(CONFIG_SYS_CACHE_ACR0));
	__asm__ __volatile__("movec %0, %%acr1"::"r"(CONFIG_SYS_CACHE_ACR1));
#endif

	__asm__ __volatile__("movec %0, %%cacr"::"r"(CONFIG_SYS_CACHE_ICACR));
}
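
/*
 * Disable the instruction cache. Clearing the status word first means
 * the icache_invalid() call below writes only the invalidate bit to the
 * CACR, dropping the cache enable bits as a side effect; the ACR
 * mappings are then cleared explicitly.
 */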
void icache_disable(void)
{
	u32 temp = 0;

	*cf_icache_status = 0;
	icache_invalid();

#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr2"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr3"::"r"(temp));
#if defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr6"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr7"::"r"(temp));
#endif
#else
	__asm__ __volatile__("movec %0, %%acr0"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr1"::"r"(temp));
#endif
}
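
/*
 * Invalidate the instruction cache by setting the invalidate bit in the
 * CACR. If the cache is currently enabled, the configured ICACR enable
 * bits are written back in the same movec so the cache stays on.
 */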
void icache_invalid(void)
{
	u32 temp;

	temp = CONFIG_SYS_ICACHE_INV;
	if (*cf_icache_status)
		temp |= CONFIG_SYS_CACHE_ICACR;

	__asm__ __volatile__("movec %0, %%cacr"::"r"(temp));
}

/*
 * A data cache is present only on ColdFire V4 parts such as the
 * MCF547x_8x and MCF5445x; on ColdFire V2 and V3 the dcache is a dummy.
 */
void dcache_enable(void)
{
	dcache_invalid();

	*cf_dcache_status = 1;

#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr0"::"r"(CONFIG_SYS_CACHE_ACR0));
	__asm__ __volatile__("movec %0, %%acr1"::"r"(CONFIG_SYS_CACHE_ACR1));
#if defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr4"::"r"(CONFIG_SYS_CACHE_ACR4));
	__asm__ __volatile__("movec %0, %%acr5"::"r"(CONFIG_SYS_CACHE_ACR5));
#endif
#endif

	__asm__ __volatile__("movec %0, %%cacr"::"r"(CONFIG_SYS_CACHE_DCACR));
}
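
/*
 * Disable the data cache by clearing the ACR mappings and writing 0 to
 * the CACR (which is shared with the instruction cache).
 */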
void dcache_disable(void)
{
	u32 temp = 0;

	*cf_dcache_status = 0;
	dcache_invalid();

	__asm__ __volatile__("movec %0, %%cacr"::"r"(temp));

#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr0"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr1"::"r"(temp));
#if defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr4"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr5"::"r"(temp));
#endif
#endif
}
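
/*
 * Invalidate the data cache (V4/V4e only). Because the CACR controls
 * both caches, the current icache and dcache enable bits are preserved
 * in the value written alongside the invalidate bit.
 */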
void dcache_invalid(void)
{
#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
	u32 temp;

	temp = CONFIG_SYS_DCACHE_INV;
	if (*cf_dcache_status)
		temp |= CONFIG_SYS_CACHE_DCACR;
	if (*cf_icache_status)
		temp |= CONFIG_SYS_CACHE_ICACR;

	__asm__ __volatile__("movec %0, %%cacr"::"r"(temp));
#endif
}

__weak void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	/* An empty stub; the real implementation belongs in platform code */
}

__weak void flush_dcache_range(unsigned long start, unsigned long stop)
{
	/* An empty stub; the real implementation belongs in platform code */
}
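
/*
 * For reference, a board port overrides the weak stubs above when it
 * needs real range maintenance. On V4e cores the CPUSHL instruction
 * selects a cache line by set and way (encoded in the address register)
 * rather than by address, so a simple approach is to push the whole
 * data cache and ignore the range arguments. The sketch below is a
 * hypothetical, untested illustration of that technique; the 512-set,
 * 4-way geometry and the (set << 4) | way encoding are assumptions that
 * must match the actual core.
 *
 *	void flush_dcache_range(unsigned long start, unsigned long stop)
 *	{
 *		unsigned long set, way;
 *
 *		for (set = 0; set < 512; set++)
 *			for (way = 0; way < 4; way++)
 *				__asm__ __volatile__("cpushl %%dc,(%0)"
 *					: : "a"((set << 4) | way));
 *	}
 */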