Mon Jul 27 10:59:11 2020 UTC
s/MODULE/_MODULE/

spotted by chuq@


(skrll)
diff -r1.15 -r1.16 src/sys/arch/mips/include/cache.h

cvs diff -r1.15 -r1.16 src/sys/arch/mips/include/cache.h

--- src/sys/arch/mips/include/cache.h 2020/07/26 08:08:41 1.15
+++ src/sys/arch/mips/include/cache.h 2020/07/27 10:59:10 1.16
@@ -1,326 +1,326 @@
-/*	$NetBSD: cache.h,v 1.15 2020/07/26 08:08:41 simonb Exp $	*/
+/*	$NetBSD: cache.h,v 1.16 2020/07/27 10:59:10 skrll Exp $	*/
 
 /*
  * Copyright 2001 Wasabi Systems, Inc.
  * All rights reserved.
  *
  * Written by Jason R. Thorpe for Wasabi Systems, Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright
  *    notice, this list of conditions and the following disclaimer in the
  *    documentation and/or other materials provided with the distribution.
  * 3. All advertising materials mentioning features or use of this software
  *    must display the following acknowledgement:
  *      This product includes software developed for the NetBSD Project by
  *      Wasabi Systems, Inc.
  * 4. The name of Wasabi Systems, Inc. may not be used to endorse
  *    or promote products derived from this software without specific prior
  *    written permission.
  *
  * THIS SOFTWARE IS PROVIDED BY WASABI SYSTEMS, INC. ``AS IS'' AND
  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL WASABI SYSTEMS, INC
  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  * POSSIBILITY OF SUCH DAMAGE.
  */
 
 #ifndef _MIPS_CACHE_H_
 #define _MIPS_CACHE_H_
 
 /*
  * Cache operations.
  *
  * We define the following primitives:
  *
  * --- Instruction cache synchronization (mandatory):
  *
  *      icache_sync_all         Synchronize I-cache
  *
  *      icache_sync_range       Synchronize I-cache range
  *
  *      icache_sync_range_index (index ops)
  *
  * --- Primary data cache (mandatory):
  *
  *      pdcache_wbinv_all       Write-back Invalidate primary D-cache
  *
  *      pdcache_wbinv_range     Write-back Invalidate primary D-cache range
  *
  *      pdcache_wbinv_range_index (index ops)
  *
  *      pdcache_inv_range       Invalidate primary D-cache range
  *
  *      pdcache_wb_range        Write-back primary D-cache range
  *
  * --- Secondary data cache (optional):
  *
  *      sdcache_wbinv_all       Write-back Invalidate secondary D-cache
  *
  *      sdcache_wbinv_range     Write-back Invalidate secondary D-cache range
  *
  *      sdcache_wbinv_range_index (index ops)
  *
  *      sdcache_inv_range       Invalidate secondary D-cache range
  *
  *      sdcache_wb_range        Write-back secondary D-cache range
  *
  * There are some rules that must be followed:
  *
  *      I-cache Synch (all or range):
  *              The goal is to synchronize the instruction stream,
  *              so you may need to write-back dirty data cache
  *              blocks first.  If a range is requested, and you
  *              can't synchronize just a range, you have to hit
  *              the whole thing.
  *
  *      D-cache Write-back Invalidate range:
  *              If you can't WB-Inv a range, you must WB-Inv the
  *              entire D-cache.
  *
  *      D-cache Invalidate:
  *              If you can't Inv the D-cache without doing a
  *              Write-back, YOU MUST PANIC.  This is to catch
  *              errors in calling code.  Callers must be aware
  *              of this scenario, and must handle it appropriately
  *              (consider the bus_dma(9) operations).
  *
  *      D-cache Write-back:
  *              If you can't Write-back without doing an invalidate,
  *              that's fine.  Then treat this as a WB-Inv.  Skipping
  *              the invalidate is merely an optimization.
  *
  *      All operations:
  *              Valid virtual addresses must be passed to the
  *              cache operation.
  *
  * Finally, these primitives are grouped together in reasonable
  * ways.  For all operations described here, first the primary
  * cache is frobbed, then the secondary cache frobbed, if the
  * operation for the secondary cache exists.
  *
  *      mips_icache_sync_all    Synchronize I-cache
  *
  *      mips_icache_sync_range  Synchronize I-cache range
  *
  *      mips_icache_sync_range_index (index ops)
  *
  *      mips_dcache_wbinv_all   Write-back Invalidate D-cache
  *
  *      mips_dcache_wbinv_range Write-back Invalidate D-cache range
  *
  *      mips_dcache_wbinv_range_index (index ops)
  *
  *      mips_dcache_inv_range   Invalidate D-cache range
  *
  *      mips_dcache_wb_range    Write-back D-cache range
  */
 
 struct mips_cache_ops {
         void    (*mco_icache_sync_all)(void);
         void    (*mco_icache_sync_range)(register_t, vsize_t);
         void    (*mco_icache_sync_range_index)(vaddr_t, vsize_t);
 
         void    (*mco_pdcache_wbinv_all)(void);
         void    (*mco_pdcache_wbinv_range)(register_t, vsize_t);
         void    (*mco_pdcache_wbinv_range_index)(vaddr_t, vsize_t);
         void    (*mco_pdcache_inv_range)(register_t, vsize_t);
         void    (*mco_pdcache_wb_range)(register_t, vsize_t);
 
         /* These are called only by the (mipsNN) icache functions. */
         void    (*mco_intern_icache_sync_range_index)(vaddr_t, vsize_t);
         void    (*mco_intern_icache_sync_range)(register_t, vsize_t);
         void    (*mco_intern_pdcache_sync_all)(void);
         void    (*mco_intern_pdcache_sync_range_index)(vaddr_t, vsize_t);
         void    (*mco_intern_pdcache_sync_range)(register_t, vsize_t);
         /* This is used internally by the (mipsNN) pdcache functions. */
         void    (*mco_intern_pdcache_wbinv_range_index)(vaddr_t, vsize_t);
 
         void    (*mco_sdcache_wbinv_all)(void);
         void    (*mco_sdcache_wbinv_range)(register_t, vsize_t);
         void    (*mco_sdcache_wbinv_range_index)(vaddr_t, vsize_t);
         void    (*mco_sdcache_inv_range)(register_t, vsize_t);
         void    (*mco_sdcache_wb_range)(register_t, vsize_t);
 
         /* These are called only by the (mipsNN) icache functions. */
         void    (*mco_intern_sdcache_sync_all)(void);
         void    (*mco_intern_sdcache_sync_range_index)(vaddr_t, vsize_t);
         void    (*mco_intern_sdcache_sync_range)(register_t, vsize_t);
 
         /* This is used internally by the (mipsNN) sdcache functions. */
         void    (*mco_intern_sdcache_wbinv_range_index)(vaddr_t, vsize_t);
 };
 
 extern struct mips_cache_ops mips_cache_ops;
 
 /* PRIMARY CACHE VARIABLES */
 struct mips_cache_info {
         u_int mci_picache_size;
         u_int mci_picache_line_size;
         u_int mci_picache_ways;
         u_int mci_picache_way_size;
         u_int mci_picache_way_mask;
         bool mci_picache_vivt;          /* virtually indexed and tagged */
 
         u_int mci_pdcache_size;         /* and unified */
         u_int mci_pdcache_line_size;
         u_int mci_pdcache_ways;
         u_int mci_pdcache_way_size;
         u_int mci_pdcache_way_mask;
         bool mci_pdcache_write_through;
 
         bool mci_pcache_unified;
 
         /* SECONDARY CACHE VARIABLES */
         u_int mci_sicache_size;
         u_int mci_sicache_line_size;
         u_int mci_sicache_ways;
         u_int mci_sicache_way_size;
         u_int mci_sicache_way_mask;
 
         u_int mci_sdcache_size;         /* and unified */
         u_int mci_sdcache_line_size;
         u_int mci_sdcache_ways;
         u_int mci_sdcache_way_size;
         u_int mci_sdcache_way_mask;
         bool mci_sdcache_write_through;
 
         bool mci_scache_unified;
 
         /* TERTIARY CACHE VARIABLES */
         u_int mci_tcache_size;          /* always unified */
         u_int mci_tcache_line_size;
         u_int mci_tcache_ways;
         u_int mci_tcache_way_size;
         u_int mci_tcache_way_mask;
         bool mci_tcache_write_through;
 
         /*
          * These two variables inform the rest of the kernel about the
          * size of the largest D-cache line present in the system.  The
          * mask can be used to determine if a region of memory is cache
          * line size aligned.
          *
          * Whenever any code updates a data cache line size, it should
          * call mips_dcache_compute_align() to recompute these values.
          */
         u_int mci_dcache_align;
         u_int mci_dcache_align_mask;
 
         u_int mci_cache_prefer_mask;
         u_int mci_cache_alias_mask;
         u_int mci_icache_alias_mask;
 
         bool mci_cache_virtual_alias;
         bool mci_icache_virtual_alias;
 };
 
 
 #if (MIPS1 + MIPS64_RMIXL + MIPS64R2_RMIXL + MIPS64_OCTEON) > 0 && \
     (MIPS3 + MIPS4) == 0 \
-    && !defined(MODULE)
+    && !defined(_MODULE)
 #define MIPS_CACHE_ALIAS_MASK           0
 #define MIPS_CACHE_VIRTUAL_ALIAS        false
 #else
 #define MIPS_CACHE_ALIAS_MASK           mips_cache_info.mci_cache_alias_mask
 #define MIPS_CACHE_VIRTUAL_ALIAS        mips_cache_info.mci_cache_virtual_alias
 #endif
 #if (MIPS1 + MIPS64_RMIXL + MIPS64_OCTEON) > 0 && \
     (MIPS3 + MIPS4) == 0 \
     && !defined(_MODULE)
 #define MIPS_ICACHE_ALIAS_MASK          0
 #define MIPS_ICACHE_VIRTUAL_ALIAS       false
 #else
 #define MIPS_ICACHE_ALIAS_MASK          mips_cache_info.mci_icache_alias_mask
 #define MIPS_ICACHE_VIRTUAL_ALIAS       mips_cache_info.mci_icache_virtual_alias
 #endif
 
 extern struct mips_cache_info mips_cache_info;
 
 
 /*
  * XXX XXX XXX THIS SHOULD NOT EXIST XXX XXX XXX
  */
 #define mips_cache_indexof(x)   (((vaddr_t)(x)) & MIPS_CACHE_ALIAS_MASK)
 #define mips_cache_badalias(x,y) (((vaddr_t)(x)^(vaddr_t)(y)) & MIPS_CACHE_ALIAS_MASK)
 
 #define __mco_noargs(prefix, x)                                         \
 do {                                                                    \
         (*mips_cache_ops.mco_ ## prefix ## p ## x )();                  \
         if (*mips_cache_ops.mco_ ## prefix ## s ## x )                  \
                 (*mips_cache_ops.mco_ ## prefix ## s ## x )();          \
 } while (/*CONSTCOND*/0)
 
 #define __mco_2args(prefix, x, a, b)                                    \
 do {                                                                    \
         (*mips_cache_ops.mco_ ## prefix ## p ## x )((a), (b));          \
         if (*mips_cache_ops.mco_ ## prefix ## s ## x )                  \
                 (*mips_cache_ops.mco_ ## prefix ## s ## x )((a), (b));  \
 } while (/*CONSTCOND*/0)
 
 #define mips_icache_sync_all()                                          \
         (*mips_cache_ops.mco_icache_sync_all)()
 
 #define mips_icache_sync_range(v, s)                                    \
         (*mips_cache_ops.mco_icache_sync_range)((v), (s))
 
 #define mips_icache_sync_range_index(v, s)                              \
         (*mips_cache_ops.mco_icache_sync_range_index)((v), (s))
 
 #define mips_dcache_wbinv_all()                                         \
         __mco_noargs(, dcache_wbinv_all)
 
 #define mips_dcache_wbinv_range(v, s)                                   \
         __mco_2args(, dcache_wbinv_range, (v), (s))
 
 #define mips_dcache_wbinv_range_index(v, s)                             \
         __mco_2args(, dcache_wbinv_range_index, (v), (s))
 
 #define mips_dcache_inv_range(v, s)                                     \
         __mco_2args(, dcache_inv_range, (v), (s))
 
 #define mips_dcache_wb_range(v, s)                                      \
         __mco_2args(, dcache_wb_range, (v), (s))
 
 
 /*
  * Private D-cache functions only called from (currently only the
  * mipsNN) I-cache functions.
  */
 #define mips_intern_dcache_sync_all()                                   \
         __mco_noargs(intern_, dcache_sync_all)
 
 #define mips_intern_dcache_sync_range_index(v, s)                       \
         __mco_2args(intern_, dcache_sync_range_index, (v), (s))
 
 #define mips_intern_dcache_sync_range(v, s)                             \
         __mco_2args(intern_, dcache_sync_range, (v), (s))
 
 #define mips_intern_pdcache_wbinv_range_index(v, s)                     \
         (*mips_cache_ops.mco_intern_pdcache_wbinv_range_index)((v), (s))
 
 #define mips_intern_sdcache_wbinv_range_index(v, s)                     \
         (*mips_cache_ops.mco_intern_sdcache_wbinv_range_index)((v), (s))
 
 #define mips_intern_icache_sync_range(v, s)                             \
         (*mips_cache_ops.mco_intern_icache_sync_range)((v), (s))
 
 #define mips_intern_icache_sync_range_index(v, s)                       \
         (*mips_cache_ops.mco_intern_icache_sync_range_index)((v), (s))
 
 void    mips_config_cache(void);
 void    mips_dcache_compute_align(void);
 
 #include <mips/cache_mipsNN.h>
 
 #endif /* _MIPS_CACHE_H_ */
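
Editorial note, not part of the commit: NetBSD kernel modules are built with -D_MODULE rather than -DMODULE, so the old !defined(MODULE) test was also true in module builds and let them compile the constant 0/false branch of MIPS_CACHE_ALIAS_MASK and MIPS_CACHE_VIRTUAL_ALIAS. With the fix, module code always reads the runtime mips_cache_info fields. A minimal sketch of a module-side consumer of these macros follows; module_mapping_may_alias() is a hypothetical helper used only for illustration, not something in the tree.

/*
 * Illustrative sketch only (hypothetical helper, not from the header).
 * When built with -D_MODULE, the corrected guard makes the macros below
 * expand to the runtime mips_cache_info fields, so one module binary
 * gives the right answer on any cache configuration of the running
 * kernel instead of a "no aliases" constant baked in at build time.
 */
#include <sys/types.h>

#include <mips/cache.h>

static inline bool
module_mapping_may_alias(vaddr_t va1, vaddr_t va2)
{
        /* Physically indexed D-caches cannot produce virtual aliases. */
        if (!MIPS_CACHE_VIRTUAL_ALIAS)
                return false;

        /* Two mappings conflict when they select different cache indices. */
        return mips_cache_badalias(va1, va2) != 0;
}

In-tree code (the MIPS pmap, for instance) makes checks of this shape; the point here is only that module code now gets the runtime answer rather than a compile-time constant.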