/*
 * xtensa/core-macros.h -- C specific definitions
 *                         that depend on CORE configuration
 */

/*
 * Copyright (c) 2012 Tensilica Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#ifndef XTENSA_CACHE_H
#define XTENSA_CACHE_H

#include <xtensa/config/core.h>

/*  Only define things for C code.  */
#if !defined(_ASMLANGUAGE) && !defined(_NOCLANGUAGE) && !defined(__ASSEMBLER__)



/***************************   CACHE   ***************************/

/* All the macros are in lower case now and some of them
 * share a name with the existing functions from hal.h.
 * Including this header file will define XTHAL_USE_CACHE_MACROS,
 * which directs hal.h not to use the functions.
 */

/*
 *  Single-cache-line operations in C-callable inline assembly.
 *  Essentially macro versions of:
 *
 *      xthal_icache_line_invalidate(void *addr);
 *      xthal_icache_line_lock(void *addr);
 *      xthal_icache_line_unlock(void *addr);
 *      xthal_icache_sync(void);
 *
 *  NOTE:  unlike the above functions, the following macros do NOT
 *  execute the xthal_icache_sync() as part of each line operation.
 *  This sync must be called explicitly by the caller.  This is to
 *  allow better optimization when operating on more than one line.
 *
 *      xthal_dcache_line_invalidate(void *addr);
 *      xthal_dcache_line_writeback(void *addr);
 *      xthal_dcache_line_writeback_inv(void *addr);
 *      xthal_dcache_line_lock(void *addr);
 *      xthal_dcache_line_unlock(void *addr);
 *      xthal_dcache_sync(void);
 *      xthal_dcache_line_prefetch_for_write(void *addr);
 *      xthal_dcache_line_prefetch_for_read(void *addr);
 *
 *  All of these act as memory barriers, since that is how they are typically
 *  used (each op affects a whole cache line, so the asm clobbers all of
 *  memory, not just *addr).
 *
 *  NOTE:  All the block cache ops and line prefetches are implemented
 *  using intrinsics, so they are better optimized with respect to memory
 *  barriers etc.
 *
 *  All block downgrade functions exist in two forms: with and without
 *  the 'max' parameter.  This parameter lets the compiler optimize
 *  the function whenever 'max' is no larger than the cache size.
 *
 *      xthal_dcache_block_invalidate(void *addr, unsigned size);
 *      xthal_dcache_block_writeback(void *addr, unsigned size);
 *      xthal_dcache_block_writeback_inv(void *addr, unsigned size);
 *      xthal_dcache_block_invalidate_max(void *addr, unsigned size, unsigned max);
 *      xthal_dcache_block_writeback_max(void *addr, unsigned size, unsigned max);
 *      xthal_dcache_block_writeback_inv_max(void *addr, unsigned size, unsigned max);
 *
 *      xthal_dcache_block_prefetch_for_read(void *addr, unsigned size);
 *      xthal_dcache_block_prefetch_for_write(void *addr, unsigned size);
 *      xthal_dcache_block_prefetch_modify(void *addr, unsigned size);
 *      xthal_dcache_block_prefetch_read_write(void *addr, unsigned size);
 *      xthal_dcache_block_prefetch_for_read_grp(void *addr, unsigned size);
 *      xthal_dcache_block_prefetch_for_write_grp(void *addr, unsigned size);
 *      xthal_dcache_block_prefetch_modify_grp(void *addr, unsigned size);
 *      xthal_dcache_block_prefetch_read_write_grp(void *addr, unsigned size)
 *
 *      xthal_dcache_block_wait();
 *      xthal_dcache_block_required_wait();
 *      xthal_dcache_block_abort();
 *      xthal_dcache_block_prefetch_end();
 *      xthal_dcache_block_newgrp();
 */
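
/*
 *  Illustrative usage sketch (not part of the original header), assuming a
 *  config with a data cache (XCHAL_DCACHE_SIZE > 0): write back and
 *  invalidate a buffer one line at a time, then issue the single explicit
 *  sync that the line macros deliberately omit.  'my_dcache_flush_buf' is a
 *  hypothetical helper name.
 *
 *      static void my_dcache_flush_buf(void *buf, unsigned len)
 *      {
 *          char *p   = (char *)buf;
 *          char *end = p + len;
 *          for (; p < end; p += XCHAL_DCACHE_LINESIZE)
 *              xthal_dcache_line_writeback_inv(p);
 *          xthal_dcache_sync();        // the line macros do not sync for you
 *      }
 */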

/***   INSTRUCTION CACHE   ***/

#define XTHAL_USE_CACHE_MACROS

#if XCHAL_ICACHE_SIZE > 0
# define xthal_icache_line_invalidate(addr)     do { void *__a = (void*)(addr); \
                __asm__ __volatile__("ihi %0, 0" :: "a"(__a) : "memory");       \
                } while(0)
#else
# define xthal_icache_line_invalidate(addr)     do {/*nothing*/} while(0)
#endif

#if XCHAL_ICACHE_SIZE > 0 && XCHAL_ICACHE_LINE_LOCKABLE
# define xthal_icache_line_lock(addr)   do { void *__a = (void*)(addr);         \
                __asm__ __volatile__("ipfl %0, 0" :: "a"(__a) : "memory");      \
                } while(0)
# define xthal_icache_line_unlock(addr) do { void *__a = (void*)(addr);         \
                __asm__ __volatile__("ihu %0, 0" :: "a"(__a) : "memory");       \
                } while(0)
#else
# define xthal_icache_line_lock(addr)           do {/*nothing*/} while(0)
# define xthal_icache_line_unlock(addr)         do {/*nothing*/} while(0)
#endif

/*
 * Even if a config doesn't have caches, an isync is still needed
 * when instructions in any memory are modified, whether by a loader
 * or self-modifying code.  Therefore, this macro always produces
 * an isync, whether or not an icache is present.
 */
#define xthal_icache_sync()                                                     \
                __asm__ __volatile__("isync":::"memory")
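
/*
 *  Illustrative sketch (not part of the original header), assuming code was
 *  just written to memory at 'dst' for 'len' bytes (e.g. by a loader) and
 *  XCHAL_ICACHE_SIZE > 0: invalidate the stale icache lines, then sync.  On a
 *  config with a write-back data cache the new code would also have to be
 *  written back first.  'my_sync_icache_range' is a hypothetical helper name.
 *
 *      static void my_sync_icache_range(void *dst, unsigned len)
 *      {
 *          char *p   = (char *)dst;
 *          char *end = p + len;
 *          for (; p < end; p += XCHAL_ICACHE_LINESIZE)
 *              xthal_icache_line_invalidate(p);
 *          xthal_icache_sync();        // always needed after modifying code
 *      }
 */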


/***   DATA CACHE   ***/

#if XCHAL_DCACHE_SIZE > 0

# include <xtensa/tie/xt_datacache.h>

# define xthal_dcache_line_invalidate(addr)     do { void *__a = (void*)(addr); \
                __asm__ __volatile__("dhi %0, 0" :: "a"(__a) : "memory");       \
                } while(0)
# define xthal_dcache_line_writeback(addr)      do { void *__a = (void*)(addr); \
                __asm__ __volatile__("dhwb %0, 0" :: "a"(__a) : "memory");      \
                } while(0)
# define xthal_dcache_line_writeback_inv(addr)  do { void *__a = (void*)(addr); \
                __asm__ __volatile__("dhwbi %0, 0" :: "a"(__a) : "memory");     \
                } while(0)
# define xthal_dcache_sync()                                                    \
                __asm__ __volatile__("" /*"dsync"?*/:::"memory")
# define xthal_dcache_line_prefetch_for_read(addr) do {                         \
                XT_DPFR((const int*)addr, 0);                                   \
                } while(0)
#else
# define xthal_dcache_line_invalidate(addr)             do {/*nothing*/} while(0)
# define xthal_dcache_line_writeback(addr)              do {/*nothing*/} while(0)
# define xthal_dcache_line_writeback_inv(addr)          do {/*nothing*/} while(0)
# define xthal_dcache_sync()                            __asm__ __volatile__("":::"memory")
# define xthal_dcache_line_prefetch_for_read(addr)      do {/*nothing*/} while(0)
#endif

#if XCHAL_DCACHE_SIZE > 0 && XCHAL_DCACHE_LINE_LOCKABLE
# define xthal_dcache_line_lock(addr)   do { void *__a = (void*)(addr);         \
                __asm__ __volatile__("dpfl %0, 0" :: "a"(__a) : "memory");      \
                } while(0)
# define xthal_dcache_line_unlock(addr) do { void *__a = (void*)(addr);         \
                __asm__ __volatile__("dhu %0, 0" :: "a"(__a) : "memory");       \
                } while(0)
#else
# define xthal_dcache_line_lock(addr)           do {/*nothing*/} while(0)
# define xthal_dcache_line_unlock(addr)         do {/*nothing*/} while(0)
#endif

#if XCHAL_DCACHE_SIZE > 0 && XCHAL_DCACHE_IS_WRITEBACK

# define xthal_dcache_line_prefetch_for_write(addr) do {                        \
                XT_DPFW((const int*)addr, 0);                                   \
                } while(0)
#else
# define xthal_dcache_line_prefetch_for_write(addr)     do {/*nothing*/} while(0)
#endif


/*****   Block Operations   *****/

#if XCHAL_DCACHE_SIZE > 0 && XCHAL_HAVE_CACHE_BLOCKOPS

/* upgrades */

# define _XTHAL_DCACHE_BLOCK_UPGRADE(addr, size, type)          \
                {                                               \
                type((const int*)addr, size);                   \
                }

/* downgrades */

# define _XTHAL_DCACHE_BLOCK_DOWNGRADE(addr, size, type)        \
        unsigned _s = size;                                     \
        unsigned _a = addr;                                     \
        do {                                                    \
                unsigned __s = (_s > XCHAL_DCACHE_SIZE) ?       \
                                XCHAL_DCACHE_SIZE : _s;         \
                type((const int*)_a, __s);                      \
                _s -= __s;                                      \
                _a += __s;                                      \
        } while(_s > 0);

# define _XTHAL_DCACHE_BLOCK_DOWNGRADE_MAX(addr, size, type, max)       \
        if (max <= XCHAL_DCACHE_SIZE) {                                 \
                unsigned _s = size;                                     \
                unsigned _a = addr;                                     \
                type((const int*)_a, _s);                               \
        }                                                               \
        else {                                                          \
                _XTHAL_DCACHE_BLOCK_DOWNGRADE(addr, size, type);        \
        }

# define xthal_dcache_block_invalidate(addr, size)      do {            \
                _XTHAL_DCACHE_BLOCK_DOWNGRADE(addr, size, XT_DHI_B);    \
                } while(0)
# define xthal_dcache_block_writeback(addr, size)       do {            \
                _XTHAL_DCACHE_BLOCK_DOWNGRADE(addr, size, XT_DHWB_B);   \
                } while(0)
# define xthal_dcache_block_writeback_inv(addr, size)   do {            \
                _XTHAL_DCACHE_BLOCK_DOWNGRADE(addr, size, XT_DHWBI_B);  \
                } while(0)

# define xthal_dcache_block_invalidate_max(addr, size, max)     do {            \
                _XTHAL_DCACHE_BLOCK_DOWNGRADE_MAX(addr, size, XT_DHI_B, max);   \
                } while(0)
# define xthal_dcache_block_writeback_max(addr, size, max)      do {            \
                _XTHAL_DCACHE_BLOCK_DOWNGRADE_MAX(addr, size, XT_DHWB_B, max);  \
                } while(0)
# define xthal_dcache_block_writeback_inv_max(addr, size, max)  do {            \
                _XTHAL_DCACHE_BLOCK_DOWNGRADE_MAX(addr, size, XT_DHWBI_B, max); \
                } while(0)
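
/*
 *  Illustrative sketch (not part of the original header): before handing a
 *  buffer to a device that reads memory directly (e.g. a DMA engine), write
 *  the cached data back and wait for the block op to finish; after the device
 *  has written memory, invalidate before the CPU reads it.  The 'my_dma_*'
 *  and 'my_process' names are hypothetical placeholders.
 *
 *      xthal_dcache_block_writeback(tx_buf, tx_len);   // CPU -> device
 *      xthal_dcache_block_required_wait();             // writeback has completed
 *      my_dma_start(tx_buf, tx_len, rx_buf, rx_len);
 *      my_dma_wait_done();
 *      xthal_dcache_block_invalidate(rx_buf, rx_len);  // device -> CPU
 *      xthal_dcache_block_required_wait();
 *      my_process(rx_buf, rx_len);
 */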

/* upgrades that are performed even with write-thru caches  */

# define xthal_dcache_block_prefetch_read_write(addr, size) do {        \
                _XTHAL_DCACHE_BLOCK_UPGRADE(addr, size, XT_DPFW_B);     \
                } while(0)
# define xthal_dcache_block_prefetch_read_write_grp(addr, size) do {    \
                _XTHAL_DCACHE_BLOCK_UPGRADE(addr, size, XT_DPFW_BF);    \
                } while(0)
# define xthal_dcache_block_prefetch_for_read(addr, size) do {          \
                _XTHAL_DCACHE_BLOCK_UPGRADE(addr, size, XT_DPFR_B);     \
                } while(0)
# define xthal_dcache_block_prefetch_for_read_grp(addr, size) do {      \
                _XTHAL_DCACHE_BLOCK_UPGRADE(addr, size, XT_DPFR_BF);    \
                } while(0)

/* abort all or end optional block cache operations */
# define xthal_dcache_block_abort()     do {            \
                XT_PFEND_A();                           \
                } while(0)
# define xthal_dcache_block_end()       do {            \
                XT_PFEND_O();                           \
                } while(0)

/* wait for all/required block cache operations to finish */
# define xthal_dcache_block_wait()      do {            \
                XT_PFWAIT_A();                          \
                } while(0)
# define xthal_dcache_block_required_wait()     do {    \
                XT_PFWAIT_R();                          \
                } while(0)
/* Start a new group */
# define xthal_dcache_block_newgrp()    do {            \
                XT_PFNXT_F();                           \
                } while(0)
#else
# define xthal_dcache_block_invalidate(addr, size)              do {/*nothing*/} while(0)
# define xthal_dcache_block_writeback(addr, size)               do {/*nothing*/} while(0)
# define xthal_dcache_block_writeback_inv(addr, size)           do {/*nothing*/} while(0)
# define xthal_dcache_block_invalidate_max(addr, size, max)     do {/*nothing*/} while(0)
# define xthal_dcache_block_writeback_max(addr, size, max)      do {/*nothing*/} while(0)
# define xthal_dcache_block_writeback_inv_max(addr, size, max)  do {/*nothing*/} while(0)
# define xthal_dcache_block_prefetch_read_write(addr, size)     do {/*nothing*/} while(0)
# define xthal_dcache_block_prefetch_read_write_grp(addr, size) do {/*nothing*/} while(0)
# define xthal_dcache_block_prefetch_for_read(addr, size)       do {/*nothing*/} while(0)
# define xthal_dcache_block_prefetch_for_read_grp(addr, size)   do {/*nothing*/} while(0)
# define xthal_dcache_block_end()                               do {/*nothing*/} while(0)
# define xthal_dcache_block_abort()                             do {/*nothing*/} while(0)
# define xthal_dcache_block_wait()                              do {/*nothing*/} while(0)
# define xthal_dcache_block_required_wait()                     do {/*nothing*/} while(0)
# define xthal_dcache_block_newgrp()                            do {/*nothing*/} while(0)
#endif

#if XCHAL_DCACHE_SIZE > 0 && XCHAL_HAVE_CACHE_BLOCKOPS && XCHAL_DCACHE_IS_WRITEBACK

# define xthal_dcache_block_prefetch_for_write(addr, size) do {         \
                _XTHAL_DCACHE_BLOCK_UPGRADE(addr, size, XT_DPFW_B);     \
                } while(0)
# define xthal_dcache_block_prefetch_modify(addr, size) do {            \
                _XTHAL_DCACHE_BLOCK_UPGRADE(addr, size, XT_DPFM_B);     \
                } while(0)
# define xthal_dcache_block_prefetch_for_write_grp(addr, size) do {     \
                _XTHAL_DCACHE_BLOCK_UPGRADE(addr, size, XT_DPFW_BF);    \
                } while(0)
# define xthal_dcache_block_prefetch_modify_grp(addr, size) do {        \
                _XTHAL_DCACHE_BLOCK_UPGRADE(addr, size, XT_DPFM_BF);    \
                } while(0)
#else
# define xthal_dcache_block_prefetch_for_write(addr, size)      do {/*nothing*/} while(0)
# define xthal_dcache_block_prefetch_modify(addr, size)         do {/*nothing*/} while(0)
# define xthal_dcache_block_prefetch_for_write_grp(addr, size)  do {/*nothing*/} while(0)
# define xthal_dcache_block_prefetch_modify_grp(addr, size)     do {/*nothing*/} while(0)
#endif
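
/*
 *  Illustrative sketch (not part of the original header): prefetch a buffer
 *  that is about to be scanned, do unrelated work while the block prefetch
 *  runs, then wait for it before touching the data.  'my_do_other_work' and
 *  'my_process' are hypothetical placeholders.
 *
 *      xthal_dcache_block_prefetch_for_read(buf, len);
 *      my_do_other_work();
 *      xthal_dcache_block_wait();      // all queued block ops have finished
 *      my_process(buf, len);
 */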

/***************************   INTERRUPTS   ***************************/

/*
 *  Macro versions of:
 *      unsigned  xthal_get_intenable( void );
 *      void      xthal_set_intenable( unsigned );
 *      unsigned  xthal_get_interrupt( void );
 *      void      xthal_set_intset( unsigned );
 *      void      xthal_set_intclear( unsigned );
 *      unsigned  xthal_get_ccount(void);
 *      void      xthal_set_ccompare(int, unsigned);
 *      unsigned  xthal_get_ccompare(int);
 *
 *  NOTE: for {set,get}_ccompare, the first argument MUST be a decimal constant.
 */
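
/*
 *  Illustrative sketch (not part of the original header), assuming the core
 *  has a CCOMPARE0 timer: arm it to fire one tick from now and make sure its
 *  interrupt is enabled.  'MY_TICK_CYCLES' and 'MY_TIMER0_INTMASK' are
 *  hypothetical, configuration-dependent values, and the read-modify-write of
 *  INTENABLE is shown without any locking.
 *
 *      XTHAL_SET_CCOMPARE(0, XTHAL_GET_CCOUNT() + MY_TICK_CYCLES);
 *      XTHAL_SET_INTCLEAR(MY_TIMER0_INTMASK);
 *      XTHAL_SET_INTENABLE(XTHAL_GET_INTENABLE() | MY_TIMER0_INTMASK);
 */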

#if XCHAL_HAVE_INTERRUPTS
# define XTHAL_GET_INTENABLE()  ({ int __intenable; \
                                __asm__("rsr.intenable %0" : "=a"(__intenable)); \
                                __intenable; })
# define XTHAL_SET_INTENABLE(v) do { int __intenable = (int)(v); \
                        __asm__ __volatile__("wsr.intenable %0" :: "a"(__intenable):"memory"); \
                                } while(0)
# define XTHAL_GET_INTERRUPT()  ({ int __interrupt; \
                                __asm__("rsr.interrupt %0" : "=a"(__interrupt)); \
                                __interrupt; })
# define XTHAL_SET_INTSET(v)    do { int __interrupt = (int)(v); \
                        __asm__ __volatile__("wsr.intset %0" :: "a"(__interrupt):"memory"); \
                                } while(0)
# define XTHAL_SET_INTCLEAR(v)  do { int __interrupt = (int)(v); \
                        __asm__ __volatile__("wsr.intclear %0" :: "a"(__interrupt):"memory"); \
                                } while(0)
# define XTHAL_GET_CCOUNT()     ({ int __ccount; \
                                __asm__("rsr.ccount %0" : "=a"(__ccount)); \
                                __ccount; })
# define XTHAL_SET_CCOUNT(v)    do { int __ccount = (int)(v); \
                        __asm__ __volatile__("wsr.ccount %0" :: "a"(__ccount):"memory"); \
                                } while(0)
# define _XTHAL_GET_CCOMPARE(n) ({ int __ccompare; \
                                __asm__("rsr.ccompare" #n " %0" : "=a"(__ccompare)); \
                                __ccompare; })
# define XTHAL_GET_CCOMPARE(n)  _XTHAL_GET_CCOMPARE(n)
# define _XTHAL_SET_CCOMPARE(n,v) do { int __ccompare = (int)(v); \
                        __asm__ __volatile__("wsr.ccompare" #n " %0 ; esync" :: "a"(__ccompare):"memory"); \
                                } while(0)
# define XTHAL_SET_CCOMPARE(n,v) _XTHAL_SET_CCOMPARE(n,v)
#else
# define XTHAL_GET_INTENABLE()          0
# define XTHAL_SET_INTENABLE(v)         do {/*nothing*/} while(0)
# define XTHAL_GET_INTERRUPT()          0
# define XTHAL_SET_INTSET(v)            do {/*nothing*/} while(0)
# define XTHAL_SET_INTCLEAR(v)          do {/*nothing*/} while(0)
# define XTHAL_GET_CCOUNT()             0
# define XTHAL_SET_CCOUNT(v)            do {/*nothing*/} while(0)
# define XTHAL_GET_CCOMPARE(n)          0
# define XTHAL_SET_CCOMPARE(n,v)        do {/*nothing*/} while(0)
#endif


/***************************   MISC   ***************************/

/*
 *  Macro or inline versions of:
 *      void      xthal_clear_regcached_code( void );
 *      unsigned  xthal_get_prid( void );
 *      unsigned  xthal_compare_and_set( int *addr, int testval, int setval );
 */

#if XCHAL_HAVE_LOOPS
# define XTHAL_CLEAR_REGCACHED_CODE()           \
                __asm__ __volatile__("wsr.lcount %0" :: "a"(0) : "memory")
#else
# define XTHAL_CLEAR_REGCACHED_CODE()           do {/*nothing*/} while(0)
#endif

#if XCHAL_HAVE_PRID
# define XTHAL_GET_PRID()       ({ int __prid; \
                                __asm__("rsr.prid %0" : "=a"(__prid)); \
                                __prid; })
#else
# define XTHAL_GET_PRID()       0
#endif


static inline unsigned  XTHAL_COMPARE_AND_SET( int *addr, int testval, int setval )
{
    int result;

#if XCHAL_HAVE_S32C1I && XCHAL_HW_MIN_VERSION_MAJOR >= 2200
    __asm__ __volatile__ (
        "   wsr.scompare1 %2 \n"
        "   s32c1i %0, %3, 0 \n"
            : "=a"(result) : "0" (setval), "a" (testval), "a" (addr)
            : "memory");
#elif XCHAL_HAVE_INTERRUPTS
    int tmp;
    __asm__ __volatile__ (
        "   rsil   %4, 15 \n"           // %4 == saved ps
        "   l32i   %0, %3, 0 \n"        // %0 == value to test, return val
        "   bne    %2, %0, 9f \n"       // test
        "   s32i   %1, %3, 0 \n"        // write the new value
        "9: wsr.ps %4 ; rsync \n"       // restore the PS
        : "=a"(result)
        : "0" (setval), "a" (testval), "a" (addr), "a" (tmp)
        : "memory");
#else
    __asm__ __volatile__ (
        "   l32i  %0, %3, 0 \n"         // %0 == value to test, return val
        "   bne   %2, %0, 9f \n"        // test
        "   s32i  %1, %3, 0 \n"         // write the new value
        "9: \n"
            : "=a"(result) : "0" (setval), "a" (testval), "a" (addr)
            : "memory");
#endif
    return result;
}
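
/*
 *  Illustrative sketch (not part of the original header): a minimal spinlock
 *  built on XTHAL_COMPARE_AND_SET, assuming 0 means "free" and 1 means
 *  "taken".  XTHAL_COMPARE_AND_SET returns the previous value at *addr, so
 *  the acquire succeeds when it returns 0.  'my_lock' is a hypothetical
 *  variable name and no additional memory barriers are shown.
 *
 *      static int my_lock = 0;
 *
 *      static void my_lock_acquire(void)
 *      {
 *          while (XTHAL_COMPARE_AND_SET(&my_lock, 0, 1) != 0)
 *              ;                       // previous value != 0: lock was taken, retry
 *      }
 *
 *      static void my_lock_release(void)
 *      {
 *          my_lock = 0;
 *      }
 */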

#if XCHAL_HAVE_EXTERN_REGS

static inline unsigned XTHAL_RER (unsigned int reg)
{
  unsigned result;

  __asm__ __volatile__ (
        "   rer     %0, %1"
        : "=a" (result) : "a" (reg) : "memory");

  return result;
}

static inline void XTHAL_WER (unsigned reg, unsigned value)
{
  __asm__ __volatile__ (
        "   wer     %0, %1"
        : : "a" (value), "a" (reg) : "memory");
}

#endif /* XCHAL_HAVE_EXTERN_REGS */

#endif /* C code */

#endif /*XTENSA_CACHE_H*/