#ifndef ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_
#define ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_

#include <xtensa/config/core-isa.h>
#include <toolchain.h> /* ALWAYS_INLINE, BUILD_ASSERT */
#include <sys/util.h>  /* ROUND_UP, ROUND_DOWN */
#define Z_DCACHE_MAX (XCHAL_DCACHE_SIZE / XCHAL_DCACHE_WAYS)

#if XCHAL_DCACHE_SIZE
BUILD_ASSERT(Z_IS_POW2(XCHAL_DCACHE_LINESIZE));
BUILD_ASSERT(Z_IS_POW2(Z_DCACHE_MAX));
#endif
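/*
 * Example (sizes illustrative, not from this header): a 16 KiB,
 * 4-way data cache yields Z_DCACHE_MAX == 16384 / 4 == 4096 bytes
 * per way.  Both asserts demand powers of two because the
 * ROUND_UP()/ROUND_DOWN() calls below only accept power-of-two
 * alignments.
 */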
/* Write back (without invalidating) all cache lines covering [addr, addr+bytes) */
static ALWAYS_INLINE void z_xtensa_cache_flush(void *addr, size_t bytes)
{
#if XCHAL_DCACHE_SIZE
	size_t step = XCHAL_DCACHE_LINESIZE;
	size_t first = ROUND_DOWN(addr, step);
	size_t last = ROUND_UP(((long)addr) + bytes, step);
	size_t line;

	for (line = first; bytes && line < last; line += step) {
		__asm__ volatile("dhwb %0, 0" :: "r"(line));
	}
#endif
}
/* Write back and invalidate all cache lines covering [addr, addr+bytes) */
static ALWAYS_INLINE void z_xtensa_cache_flush_inv(void *addr, size_t bytes)
{
#if XCHAL_DCACHE_SIZE
	size_t step = XCHAL_DCACHE_LINESIZE;
	size_t first = ROUND_DOWN(addr, step);
	size_t last = ROUND_UP(((long)addr) + bytes, step);
	size_t line;

	for (line = first; bytes && line < last; line += step) {
		__asm__ volatile("dhwbi %0, 0" :: "r"(line));
	}
#endif
}
/* Invalidate (discarding dirty data!) all cache lines covering [addr, addr+bytes) */
static ALWAYS_INLINE void z_xtensa_cache_inv(void *addr, size_t bytes)
{
#if XCHAL_DCACHE_SIZE
	size_t step = XCHAL_DCACHE_LINESIZE;
	size_t first = ROUND_DOWN(addr, step);
	size_t last = ROUND_UP(((long)addr) + bytes, step);
	size_t line;

	for (line = first; bytes && line < last; line += step) {
		__asm__ volatile("dhi %0, 0" :: "r"(line));
	}
#endif
}
/* Invalidate the entire data cache */
static ALWAYS_INLINE void z_xtensa_cache_inv_all(void)
{
#if XCHAL_DCACHE_SIZE
	size_t step = XCHAL_DCACHE_LINESIZE;
	size_t line;

	for (line = 0; line < XCHAL_DCACHE_SIZE; line += step) {
		__asm__ volatile("dii %0, 0" :: "r"(line));
	}
#endif
}
/* Write back the entire data cache */
static ALWAYS_INLINE void z_xtensa_cache_flush_all(void)
{
#if XCHAL_DCACHE_SIZE
	size_t step = XCHAL_DCACHE_LINESIZE;
	size_t line;

	for (line = 0; line < XCHAL_DCACHE_SIZE; line += step) {
		__asm__ volatile("diwb %0, 0" :: "r"(line));
	}
#endif
}
/* Write back and invalidate the entire data cache */
static ALWAYS_INLINE void z_xtensa_cache_flush_inv_all(void)
{
#if XCHAL_DCACHE_SIZE
	size_t step = XCHAL_DCACHE_LINESIZE;
	size_t line;

	for (line = 0; line < XCHAL_DCACHE_SIZE; line += step) {
		__asm__ volatile("diwbi %0, 0" :: "r"(line));
	}
#endif
}
#if defined(CONFIG_XTENSA_RPO_CACHE)
#if defined(CONFIG_ARCH_HAS_COHERENCE)
/* A pointer is coherent iff it lies in the uncached 512MB region */
static inline bool arch_mem_coherent(void *ptr)
{
	size_t addr = (size_t) ptr;

	return (addr >> 29) == CONFIG_XTENSA_UNCACHED_REGION;
}
#endif /* CONFIG_ARCH_HAS_COHERENCE */
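/*
 * Example (region numbers illustrative): with
 * CONFIG_XTENSA_UNCACHED_REGION == 4, arch_mem_coherent((void *)0x80001000)
 * is true (0x80001000 >> 29 == 4), while the cached alias
 * (void *)0xA0001000 (region 5) is reported non-coherent.
 */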
/* Move a pointer's 512MB region bits (31:29) from region "rfrom" to
 * region "rto".  The math is all compile-time constant folding.
 */
static inline uint32_t z_xtrpoflip(uint32_t addr, uint32_t rto, uint32_t rfrom)
{
	uint32_t rxor = (rto ^ rfrom) << 29;

	rto <<= 29;
	if (Z_IS_POW2(rxor)) {
		if ((rxor & rto) == 0) {
			return addr & ~rxor;
		} else {
			return addr | rxor;
		}
	} else {
		return (addr & ~(7U << 29)) | rto;
	}
}
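/*
 * Worked example (region numbers illustrative, not mandated by this
 * header): with uncached region 4 and cached region 5, the regions
 * differ in a single bit:
 *
 *	rxor = (5 ^ 4) << 29 == 0x20000000	(a power of two)
 *
 * so the flip is one OR or AND-NOT on bit 29:
 *
 *	z_xtrpoflip(0x80001000, 5, 4) == 0xA0001000	(set bit 29)
 *	z_xtrpoflip(0xA0001000, 4, 5) == 0x80001000	(clear bit 29)
 *
 * When the two region numbers do not differ by a power of two, the
 * else branch masks out all three region bits and ORs in the target.
 */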
/* Return the cached alias of a pointer; the underlying memory is unchanged */
static inline void __sparse_cache *arch_xtensa_cached_ptr(void *ptr)
{
	return (__sparse_force void __sparse_cache *)z_xtrpoflip((uint32_t) ptr,
					CONFIG_XTENSA_CACHED_REGION,
					CONFIG_XTENSA_UNCACHED_REGION);
}
/* Return the uncached alias of a pointer; the underlying memory is unchanged */
static inline void *arch_xtensa_uncached_ptr(void __sparse_cache *ptr)
{
	return (void *)z_xtrpoflip((__sparse_force uint32_t) ptr,
				   CONFIG_XTENSA_UNCACHED_REGION,
				   CONFIG_XTENSA_CACHED_REGION);
}
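/*
 * Usage sketch (shared_buf is a hypothetical buffer shared with a
 * non-coherent agent): write through the uncached alias so the other
 * side sees the data, read hot data through the cached alias.  Both
 * aliases address the same physical memory and differ only in bits
 * 31:29, so the two conversions are inverses of each other:
 *
 *	void __sparse_cache *hot = arch_xtensa_cached_ptr(shared_buf);
 *	void *visible = arch_xtensa_uncached_ptr(hot);
 *	// visible == shared_buf
 */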
/* TLB attribute for a 512MB region: the cached region gets 4, the
 * uncached region gets 2, and everything else 15 (no access).
 */
#define _REGION_ATTR(r)						\
	((r) == CONFIG_XTENSA_CACHED_REGION ? 4 :		\
	 ((r) == CONFIG_XTENSA_UNCACHED_REGION ? 2 : 15))
#define _SET_ONE_TLB(region) do {				\
	uint32_t attr = _REGION_ATTR(region);			\
	if (XCHAL_HAVE_XLT_CACHEATTR) {				\
		attr |= addr; /* RPO with translation */	\
	}							\
	if (region != CONFIG_XTENSA_CACHED_REGION) {		\
		__asm__ volatile("wdtlb %0, %1; witlb %0, %1"	\
				 :: "r"(attr), "r"(addr));	\
	} else {						\
		/* The cached region covers the code running	\
		 * this macro, so the itlb write is sequenced	\
		 * after a jump to an 8-byte-aligned target and	\
		 * followed by an isync.			\
		 */						\
		__asm__ volatile("wdtlb %0, %1"			\
				 :: "r"(attr), "r"(addr));	\
		__asm__ volatile("j 1f; .align 8; 1:");		\
		__asm__ volatile("witlb %0, %1; isync"		\
				 :: "r"(attr), "r"(addr));	\
	}							\
	addr += addrincr;					\
} while (0)
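/*
 * The macro below unrolls (via FOR_EACH) into eight _SET_ONE_TLB()
 * invocations, one per 512MB region, with addr stepping from
 * 0x00000000 to 0xE0000000 by addrincr.  A reading of the
 * cached-region branch above: the dtlb entry can be rewritten
 * directly, but the itlb entry maps the instructions doing the
 * rewriting, so the aligned jump presumably ensures the witlb+isync
 * pair is fetched together before the new attribute takes effect.
 */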
#define ARCH_XTENSA_SET_RPO_TLB() do {				\
	register uint32_t addr = 0, addrincr = 0x20000000;	\
	FOR_EACH(_SET_ONE_TLB, (;), 0, 1, 2, 3, 4, 5, 6, 7);	\
} while (0)

#endif /* CONFIG_XTENSA_RPO_CACHE */

#endif /* ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_ */