5#ifndef ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_
6#define ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_
8#include <xtensa/config/core-isa.h>
17#define Z_DCACHE_MAX (XCHAL_DCACHE_SIZE / XCHAL_DCACHE_WAYS)
20BUILD_ASSERT(Z_IS_POW2(XCHAL_DCACHE_LINESIZE));
21BUILD_ASSERT(Z_IS_POW2(Z_DCACHE_MAX));
24static ALWAYS_INLINE void z_xtensa_cache_flush(
void *addr,
size_t bytes)
27 size_t step = XCHAL_DCACHE_LINESIZE;
29 size_t last =
ROUND_UP(((
long)addr) + bytes, step);
32 for (line = first; bytes && line < last; line += step) {
33 __asm__
volatile(
"dhwb %0, 0" ::
"r"(line));
38static ALWAYS_INLINE void z_xtensa_cache_flush_inv(
void *addr,
size_t bytes)
41 size_t step = XCHAL_DCACHE_LINESIZE;
43 size_t last =
ROUND_UP(((
long)addr) + bytes, step);
46 for (line = first; bytes && line < last; line += step) {
47 __asm__
volatile(
"dhwbi %0, 0" ::
"r"(line));
52static ALWAYS_INLINE void z_xtensa_cache_inv(
void *addr,
size_t bytes)
55 size_t step = XCHAL_DCACHE_LINESIZE;
57 size_t last =
ROUND_UP(((
long)addr) + bytes, step);
60 for (line = first; bytes && line < last; line += step) {
61 __asm__
volatile(
"dhi %0, 0" ::
"r"(line));
68 z_xtensa_cache_inv(NULL, Z_DCACHE_MAX);
73 z_xtensa_cache_flush(NULL, Z_DCACHE_MAX);
78 z_xtensa_cache_flush_inv(NULL, Z_DCACHE_MAX);
#ifdef CONFIG_ARCH_HAS_COHERENCE
/**
 * @brief Detect memory coherence type.
 *
 * On Xtensa with region protection, the top three address bits select
 * one of eight 512 MB regions.  A pointer is coherent (safe for
 * cross-CPU access without explicit cache management) exactly when it
 * lies in the configured uncached region.
 *
 * @param ptr Address to test.
 * @return true if @p ptr is in the uncached (coherent) region.
 */
static ALWAYS_INLINE bool arch_mem_coherent(void *ptr)
{
	size_t addr = (size_t) ptr;

	/* Bits 31:29 are the region index */
	return (addr >> 29) == CONFIG_XTENSA_UNCACHED_REGION;
}
#endif
100 if (Z_IS_POW2(rxor)) {
101 if ((rxor & rto) == 0) {
107 return (addr & ~(7U << 29)) | rto;
133 return (__sparse_force
void __sparse_cache *)z_xtrpoflip((
uint32_t)
ptr,
134 CONFIG_XTENSA_CACHED_REGION,
135 CONFIG_XTENSA_UNCACHED_REGION);
159 CONFIG_XTENSA_UNCACHED_REGION,
160 CONFIG_XTENSA_CACHED_REGION);
/* Map a region index to its RPO TLB cache attribute.  Presumably:
 * 4 = cached writeback, 2 = uncached bypass, 15 = no access -- per the
 * Xtensa region-protection attribute encoding; TODO(review) confirm
 * against the ISA manual.  Region 0 (the vector region) is treated as
 * cached; the leading arm was reconstructed from the parenthesis
 * nesting of the garbled source.
 */
#define _REGION_ATTR(r)						\
	((r) == 0 ? 4 :						\
	 ((r) == CONFIG_XTENSA_CACHED_REGION ? 4 :		\
	  ((r) == CONFIG_XTENSA_UNCACHED_REGION ? 2 : 15)))
/* Program the data and instruction RPO TLB entries for one 512 MB
 * region, then advance `addr` (provided by the enclosing macro) to the
 * next region.  The currently-executing (cached) region is special:
 * the WITLB that changes the mapping under our feet must sit alone in
 * an aligned instruction block, reached by a jump, and be followed by
 * an ISYNC -- otherwise instruction fetch through the old mapping can
 * fault mid-sequence.
 *
 * NOTE(review): the XCHAL_HAVE_XLT_CACHEATTR branch body was lost in
 * extraction and reconstructed; verify against upstream.
 */
#define _SET_ONE_TLB(region) do { \
	uint32_t attr = _REGION_ATTR(region); \
	if (XCHAL_HAVE_XLT_CACHEATTR) { \
		attr |= 4; /* writeback */ \
	} \
	if (region != CONFIG_XTENSA_CACHED_REGION) { \
		__asm__ volatile("wdtlb %0, %1; witlb %0, %1" \
				 :: "r"(attr), "r"(addr)); \
	} else { \
		__asm__ volatile("wdtlb %0, %1" \
				 :: "r"(attr), "r"(addr)); \
		__asm__ volatile("j 1f; .align 8; 1:"); \
		__asm__ volatile("witlb %0, %1; isync" \
				 :: "r"(attr), "r"(addr)); \
	} \
	addr += addrincr; \
} while (0)
/* Program the RPO TLB attributes for all eight 512 MB regions by
 * unrolling _SET_ONE_TLB once per region.  `addr`/`addrincr` are the
 * registers the per-region macro reads and advances (0x20000000 =
 * 512 MB region stride).
 */
#define ARCH_XTENSA_SET_RPO_TLB() do { \
	register uint32_t addr = 0, addrincr = 0x20000000; \
	FOR_EACH(_SET_ONE_TLB, (;), 0, 1, 2, 3, 4, 5, 6, 7); \
} while (0)
/*
 * NOTE(review): the text below is Doxygen cross-reference residue left
 * over from an HTML extraction, not part of the original header.  It is
 * preserved as a comment so the file remains valid C:
 *
 *   static void *arch_xtensa_uncached_ptr(void __sparse_cache *ptr)
 *     Return uncached pointer to a RAM address.   Definition: cache.h:156
 *   static void __sparse_cache *arch_xtensa_cached_ptr(void *ptr)
 *     Return cached pointer to a RAM address.     Definition: cache.h:131
 *   #define ALWAYS_INLINE                         Definition: common.h:124
 *   static bool arch_mem_coherent(void *ptr)
 *     Detect memory coherence type.               Definition: arch_interface.h:797
 *   #define ROUND_UP(x, align)
 *     Value of x rounded up to the next multiple of align, which must be
 *     a power of 2.                               Definition: util.h:154
 *   #define ROUND_DOWN(x, align)
 *     Value of x rounded down to the previous multiple of align, which
 *     must be a power of 2.                       Definition: util.h:162
 *   void *ptr                                     Definition: printk.c:79
 *   __UINT32_TYPE__ uint32_t                      Definition: stdint.h:90
 */