5#ifndef ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_
6#define ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_
8#include <xtensa/config/core-isa.h>
17#define Z_DCACHE_MAX (XCHAL_DCACHE_SIZE / XCHAL_DCACHE_WAYS)
20BUILD_ASSERT(Z_IS_POW2(XCHAL_DCACHE_LINESIZE));
21BUILD_ASSERT(Z_IS_POW2(Z_DCACHE_MAX));
24static ALWAYS_INLINE void z_xtensa_cache_flush(
void *addr,
size_t bytes)
27 size_t step = XCHAL_DCACHE_LINESIZE;
29 size_t last =
ROUND_UP(((
long)addr) + bytes, step);
32 for (line = first; bytes && line < last; line += step) {
33 __asm__
volatile(
"dhwb %0, 0" ::
"r"(line));
38static ALWAYS_INLINE void z_xtensa_cache_flush_inv(
void *addr,
size_t bytes)
41 size_t step = XCHAL_DCACHE_LINESIZE;
43 size_t last =
ROUND_UP(((
long)addr) + bytes, step);
46 for (line = first; bytes && line < last; line += step) {
47 __asm__
volatile(
"dhwbi %0, 0" ::
"r"(line));
52static ALWAYS_INLINE void z_xtensa_cache_inv(
void *addr,
size_t bytes)
55 size_t step = XCHAL_DCACHE_LINESIZE;
57 size_t last =
ROUND_UP(((
long)addr) + bytes, step);
60 for (line = first; bytes && line < last; line += step) {
61 __asm__
volatile(
"dhi %0, 0" ::
"r"(line));
68 z_xtensa_cache_inv(NULL, Z_DCACHE_MAX);
73 z_xtensa_cache_flush(NULL, Z_DCACHE_MAX);
78 z_xtensa_cache_flush_inv(NULL, Z_DCACHE_MAX);
82#if defined(CONFIG_XTENSA_RPO_CACHE)
83#if defined(CONFIG_ARCH_HAS_COHERENCE)
86 size_t addr = (size_t)
ptr;
88 return (addr >> 29) == CONFIG_XTENSA_UNCACHED_REGION;
102 if (Z_IS_POW2(rxor)) {
103 if ((rxor & rto) == 0) {
109 return (addr & ~(7U << 29)) | rto;
132static inline void __sparse_cache *arch_xtensa_cached_ptr(
void *
ptr)
134 return (__sparse_force
void __sparse_cache *)z_xtrpoflip((
uint32_t)
ptr,
135 CONFIG_XTENSA_CACHED_REGION,
136 CONFIG_XTENSA_UNCACHED_REGION);
157static inline void *arch_xtensa_uncached_ptr(
void __sparse_cache *
ptr)
160 CONFIG_XTENSA_UNCACHED_REGION,
161 CONFIG_XTENSA_CACHED_REGION);
/* Cache attribute for a 512 MB region: 4 = cached (writeback),
 * 2 = bypass (uncached), 15 = invalid/unmapped for all other regions.
 */
#define _REGION_ATTR(r)						\
	((r) == CONFIG_XTENSA_CACHED_REGION ? 4 :		\
	 ((r) == CONFIG_XTENSA_UNCACHED_REGION ? 2 : 15))
/* Program the data and instruction TLB entry for one 512 MB region.
 * Uses the local variables "addr" and "addrincr" declared by
 * ARCH_XTENSA_SET_RPO_TLB(), and advances addr to the next region.
 *
 * The region that contains the currently-executing code (the cached
 * region) needs special care: its ITLB entry cannot be rewritten while
 * fetching from an arbitrary alignment, so we jump to an aligned
 * location first and isync after the write.
 */
#define _SET_ONE_TLB(region) do {				\
	uint32_t attr = _REGION_ATTR(region);			\
	if (XCHAL_HAVE_XLT_CACHEATTR) {				\
		/* NOTE(review): assumed writethrough-tag bit for  \
		 * XLT-style cache attributes — confirm value	\
		 */						\
		attr |= 0x10;					\
	}							\
	if (region != CONFIG_XTENSA_CACHED_REGION) {		\
		__asm__ volatile("wdtlb %0, %1; witlb %0, %1"	\
				 :: "r"(attr), "r"(addr));	\
	} else {						\
		__asm__ volatile("wdtlb %0, %1"			\
				 :: "r"(attr), "r"(addr));	\
		__asm__ volatile("j 1f; .align 8; 1:");		\
		__asm__ volatile("witlb %0, %1; isync"		\
				 :: "r"(attr), "r"(addr));	\
	}							\
	addr += addrincr;					\
} while (0)
/* Program cache attributes for all eight 512 MB regions (0x00000000
 * upward in 0x20000000 steps), unrolled at compile time via FOR_EACH.
 */
#define ARCH_XTENSA_SET_RPO_TLB() do {				\
	register uint32_t addr = 0, addrincr = 0x20000000;	\
	FOR_EACH(_SET_ONE_TLB, (;), 0, 1, 2, 3, 4, 5, 6, 7);	\
} while (0)
#define ALWAYS_INLINE
Definition: common.h:124
static bool arch_mem_coherent(void *ptr)
Detect memory coherence type.
Definition: arch_interface.h:819
#define ROUND_UP(x, align)
Value of x rounded up to the next multiple of align, which must be a power of 2.
Definition: util.h:217
#define ROUND_DOWN(x, align)
Value of x rounded down to the previous multiple of align, which must be a power of 2.
Definition: util.h:225
void * ptr
Definition: printk.c:111
__UINT32_TYPE__ uint32_t
Definition: stdint.h:90