arch/x86: add more arch dcache functions

Adapt to the reworked Zephyr cache API.
Fix build errors when building tests/kernel/cache with CACHE_MANAGEMENT and
CPU_HAS_DCACHE enabled for x86 SoCs.

Signed-off-by: Dong Wang <dong.d.wang@intel.com>
Dong Wang 2023-06-21 11:34:47 +08:00 committed by Anas Nashif
commit 4774a02b3b
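
For context, a minimal sketch of how these arch hooks are reached once CACHE_MANAGEMENT and CPU_HAS_DCACHE are enabled; it assumes the sys_cache_data_*() wrappers declared in <zephyr/cache.h>, and the function and buffer names below are illustrative only:

#include <stdint.h>
#include <zephyr/cache.h>

static uint8_t dma_buf[256];

static void dcache_maintenance_sketch(void)
{
	/* Whole-cache operations route to arch_dcache_enable() and the new
	 * arch_dcache_*_all() implementations on x86.
	 */
	sys_cache_data_enable();
	sys_cache_data_flush_all();

	/* Range operations route to arch_dcache_flush_range() and the new
	 * arch_dcache_invd_range()/arch_dcache_flush_and_invd_range().
	 */
	sys_cache_data_flush_range(dma_buf, sizeof(dma_buf));
	sys_cache_data_invd_range(dma_buf, sizeof(dma_buf));
	sys_cache_data_flush_and_invd_range(dma_buf, sizeof(dma_buf));
}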

@@ -18,6 +18,58 @@
#include <zephyr/cache.h>
#include <stdbool.h>

static inline void z_x86_wbinvd(void)
{
	__asm__ volatile("wbinvd;\n\t" : : : "memory");
}

void arch_dcache_enable(void)
{
	uint32_t cr0;

	/* Enable write-back caching by clearing the NW and CD bits */
	__asm__ volatile("movl %%cr0, %0;\n\t"
			 "andl $0x9fffffff, %0;\n\t"
			 "movl %0, %%cr0;\n\t"
			 : "=r" (cr0));
}

void arch_dcache_disable(void)
{
	uint32_t cr0;

	/* Enter the no-fill mode by setting NW=0 and CD=1 */
	__asm__ volatile("movl %%cr0, %0;\n\t"
			 "andl $0xdfffffff, %0;\n\t"
			 "orl $0x40000000, %0;\n\t"
			 "movl %0, %%cr0;\n\t"
			 : "=r" (cr0));

	/* Flush all caches */
	z_x86_wbinvd();
}

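/*
 * WBINVD writes back all modified cache lines and then invalidates the
 * caches, so the flush, invalidate and flush+invalidate variants of the
 * whole-cache operations below can all map to the same instruction.
 */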
int arch_dcache_flush_all(void)
{
	z_x86_wbinvd();
	return 0;
}

int arch_dcache_invd_all(void)
{
	z_x86_wbinvd();
	return 0;
}

int arch_dcache_flush_and_invd_all(void)
{
	z_x86_wbinvd();
	return 0;
}

/**
 * No alignment is required for either <virt> or <size>, but since
 * sys_cache_flush() iterates on the cache lines, a cache line alignment for
@@ -49,3 +101,13 @@ int arch_dcache_flush_range(void *start_addr, size_t size)
#endif
	return 0;
}

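/*
 * Flushing on x86 also invalidates the affected lines (CLFLUSH writes a
 * dirty line back and invalidates it; WBINVD does the same for the whole
 * cache), so the invalidate and flush+invalidate range operations can
 * simply reuse arch_dcache_flush_range().
 */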
int arch_dcache_invd_range(void *start_addr, size_t size)
{
	return arch_dcache_flush_range(start_addr, size);
}

int arch_dcache_flush_and_invd_range(void *start_addr, size_t size)
{
	return arch_dcache_flush_range(start_addr, size);
}
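
For reference, a compile-time sketch of the CR0 bit arithmetic used by arch_dcache_enable()/arch_dcache_disable(); the CR0_CD/CR0_NW names are illustrative, only the constants come from the code above:

#include <stdint.h>

#define CR0_CD ((uint32_t)1 << 30)	/* CR0.CD: cache disable */
#define CR0_NW ((uint32_t)1 << 29)	/* CR0.NW: not write-through */

/* arch_dcache_enable(): "andl $0x9fffffff" clears CD and NW, selecting
 * normal write-back caching.
 */
_Static_assert((uint32_t)~(CR0_CD | CR0_NW) == (uint32_t)0x9fffffff,
	       "enable mask clears CD and NW");

/* arch_dcache_disable(): "andl $0xdfffffff" clears NW and "orl $0x40000000"
 * sets CD, i.e. the CD=1/NW=0 no-fill mode, followed by a WBINVD flush.
 */
_Static_assert((uint32_t)~CR0_NW == (uint32_t)0xdfffffff &&
	       CR0_CD == (uint32_t)0x40000000,
	       "disable masks set CD and clear NW");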