kernel/arch/um/include/asm/cache.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __UM_CACHE_H
#define __UM_CACHE_H

#if defined(CONFIG_UML_X86) && !defined(CONFIG_64BIT)
# define L1_CACHE_SHIFT		(CONFIG_X86_L1_CACHE_SHIFT)
#elif defined(CONFIG_UML_X86) /* 64-bit */
# define L1_CACHE_SHIFT		6 /* Should be 7 on Intel */
#else
/* XXX: this was taken from x86, now it's completely random. Luckily only
 * affects SMP padding. */
# define L1_CACHE_SHIFT		5
#endif

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#endif /* __UM_CACHE_H */
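
/*
 * Illustrative sketch, not part of the header above: one common consumer
 * of L1_CACHE_BYTES. <linux/cache.h> defines SMP_CACHE_BYTES as
 * L1_CACHE_BYTES and builds ____cacheline_aligned on top of it, which is
 * what the "only affects SMP padding" comment refers to. The struct below
 * is a hypothetical example (names invented here) of padding a
 * writer-heavy field onto its own cache line to avoid false sharing; it
 * assumes this header has been included so L1_CACHE_BYTES is defined.
 */
struct example_stats {
	unsigned long	reader_hits;
	/* force the frequently written counter onto its own cache line */
	unsigned long	writer_hits __attribute__((__aligned__(L1_CACHE_BYTES)));
};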