x86: unify include/asm/cache_32/64.h

Same file, except for whitespace, comment formatting and the extra
defines in _64, which are conditional on VSMP anyway.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Author: Thomas Gleixner <tglx@linutronix.de>, 2007-10-15 23:28:20 +02:00 (committed by Thomas Gleixner)
commit 9bfa23df56
parent b2bba72c10
3 changed files with 19 additions and 44 deletions
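
For context (editorial note, not part of the patch): the macros being unified here control where data is placed and how it is aligned. Below is a minimal, standalone C sketch that mirrors the macros from the unified cache.h so it builds with plain gcc outside the kernel; the names hot_counter and node_stats are hypothetical, and CONFIG_X86_L1_CACHE_SHIFT is assumed to be 6 (64-byte lines) purely for illustration.

/*
 * Standalone illustration (not kernel code): mirrors the macros from the
 * unified cache.h.  CONFIG_X86_L1_CACHE_SHIFT is assumed to be 6;
 * hot_counter and node_stats are hypothetical names.
 */
#include <stdio.h>

#define CONFIG_X86_L1_CACHE_SHIFT 6
#define L1_CACHE_SHIFT (CONFIG_X86_L1_CACHE_SHIFT)
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)

#define __read_mostly __attribute__((__section__(".data.read_mostly")))

/* vSMP internode cacheline shift, as in the CONFIG_X86_VSMP branch */
#define INTERNODE_CACHE_SHIFT (12)
#define __cacheline_aligned_in_smp \
	__attribute__((__aligned__(1 << (INTERNODE_CACHE_SHIFT)))) \
	__attribute__((__section__(".data.page_aligned")))

/* Read often, written rarely: grouped into .data.read_mostly so writes to
 * unrelated hot data do not keep invalidating its cache line. */
static int hot_counter __read_mostly = 42;

/* Written frequently: aligned to the 4096-byte internode line so it never
 * shares that unit with other data (avoids cross-node false sharing). */
struct node_stats {
	unsigned long packets;
	unsigned long errors;
};
static struct node_stats stats __cacheline_aligned_in_smp;

int main(void)
{
	printf("L1 line: %d bytes, internode line: %d bytes\n",
	       L1_CACHE_BYTES, 1 << INTERNODE_CACHE_SHIFT);
	printf("&stats mod 4096 = %lu (0 => page aligned)\n",
	       (unsigned long)&stats % 4096);
	printf("hot_counter = %d\n", hot_counter);
	return 0;
}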

include/asm-x86/cache.h

@@ -1,5 +1,20 @@
-#ifdef CONFIG_X86_32
-# include "cache_32.h"
-#else
-# include "cache_64.h"
+#ifndef _ARCH_X86_CACHE_H
+#define _ARCH_X86_CACHE_H
+
+/* L1 cache line size */
+#define L1_CACHE_SHIFT (CONFIG_X86_L1_CACHE_SHIFT)
+#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
+
+#define __read_mostly __attribute__((__section__(".data.read_mostly")))
+
+#ifdef CONFIG_X86_VSMP
+/* vSMP Internode cacheline shift */
+#define INTERNODE_CACHE_SHIFT (12)
+
+#ifdef CONFIG_SMP
+#define __cacheline_aligned_in_smp \
+__attribute__((__aligned__(1 << (INTERNODE_CACHE_SHIFT)))) \
+__attribute__((__section__(".data.page_aligned")))
+#endif
+#endif
 #endif
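
A note on the CONFIG_X86_VSMP block above (editorial, not part of the patch): INTERNODE_CACHE_SHIFT is 12, so the requested alignment is 1 << 12 = 4096 bytes, i.e. a full x86 page, which is also why annotated objects are moved into the .data.page_aligned section; presumably this matches ScaleMP vSMP's page-sized cross-node coherence unit, since anything smaller would still allow false sharing between nodes. When CONFIG_X86_VSMP is not set, __cacheline_aligned_in_smp is left undefined here and the generic fallback in <linux/cache.h> applies, which on SMP kernels aligns only to the ordinary L1 cache line.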

include/asm-x86/cache_32.h

@@ -1,14 +0,0 @@
-/*
- * include/asm-i386/cache.h
- */
-#ifndef __ARCH_I386_CACHE_H
-#define __ARCH_I386_CACHE_H
-/* L1 cache line size */
-#define L1_CACHE_SHIFT (CONFIG_X86_L1_CACHE_SHIFT)
-#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
-#define __read_mostly __attribute__((__section__(".data.read_mostly")))
-#endif

include/asm-x86/cache_64.h

@@ -1,26 +0,0 @@
-/*
- * include/asm-x86_64/cache.h
- */
-#ifndef __ARCH_X8664_CACHE_H
-#define __ARCH_X8664_CACHE_H
-/* L1 cache line size */
-#define L1_CACHE_SHIFT (CONFIG_X86_L1_CACHE_SHIFT)
-#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
-#ifdef CONFIG_X86_VSMP
-/* vSMP Internode cacheline shift */
-#define INTERNODE_CACHE_SHIFT (12)
-#ifdef CONFIG_SMP
-#define __cacheline_aligned_in_smp \
-__attribute__((__aligned__(1 << (INTERNODE_CACHE_SHIFT)))) \
-__attribute__((__section__(".data.page_aligned")))
-#endif
-#endif
-#define __read_mostly __attribute__((__section__(".data.read_mostly")))
-#endif