/*
 * Scrape residue from the gitweb page this file was captured from:
 *   [PATCH] SHPC: Cleanup SHPC Logical Slot Register access
 *   [linux-2.6.22.y-op.git] / include / linux / cache.h
 *   blob cc4b3aafad9a1fda984dbe6e08bfcd09d8b3b73f
 */
1 #ifndef __LINUX_CACHE_H
2 #define __LINUX_CACHE_H
4 #include <linux/kernel.h>
5 #include <linux/config.h>
6 #include <asm/cache.h>
8 #ifndef L1_CACHE_ALIGN
9 #define L1_CACHE_ALIGN(x) ALIGN(x, L1_CACHE_BYTES)
10 #endif
12 #ifndef SMP_CACHE_BYTES
13 #define SMP_CACHE_BYTES L1_CACHE_BYTES
14 #endif
16 #ifndef __read_mostly
17 #define __read_mostly
18 #endif
20 #ifndef ____cacheline_aligned
21 #define ____cacheline_aligned __attribute__((__aligned__(SMP_CACHE_BYTES)))
22 #endif
24 #ifndef ____cacheline_aligned_in_smp
25 #ifdef CONFIG_SMP
26 #define ____cacheline_aligned_in_smp ____cacheline_aligned
27 #else
28 #define ____cacheline_aligned_in_smp
29 #endif /* CONFIG_SMP */
30 #endif
32 #ifndef __cacheline_aligned
33 #define __cacheline_aligned \
34 __attribute__((__aligned__(SMP_CACHE_BYTES), \
35 __section__(".data.cacheline_aligned")))
36 #endif /* __cacheline_aligned */
38 #ifndef __cacheline_aligned_in_smp
39 #ifdef CONFIG_SMP
40 #define __cacheline_aligned_in_smp __cacheline_aligned
41 #else
42 #define __cacheline_aligned_in_smp
43 #endif /* CONFIG_SMP */
44 #endif
47 * The maximum alignment needed for some critical structures
48 * These could be inter-node cacheline sizes/L3 cacheline
49 * size etc. Define this in asm/cache.h for your arch
51 #ifndef INTERNODE_CACHE_SHIFT
52 #define INTERNODE_CACHE_SHIFT L1_CACHE_SHIFT
53 #endif
55 #if !defined(____cacheline_internodealigned_in_smp)
56 #if defined(CONFIG_SMP)
57 #define ____cacheline_internodealigned_in_smp \
58 __attribute__((__aligned__(1 << (INTERNODE_CACHE_SHIFT))))
59 #else
60 #define ____cacheline_internodealigned_in_smp
61 #endif
62 #endif
64 #endif /* __LINUX_CACHE_H */