include/linux/cache.h
#ifndef __LINUX_CACHE_H
#define __LINUX_CACHE_H

#include <linux/kernel.h>
#include <asm/cache.h>

#ifndef L1_CACHE_ALIGN
#define L1_CACHE_ALIGN(x) ALIGN(x, L1_CACHE_BYTES)
#endif

#ifndef SMP_CACHE_BYTES
#define SMP_CACHE_BYTES L1_CACHE_BYTES
#endif

#ifndef __read_mostly
#define __read_mostly
#endif

#ifndef ____cacheline_aligned
#define ____cacheline_aligned __attribute__((__aligned__(SMP_CACHE_BYTES)))
#endif

#ifndef ____cacheline_aligned_in_smp
#ifdef CONFIG_SMP
#define ____cacheline_aligned_in_smp ____cacheline_aligned
#else
#define ____cacheline_aligned_in_smp
#endif /* CONFIG_SMP */
#endif

#ifndef __cacheline_aligned
#define __cacheline_aligned \
	__attribute__((__aligned__(SMP_CACHE_BYTES), \
		       __section__(".data..cacheline_aligned")))
#endif /* __cacheline_aligned */

#ifndef __cacheline_aligned_in_smp
#ifdef CONFIG_SMP
#define __cacheline_aligned_in_smp __cacheline_aligned
#else
#define __cacheline_aligned_in_smp
#endif /* CONFIG_SMP */
#endif

/*
 * The maximum alignment needed for some critical structures
 * These could be inter-node cacheline sizes/L3 cacheline
 * size etc.  Define this in asm/cache.h for your arch
 */
#ifndef INTERNODE_CACHE_SHIFT
#define INTERNODE_CACHE_SHIFT L1_CACHE_SHIFT
#endif

#if !defined(____cacheline_internodealigned_in_smp)
#if defined(CONFIG_SMP)
#define ____cacheline_internodealigned_in_smp \
	__attribute__((__aligned__(1 << (INTERNODE_CACHE_SHIFT))))
#else
#define ____cacheline_internodealigned_in_smp
#endif
#endif

#ifndef CONFIG_ARCH_HAS_CACHE_LINE_SIZE
#define cache_line_size() L1_CACHE_BYTES
#endif

#endif /* __LINUX_CACHE_H */
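
Below is a minimal, hypothetical usage sketch, not part of cache.h itself: the names scale_factor, hot_stats, and ctl_block are invented here purely to illustrate how the annotations defined above are typically applied in kernel code.

/*
 * Usage sketch (hypothetical identifiers, for illustration only).
 */
#include <linux/cache.h>

/* Set once at boot, read on every fast-path call: group it with other
 * read-mostly data so it is not dirtied by nearby writes. */
static int scale_factor __read_mostly = 4;

/* Frequently updated counters: align the struct to a cacheline so two
 * CPUs updating adjacent instances do not false-share on SMP builds. */
struct hot_stats {
	unsigned long hits;
	unsigned long misses;
} ____cacheline_aligned_in_smp;

/* Shared, rarely written control data: align to the worst-case
 * inter-node cacheline size (1 << INTERNODE_CACHE_SHIFT). */
struct ctl_block {
	unsigned long flags;
} ____cacheline_internodealigned_in_smp;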