alpha: unbreak percpu again
[linux-2.6/mini2440.git] / arch / alpha / include / asm / percpu.h
blob 06c5c7a4afd3f6617440cdbcf669d70d591a0d44
1 #ifndef __ALPHA_PERCPU_H
2 #define __ALPHA_PERCPU_H
4 #include <linux/compiler.h>
5 #include <linux/threads.h>
6 #include <linux/percpu-defs.h>
8 /*
9 * Determine the real variable name from the name visible in the
10 * kernel sources.
12 #define per_cpu_var(var) per_cpu__##var
14 #ifdef CONFIG_SMP
17 * per_cpu_offset() is the offset that has to be added to a
18 * percpu variable to get to the instance for a certain processor.
20 extern unsigned long __per_cpu_offset[NR_CPUS];
22 #define per_cpu_offset(x) (__per_cpu_offset[x])
24 #define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
25 #ifdef CONFIG_DEBUG_PREEMPT
26 #define my_cpu_offset per_cpu_offset(smp_processor_id())
27 #else
28 #define my_cpu_offset __my_cpu_offset
29 #endif
31 #ifndef MODULE
32 #define SHIFT_PERCPU_PTR(var, offset) RELOC_HIDE(&per_cpu_var(var), (offset))
33 #define PER_CPU_ATTRIBUTES
34 #else
36 * To calculate addresses of locally defined variables, GCC uses 32-bit
37 * displacement from the GP. Which doesn't work for per cpu variables in
38 * modules, as an offset to the kernel per cpu area is way above 4G.
40 * This forces allocation of a GOT entry for per cpu variable using
41 * ldq instruction with a 'literal' relocation.
43 #define SHIFT_PERCPU_PTR(var, offset) ({ \
44 extern int simple_identifier_##var(void); \
45 unsigned long __ptr, tmp_gp; \
46 asm ( "br %1, 1f \n\
47 1: ldgp %1, 0(%1) \n\
48 ldq %0, per_cpu__" #var"(%1)\t!literal" \
49 : "=&r"(__ptr), "=&r"(tmp_gp)); \
50 (typeof(&per_cpu_var(var)))(__ptr + (offset)); })
52 #define PER_CPU_ATTRIBUTES __used
54 #endif /* MODULE */
57 * A percpu variable may point to a discarded regions. The following are
58 * established ways to produce a usable pointer from the percpu variable
59 * offset.
61 #define per_cpu(var, cpu) \
62 (*SHIFT_PERCPU_PTR(var, per_cpu_offset(cpu)))
63 #define __get_cpu_var(var) \
64 (*SHIFT_PERCPU_PTR(var, my_cpu_offset))
65 #define __raw_get_cpu_var(var) \
66 (*SHIFT_PERCPU_PTR(var, __my_cpu_offset))
68 #else /* ! SMP */
70 #define per_cpu(var, cpu) (*((void)(cpu), &per_cpu_var(var)))
71 #define __get_cpu_var(var) per_cpu_var(var)
72 #define __raw_get_cpu_var(var) per_cpu_var(var)
74 #define PER_CPU_ATTRIBUTES
76 #endif /* SMP */
78 #ifdef CONFIG_SMP
79 #define PER_CPU_BASE_SECTION ".data.percpu"
80 #else
81 #define PER_CPU_BASE_SECTION ".data"
82 #endif
84 #ifdef CONFIG_SMP
86 #ifdef MODULE
87 #define PER_CPU_SHARED_ALIGNED_SECTION ""
88 #else
89 #define PER_CPU_SHARED_ALIGNED_SECTION ".shared_aligned"
90 #endif
91 #define PER_CPU_FIRST_SECTION ".first"
93 #else
95 #define PER_CPU_SHARED_ALIGNED_SECTION ""
96 #define PER_CPU_FIRST_SECTION ""
98 #endif
100 #define PER_CPU_ATTRIBUTES
102 #endif /* __ALPHA_PERCPU_H */