/*
 * include/asm-alpha/processor.h
 *
 * Copyright (C) 1994 Linus Torvalds
 */

#ifndef __ASM_ALPHA_PROCESSOR_H
#define __ASM_ALPHA_PROCESSOR_H

#include <linux/personality.h>	/* for ADDR_LIMIT_32BIT */

/*
 * Returns current instruction pointer ("program counter").
 */
#define current_text_addr() \
  ({ void *__pc; __asm__ ("br %0,.+4" : "=r"(__pc)); __pc; })
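/*
 * Why this works: the Alpha "br" instruction writes the address of the
 * instruction following the branch into its register operand, and the
 * target ".+4" is exactly that next instruction, so the branch simply
 * falls through while leaving the current PC in __pc.
 */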
/*
 * We have a 42-bit user address space: 4TB user VM...
 */
#define TASK_SIZE (0x40000000000UL)
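/* 0x40000000000UL is 1UL << 42, i.e. the 4TB user VM mentioned above. */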
/* This decides where the kernel will search for a free chunk of vm
 * space during mmap's.
 */
#define TASK_UNMAPPED_BASE \
  ((current->personality & ADDR_LIMIT_32BIT) ? 0x40000000 : TASK_SIZE / 2)
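/*
 * In other words: tasks running with a 32-bit address-space personality
 * start their mmap searches at 1GB (0x40000000), while full 64-bit tasks
 * start at TASK_SIZE / 2, i.e. 2TB.
 */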
typedef struct {
	unsigned long seg;
} mm_segment_t;

/* This is dead.  Everything has been moved to thread_info. */
struct thread_struct { };
#define INIT_THREAD  { }

/* Return saved PC of a blocked thread. */
struct task_struct;
extern unsigned long thread_saved_pc(struct task_struct *);

/* Do necessary setup to start up a newly executed thread. */
extern void start_thread(struct pt_regs *, unsigned long, unsigned long);

/* Free all resources held by a thread. */
extern void release_thread(struct task_struct *);

/* Prepare to copy thread state - unlazy all lazy status */
#define prepare_to_copy(tsk)	do { } while (0)

/* Create a kernel thread without removing it from tasklists. */
extern long kernel_thread(int (*fn)(void *), void *arg, unsigned long flags);

unsigned long get_wchan(struct task_struct *p);

#define KSTK_EIP(tsk) (task_pt_regs(tsk)->pc)

#define KSTK_ESP(tsk) \
  ((tsk) == current ? rdusp() : task_thread_info(tsk)->pcb.usp)
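/*
 * KSTK_ESP: for the currently running task the user stack pointer is live
 * in a hardware register and read with rdusp(); for any other task it is
 * taken from the usp saved in that task's PCB, reached via thread_info.
 */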
#define cpu_relax()	barrier()

#define ARCH_HAS_PREFETCH
#define ARCH_HAS_PREFETCHW
#define ARCH_HAS_SPINLOCK_PREFETCH

#ifndef CONFIG_SMP
/* Nothing to prefetch. */
#define spin_lock_prefetch(lock)	do { } while (0)
#endif
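/*
 * The helpers below map directly onto GCC's __builtin_prefetch(addr, rw,
 * locality): rw is 0 for a read prefetch and 1 for a write prefetch, and
 * locality 3 asks for the data to be kept in all levels of cache.
 */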
extern inline void prefetch(const void *ptr)
{
	__builtin_prefetch(ptr, 0, 3);
}

extern inline void prefetchw(const void *ptr)
{
	__builtin_prefetch(ptr, 1, 3);
}

#ifdef CONFIG_SMP
extern inline void spin_lock_prefetch(const void *ptr)
{
	__builtin_prefetch(ptr, 1, 3);
}
#endif

#endif /* __ASM_ALPHA_PROCESSOR_H */