powerpc/mm: Add Strong Access Ordering support
include/asm-powerpc/mman.h
#ifndef _ASM_POWERPC_MMAN_H
#define _ASM_POWERPC_MMAN_H

#include <asm/cputable.h>
#include <asm-generic/mman.h>
#include <linux/mm.h>

/*
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#define PROT_SAO	0x10		/* Strong Access Ordering */

#define MAP_RENAME	MAP_ANONYMOUS	/* In SunOS terminology */
#define MAP_NORESERVE	0x40		/* don't reserve swap pages */
#define MAP_LOCKED	0x80

#define MAP_GROWSDOWN	0x0100		/* stack-like segment */
#define MAP_DENYWRITE	0x0800		/* ETXTBSY */
#define MAP_EXECUTABLE	0x1000		/* mark it as an executable */

#define MCL_CURRENT	0x2000		/* lock all currently mapped pages */
#define MCL_FUTURE	0x4000		/* lock all additions to address space */

#define MAP_POPULATE	0x8000		/* populate (prefault) pagetables */
#define MAP_NONBLOCK	0x10000		/* do not block on IO */

#ifdef CONFIG_PPC64
/*
 * This file is included by linux/mman.h, so we can't use calc_vm_prot_bits()
 * here.  How important is the optimization?
 */
static inline unsigned long arch_calc_vm_prot_bits(unsigned long prot)
{
	return (prot & PROT_SAO) ? VM_SAO : 0;
}
#define arch_calc_vm_prot_bits(prot) arch_calc_vm_prot_bits(prot)

static inline pgprot_t arch_vm_get_page_prot(unsigned long vm_flags)
{
	return (vm_flags & VM_SAO) ? __pgprot(_PAGE_SAO) : __pgprot(0);
}
#define arch_vm_get_page_prot(vm_flags) arch_vm_get_page_prot(vm_flags)

static inline int arch_validate_prot(unsigned long prot)
{
	if (prot & ~(PROT_READ | PROT_WRITE | PROT_EXEC | PROT_SEM | PROT_SAO))
		return 0;
	if ((prot & PROT_SAO) && !cpu_has_feature(CPU_FTR_SAO))
		return 0;
	return 1;
}
#define arch_validate_prot(prot) arch_validate_prot(prot)

#endif /* CONFIG_PPC64 */
#endif /* _ASM_POWERPC_MMAN_H */
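The new PROT_SAO bit flows through the three hooks above: arch_calc_vm_prot_bits() maps it onto VM_SAO, arch_vm_get_page_prot() turns VM_SAO into the _PAGE_SAO page-table flag, and arch_validate_prot() rejects it on CPUs that lack CPU_FTR_SAO. As a rough illustration only (not part of this patch), a userspace caller could request Strong Access Ordering through mprotect() as in the minimal sketch below; the fallback PROT_SAO definition, mapping size, and error handling are assumptions.

/*
 * Userspace sketch: request Strong Access Ordering on an existing mapping.
 * On CPUs without CPU_FTR_SAO (or on other architectures) the kernel's
 * arch_validate_prot() causes mprotect() to fail, which this sketch reports.
 */
#include <stdio.h>
#include <sys/mman.h>

#ifndef PROT_SAO
#define PROT_SAO 0x10	/* assumed fallback; mirrors the powerpc value above */
#endif

int main(void)
{
	size_t len = 4096;

	/* Start with an ordinary anonymous read/write mapping. */
	void *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
		       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	if (p == MAP_FAILED) {
		perror("mmap");
		return 1;
	}

	/* Ask for Strong Access Ordering; rejected unless the CPU has SAO. */
	if (mprotect(p, len, PROT_READ | PROT_WRITE | PROT_SAO) != 0)
		perror("mprotect(PROT_SAO)");

	munmap(p, len);
	return 0;
}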