From 1d8a1f6b51f6b195dfdcf05821be97edede5664a Mon Sep 17 00:00:00 2001
From: "H. Peter Anvin"
Date: Wed, 30 Jan 2008 13:30:30 +0100
Subject: [PATCH] x86: prepare merger of <asm/alternative_{32,64}.h>

Prepare for merging by making the 32- and 64-bit versions textually
identical.  This involves:

- removing arbitrary header inclusion differences
- reorganizing the 32-bit version slightly to match the 64-bit version
- using <asm/asm.h> to unify the assembly code
- renaming struct paravirt_patch to struct paravirt_patch_site in the
  64-bit version to match the 32-bit version; there are no references
  to struct paravirt_patch elsewhere in the tree.

Signed-off-by: H. Peter Anvin
Signed-off-by: Thomas Gleixner
Signed-off-by: Ingo Molnar
Signed-off-by: Thomas Gleixner
---
 include/asm-x86/alternative_32.h                    | 131 ++++++++++----------
 .../asm-x86/{alternative_32.h => alternative_64.h}  | 137 +++++++++++----------
 2 files changed, 141 insertions(+), 127 deletions(-)
 copy include/asm-x86/{alternative_32.h => alternative_64.h} (71%)

diff --git a/include/asm-x86/alternative_32.h b/include/asm-x86/alternative_32.h
index bda6c810c0f..4919b5ee8b9 100644
--- a/include/asm-x86/alternative_32.h
+++ b/include/asm-x86/alternative_32.h
@@ -1,23 +1,63 @@
 #ifndef _I386_ALTERNATIVE_H
 #define _I386_ALTERNATIVE_H
 
-#include <asm/types.h>
-#include <linux/stddef.h>
 #include <linux/types.h>
+#include <linux/stddef.h>
+#include <asm/asm.h>
+
+/*
+ * Alternative inline assembly for SMP.
+ *
+ * The LOCK_PREFIX macro defined here replaces the LOCK and
+ * LOCK_PREFIX macros used everywhere in the source tree.
+ *
+ * SMP alternatives use the same data structures as the other
+ * alternatives and the X86_FEATURE_UP flag to indicate the case of a
+ * UP system running a SMP kernel. The existing apply_alternatives()
+ * works fine for patching a SMP kernel for UP.
+ *
+ * The SMP alternative tables can be kept after boot and contain both
+ * UP and SMP versions of the instructions to allow switching back to
+ * SMP at runtime, when hotplugging in a new CPU, which is especially
+ * useful in virtualized environments.
+ *
+ * The very common lock prefix is handled as special case in a
+ * separate table which is a pure address list without replacement ptr
+ * and size information. That keeps the table sizes small.
+ */
+
+#ifdef CONFIG_SMP
+#define LOCK_PREFIX \
+		".section .smp_locks,\"a\"\n"	\
+		_ASM_ALIGN "\n"			\
+		_ASM_PTR "661f\n" /* address */	\
+		".previous\n"			\
+		"661:\n\tlock; "
+
+#else /* ! CONFIG_SMP */
+#define LOCK_PREFIX ""
+#endif
+
+/* This must be included *after* the definition of LOCK_PREFIX */
+#include <asm/cpufeature.h>
 
 struct alt_instr {
-	u8 *instr; 		/* original instruction */
+	u8 *instr;		/* original instruction */
 	u8 *replacement;
 	u8  cpuid;		/* cpuid bit set for replacement */
 	u8  instrlen;		/* length of original instruction */
-	u8  replacementlen; 	/* length of new instruction, <= instrlen */
-	u8  pad;
+	u8  replacementlen;	/* length of new instruction, <= instrlen */
+	u8  pad1;
+#ifdef CONFIG_X86_64
+	u32 pad2;
+#endif
 };
 
 extern void alternative_instructions(void);
 extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end);
 
 struct module;
+
 #ifdef CONFIG_SMP
 extern void alternatives_smp_module_add(struct module *mod, char *name,
 					void *locks, void *locks_end,
@@ -45,17 +85,17 @@ static inline void alternatives_smp_switch(int smp) {}
  * without volatile and memory clobber.
  */
 #define alternative(oldinstr, newinstr, feature)			\
-	asm volatile ("661:\n\t" oldinstr "\n662:\n" 			\
+	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
 		      ".section .altinstructions,\"a\"\n"		\
-		      " .align 4\n"					\
-		      " .long 661b\n" /* label */			\
-		      " .long 663f\n" /* new instruction */		\
-		      " .byte %c0\n" /* feature bit */			\
-		      " .byte 662b-661b\n" /* sourcelen */		\
-		      " .byte 664f-663f\n" /* replacementlen */		\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n" /* label */			\
+		      _ASM_PTR "663f\n" /* new instruction */		\
+		      " .byte %c0\n" /* feature bit */			\
+		      " .byte 662b-661b\n" /* sourcelen */		\
+		      " .byte 664f-663f\n" /* replacementlen */		\
 		      ".previous\n"					\
 		      ".section .altinstr_replacement,\"ax\"\n"	\
-		      "663:\n\t" newinstr "\n664:\n" /* replacement */\
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */ \
 		      ".previous" :: "i" (feature) : "memory")
 
 /*
@@ -66,35 +106,35 @@ static inline void alternatives_smp_switch(int smp) {}
  * Argument numbers start with 1.
  * Best is to use constraints that are fixed size (like (%1) ... "r")
  * If you use variable sized constraints like "m" or "g" in the
- * replacement maake sure to pad to the worst case length.
+ * replacement make sure to pad to the worst case length.
  */
 #define alternative_input(oldinstr, newinstr, feature, input...)	\
 	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
 		      ".section .altinstructions,\"a\"\n"		\
-		      " .align 4\n"					\
-		      " .long 661b\n" /* label */			\
-		      " .long 663f\n" /* new instruction */		\
-		      " .byte %c0\n" /* feature bit */			\
-		      " .byte 662b-661b\n" /* sourcelen */		\
-		      " .byte 664f-663f\n" /* replacementlen */		\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n" /* label */			\
+		      _ASM_PTR "663f\n" /* new instruction */		\
+		      " .byte %c0\n" /* feature bit */			\
+		      " .byte 662b-661b\n" /* sourcelen */		\
+		      " .byte 664f-663f\n" /* replacementlen */		\
 		      ".previous\n"					\
 		      ".section .altinstr_replacement,\"ax\"\n"	\
-		      "663:\n\t" newinstr "\n664:\n" /* replacement */\
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */ \
 		      ".previous" :: "i" (feature), ##input)
 
 /* Like alternative_input, but with a single output argument */
-#define alternative_io(oldinstr, newinstr, feature, output, input...) \
+#define alternative_io(oldinstr, newinstr, feature, output, input...)	\
 	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
 		      ".section .altinstructions,\"a\"\n"		\
-		      " .align 4\n"					\
-		      " .long 661b\n" /* label */			\
-		      " .long 663f\n" /* new instruction */		\
-		      " .byte %c[feat]\n" /* feature bit */		\
-		      " .byte 662b-661b\n" /* sourcelen */		\
-		      " .byte 664f-663f\n" /* replacementlen */		\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n" /* label */			\
+		      _ASM_PTR "663f\n" /* new instruction */		\
+		      " .byte %c[feat]\n" /* feature bit */		\
+		      " .byte 662b-661b\n" /* sourcelen */		\
+		      " .byte 664f-663f\n" /* replacementlen */		\
 		      ".previous\n"					\
 		      ".section .altinstr_replacement,\"ax\"\n"	\
-		      "663:\n\t" newinstr "\n664:\n" /* replacement */ \
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */	\
 		      ".previous" : output : [feat] "i" (feature), ##input)
 
 /*
@@ -103,39 +143,6 @@ static inline void alternatives_smp_switch(int smp) {}
  */
 #define ASM_OUTPUT2(a, b) a, b
 
-/*
- * Alternative inline assembly for SMP.
- *
- * The LOCK_PREFIX macro defined here replaces the LOCK and
- * LOCK_PREFIX macros used everywhere in the source tree.
- *
- * SMP alternatives use the same data structures as the other
- * alternatives and the X86_FEATURE_UP flag to indicate the case of a
- * UP system running a SMP kernel. The existing apply_alternatives()
- * works fine for patching a SMP kernel for UP.
- *
- * The SMP alternative tables can be kept after boot and contain both
- * UP and SMP versions of the instructions to allow switching back to
- * SMP at runtime, when hotplugging in a new CPU, which is especially
- * useful in virtualized environments.
- *
- * The very common lock prefix is handled as special case in a
- * separate table which is a pure address list without replacement ptr
- * and size information. That keeps the table sizes small.
- */
-
-#ifdef CONFIG_SMP
-#define LOCK_PREFIX \
-		".section .smp_locks,\"a\"\n"	\
-		" .align 4\n"			\
-		" .long 661f\n" /* address */	\
-		".previous\n"			\
-		"661:\n\tlock; "
-
-#else /* ! CONFIG_SMP */
-#define LOCK_PREFIX ""
-#endif
-
 struct paravirt_patch_site;
 #ifdef CONFIG_PARAVIRT
 void apply_paravirt(struct paravirt_patch_site *start,
diff --git a/include/asm-x86/alternative_32.h b/include/asm-x86/alternative_64.h
similarity index 71%
copy from include/asm-x86/alternative_32.h
copy to include/asm-x86/alternative_64.h
index bda6c810c0f..50efcebae33 100644
--- a/include/asm-x86/alternative_32.h
+++ b/include/asm-x86/alternative_64.h
@@ -1,23 +1,63 @@
-#ifndef _I386_ALTERNATIVE_H
-#define _I386_ALTERNATIVE_H
+#ifndef _X86_64_ALTERNATIVE_H
+#define _X86_64_ALTERNATIVE_H
 
-#include <asm/types.h>
-#include <linux/stddef.h>
 #include <linux/types.h>
+#include <linux/stddef.h>
+#include <asm/asm.h>
+
+/*
+ * Alternative inline assembly for SMP.
+ *
+ * The LOCK_PREFIX macro defined here replaces the LOCK and
+ * LOCK_PREFIX macros used everywhere in the source tree.
+ *
+ * SMP alternatives use the same data structures as the other
+ * alternatives and the X86_FEATURE_UP flag to indicate the case of a
+ * UP system running a SMP kernel. The existing apply_alternatives()
+ * works fine for patching a SMP kernel for UP.
+ *
+ * The SMP alternative tables can be kept after boot and contain both
+ * UP and SMP versions of the instructions to allow switching back to
+ * SMP at runtime, when hotplugging in a new CPU, which is especially
+ * useful in virtualized environments.
+ *
+ * The very common lock prefix is handled as special case in a
+ * separate table which is a pure address list without replacement ptr
+ * and size information. That keeps the table sizes small.
+ */
+
+#ifdef CONFIG_SMP
+#define LOCK_PREFIX \
+		".section .smp_locks,\"a\"\n"	\
+		_ASM_ALIGN "\n"			\
+		_ASM_PTR "661f\n" /* address */	\
+		".previous\n"			\
+		"661:\n\tlock; "
+
+#else /* ! CONFIG_SMP */
+#define LOCK_PREFIX ""
+#endif
+
+/* This must be included *after* the definition of LOCK_PREFIX */
+#include <asm/cpufeature.h>
 
 struct alt_instr {
-	u8 *instr; 		/* original instruction */
+	u8 *instr;		/* original instruction */
 	u8 *replacement;
 	u8  cpuid;		/* cpuid bit set for replacement */
 	u8  instrlen;		/* length of original instruction */
-	u8  replacementlen; 	/* length of new instruction, <= instrlen */
-	u8  pad;
+	u8  replacementlen;	/* length of new instruction, <= instrlen */
+	u8  pad1;
+#ifdef CONFIG_X86_64
+	u32 pad2;
+#endif
 };
 
 extern void alternative_instructions(void);
 extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end);
 
 struct module;
+
 #ifdef CONFIG_SMP
 extern void alternatives_smp_module_add(struct module *mod, char *name,
 					void *locks, void *locks_end,
@@ -45,17 +85,17 @@ static inline void alternatives_smp_switch(int smp) {}
  * without volatile and memory clobber.
  */
 #define alternative(oldinstr, newinstr, feature)			\
-	asm volatile ("661:\n\t" oldinstr "\n662:\n" 			\
+	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
 		      ".section .altinstructions,\"a\"\n"		\
-		      " .align 4\n"					\
-		      " .long 661b\n" /* label */			\
-		      " .long 663f\n" /* new instruction */		\
-		      " .byte %c0\n" /* feature bit */			\
-		      " .byte 662b-661b\n" /* sourcelen */		\
-		      " .byte 664f-663f\n" /* replacementlen */		\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n" /* label */			\
+		      _ASM_PTR "663f\n" /* new instruction */		\
+		      " .byte %c0\n" /* feature bit */			\
+		      " .byte 662b-661b\n" /* sourcelen */		\
+		      " .byte 664f-663f\n" /* replacementlen */		\
 		      ".previous\n"					\
 		      ".section .altinstr_replacement,\"ax\"\n"	\
-		      "663:\n\t" newinstr "\n664:\n" /* replacement */\
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */ \
 		      ".previous" :: "i" (feature) : "memory")
 
 /*
@@ -66,35 +106,35 @@ static inline void alternatives_smp_switch(int smp) {}
  * Argument numbers start with 1.
  * Best is to use constraints that are fixed size (like (%1) ... "r")
  * If you use variable sized constraints like "m" or "g" in the
- * replacement maake sure to pad to the worst case length.
+ * replacement make sure to pad to the worst case length.
 */
 #define alternative_input(oldinstr, newinstr, feature, input...)	\
 	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
 		      ".section .altinstructions,\"a\"\n"		\
-		      " .align 4\n"					\
-		      " .long 661b\n" /* label */			\
-		      " .long 663f\n" /* new instruction */		\
-		      " .byte %c0\n" /* feature bit */			\
-		      " .byte 662b-661b\n" /* sourcelen */		\
-		      " .byte 664f-663f\n" /* replacementlen */		\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n" /* label */			\
+		      _ASM_PTR "663f\n" /* new instruction */		\
+		      " .byte %c0\n" /* feature bit */			\
+		      " .byte 662b-661b\n" /* sourcelen */		\
+		      " .byte 664f-663f\n" /* replacementlen */		\
 		      ".previous\n"					\
 		      ".section .altinstr_replacement,\"ax\"\n"	\
-		      "663:\n\t" newinstr "\n664:\n" /* replacement */\
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */ \
 		      ".previous" :: "i" (feature), ##input)
 
 /* Like alternative_input, but with a single output argument */
-#define alternative_io(oldinstr, newinstr, feature, output, input...) \
+#define alternative_io(oldinstr, newinstr, feature, output, input...)	\
 	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
 		      ".section .altinstructions,\"a\"\n"		\
-		      " .align 4\n"					\
-		      " .long 661b\n" /* label */			\
-		      " .long 663f\n" /* new instruction */		\
-		      " .byte %c[feat]\n" /* feature bit */		\
-		      " .byte 662b-661b\n" /* sourcelen */		\
-		      " .byte 664f-663f\n" /* replacementlen */		\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n" /* label */			\
+		      _ASM_PTR "663f\n" /* new instruction */		\
+		      " .byte %c[feat]\n" /* feature bit */		\
+		      " .byte 662b-661b\n" /* sourcelen */		\
+		      " .byte 664f-663f\n" /* replacementlen */		\
 		      ".previous\n"					\
 		      ".section .altinstr_replacement,\"ax\"\n"	\
-		      "663:\n\t" newinstr "\n664:\n" /* replacement */ \
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */	\
 		      ".previous" : output : [feat] "i" (feature), ##input)
 
 /*
@@ -103,39 +143,6 @@ static inline void alternatives_smp_switch(int smp) {}
  */
 #define ASM_OUTPUT2(a, b) a, b
 
-/*
- * Alternative inline assembly for SMP.
- *
- * The LOCK_PREFIX macro defined here replaces the LOCK and
- * LOCK_PREFIX macros used everywhere in the source tree.
- *
- * SMP alternatives use the same data structures as the other
- * alternatives and the X86_FEATURE_UP flag to indicate the case of a
- * UP system running a SMP kernel. The existing apply_alternatives()
- * works fine for patching a SMP kernel for UP.
- *
- * The SMP alternative tables can be kept after boot and contain both
- * UP and SMP versions of the instructions to allow switching back to
- * SMP at runtime, when hotplugging in a new CPU, which is especially
- * useful in virtualized environments.
- *
- * The very common lock prefix is handled as special case in a
- * separate table which is a pure address list without replacement ptr
- * and size information. That keeps the table sizes small.
- */
-
-#ifdef CONFIG_SMP
-#define LOCK_PREFIX \
-		".section .smp_locks,\"a\"\n"	\
-		" .align 4\n"			\
-		" .long 661f\n" /* address */	\
-		".previous\n"			\
-		"661:\n\tlock; "
-
-#else /* ! CONFIG_SMP */
-#define LOCK_PREFIX ""
-#endif
-
 struct paravirt_patch_site;
 #ifdef CONFIG_PARAVIRT
 void apply_paravirt(struct paravirt_patch_site *start,
@@ -151,4 +158,4 @@ apply_paravirt(struct paravirt_patch_site *start,
 
 extern void text_poke(void *addr, unsigned char *opcode, int len);
 
-#endif /* _I386_ALTERNATIVE_H */
+#endif /* _X86_64_ALTERNATIVE_H */
-- 
2.11.4.GIT
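
Editor's note (not part of the patch): the .smp_locks bookkeeping that the moved comment
describes can be illustrated outside the kernel. The sketch below mimics LOCK_PREFIX: it
emits the lock prefix at a numbered local label and records that label's address in a
separate allocated section, which is what lets a later patching pass find every locked
instruction. The macro name DEMO_LOCK_PREFIX, the section name .demo_smp_locks, the helper
demo_atomic_inc(), and the file name demo.c are all invented for this example; build it on
x86-64 with "gcc -no-pie demo.c" so the absolute .quad reference links without PIE
relocation complaints.

/*
 * demo.c -- illustrative sketch only, not part of the patch above.
 * In the real header, the recorded addresses land in .smp_locks and the
 * alternatives_smp_*() code uses them to switch between "lock" and a nop
 * at runtime; this demo only records them.
 */
#include <stdio.h>

#define DEMO_LOCK_PREFIX				\
	".pushsection .demo_smp_locks,\"a\"\n"		\
	".balign 8\n"					\
	".quad 661f\n"		/* address of the lock */ \
	".popsection\n"					\
	"661:\n\tlock; "

static inline void demo_atomic_inc(int *v)
{
	/* Emits "lock; incl" and logs its address in .demo_smp_locks. */
	asm volatile(DEMO_LOCK_PREFIX "incl %0" : "+m" (*v));
}

int main(void)
{
	int counter = 0;

	demo_atomic_inc(&counter);
	printf("counter = %d\n", counter);	/* prints: counter = 1 */
	return 0;
}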