From 7fda20f146d5d217684ffbc37c6b6c5f82c2dffd Mon Sep 17 00:00:00 2001
From: Ingo Molnar
Date: Fri, 29 Feb 2008 10:29:38 +0100
Subject: [PATCH] x86: spinlock ops are always-inlined

Signed-off-by: Ingo Molnar
---
 include/asm-x86/spinlock.h | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/include/asm-x86/spinlock.h b/include/asm-x86/spinlock.h
index 47dfe2607bb..bc6376f1bc5 100644
--- a/include/asm-x86/spinlock.h
+++ b/include/asm-x86/spinlock.h
@@ -78,7 +78,7 @@ static inline int __raw_spin_is_contended(raw_spinlock_t *lock)
 	return (((tmp >> 8) & 0xff) - (tmp & 0xff)) > 1;
 }
 
-static inline void __raw_spin_lock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
 	short inc = 0x0100;
 
@@ -99,7 +99,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 
 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
 
-static inline int __raw_spin_trylock(raw_spinlock_t *lock)
+static __always_inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
 	int tmp;
 	short new;
@@ -120,7 +120,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 	return tmp;
 }
 
-static inline void __raw_spin_unlock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
 	asm volatile(UNLOCK_LOCK_PREFIX "incb %0"
 		     : "+m" (lock->slock)
@@ -142,7 +142,7 @@ static inline int __raw_spin_is_contended(raw_spinlock_t *lock)
 	return (((tmp >> 16) & 0xffff) - (tmp & 0xffff)) > 1;
 }
 
-static inline void __raw_spin_lock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
 	int inc = 0x00010000;
 	int tmp;
@@ -165,7 +165,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 
 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
 
-static inline int __raw_spin_trylock(raw_spinlock_t *lock)
+static __always_inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
 	int tmp;
 	int new;
@@ -187,7 +187,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 	return tmp;
 }
 
-static inline void __raw_spin_unlock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
 	asm volatile(UNLOCK_LOCK_PREFIX "incw %0"
 		     : "+m" (lock->slock)
-- 
2.11.4.GIT
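
Context for the change, as a sketch rather than part of the commit: plain `inline` is only a hint that GCC may ignore (for example at -O0, or when CONFIG_OPTIMIZE_INLINING lets the compiler decide), which would turn these tiny lock/unlock fast paths into out-of-line calls. `__always_inline` forces the body to be inlined. The minimal standalone C program below illustrates the distinction; the macro body is roughly how the kernel's compiler-gcc.h of that era spells it, and the demo function names are hypothetical.

/*
 * Illustrative sketch only: contrast a plain inline hint with a forced
 * __always_inline. Build with e.g. "gcc -O0 demo.c" and inspect the
 * generated code to see that only the hinted version may stay a call.
 */
#include <stdio.h>

/* Roughly how the kernel defines it for GCC; an assumption here. */
#define __always_inline inline __attribute__((__always_inline__))

/* Plain inline: merely a hint, the compiler may still emit a call. */
static inline int add_hint(int a, int b)
{
	return a + b;
}

/* __always_inline: GCC/Clang inline the body even with optimization off,
 * so a fast path like a spinlock acquire never pays call/return overhead. */
static __always_inline int add_forced(int a, int b)
{
	return a + b;
}

int main(void)
{
	printf("%d %d\n", add_hint(1, 2), add_forced(3, 4));
	return 0;
}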