From e0f6dec35f9286e78879fe1ac92803fd69fc4fdc Mon Sep 17 00:00:00 2001
From: H. Peter Anvin <hpa@linux.intel.com>
Date: Wed, 04 Dec 2013 22:31:28 +0000
Subject: x86, bitops: Correct the assembly constraints to testing bitops

In checkin:

0c44c2d0f459 x86: Use asm goto to implement better modify_and_test() functions

the various functions which do modify and test were unified and
optimized using "asm goto". However, this change missed the detail
that the bitops require an "Ir" constraint rather than an "er"
constraint ("I" = integer constant from 0-31, "e" = signed 32-bit
integer constant). This would cause code to miscompile if these
functions were used on constant bit positions 32-255 and the build to
fail if used on constant bit positions above 255.

Add the constraints as a parameter to the GEN_BINARY_RMWcc() macro to
avoid this problem.

Reported-by: Jesse Brandeburg <jesse.brandeburg@intel.com>
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Link: http://lkml.kernel.org/r/529E8719.4070202@zytor.com
---
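As a minimal illustration (a hypothetical stand-alone function, not part
of the patch proper), the classic bitop pattern below shows what "Ir"
buys: a constant nr in the range 0-31 may be emitted as an immediate,
while any larger value is forced into a register, where BTS accepts an
arbitrary bit offset. Under "er", a constant in 32-255 would be encoded
as an imm8, which the CPU masks to the operand width (the miscompile);
a constant above 255 does not fit in an imm8 at all (the build failure).

static inline int sketch_test_and_set_bit(long nr,
					  volatile unsigned long *addr)
{
	int oldbit;

	/* "Ir": immediate only for nr in 0-31, otherwise a register */
	asm volatile("lock; bts %2,%1\n\t"
		     "sbb %0,%0"
		     : "=r" (oldbit), "+m" (*addr)
		     : "Ir" (nr)
		     : "memory");
	return oldbit;
}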
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index da31c8b..b17f4f4 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -77,7 +77,7 @@ static inline void atomic_sub(int i, atomic_t *v)
  */
 static inline int atomic_sub_and_test(int i, atomic_t *v)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, i, "%0", "e");
+	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
 }
 
 /**
@@ -141,7 +141,7 @@ static inline int atomic_inc_and_test(atomic_t *v)
  */
 static inline int atomic_add_negative(int i, atomic_t *v)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, i, "%0", "s");
+	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
 }
 
 /**
diff --git a/arch/x86/include/asm/atomic64_64.h b/arch/x86/include/asm/atomic64_64.h
index 3f065c9..46e9052 100644
--- a/arch/x86/include/asm/atomic64_64.h
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -72,7 +72,7 @@ static inline void atomic64_sub(long i, atomic64_t *v)
  */
 static inline int atomic64_sub_and_test(long i, atomic64_t *v)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, i, "%0", "e");
+	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", "e");
 }
 
 /**
@@ -138,7 +138,7 @@ static inline int atomic64_inc_and_test(atomic64_t *v)
  */
 static inline int atomic64_add_negative(long i, atomic64_t *v)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, i, "%0", "s");
+	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", "s");
 }
 
 /**
diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
index 6d76d09..9fc1af7 100644
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -205,7 +205,7 @@ static inline void change_bit(long nr, volatile unsigned long *addr)
  */
 static inline int test_and_set_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, nr, "%0", "c");
+	GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, "Ir", nr, "%0", "c");
 }
 
 /**
@@ -251,7 +251,7 @@ static inline int __test_and_set_bit(long nr, volatile unsigned long *addr)
  */
 static inline int test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, nr, "%0", "c");
+	GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, "Ir", nr, "%0", "c");
 }
 
 /**
@@ -304,7 +304,7 @@ static inline int __test_and_change_bit(long nr, volatile unsigned long *addr)
  */
 static inline int test_and_change_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, nr, "%0", "c");
+	GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, "Ir", nr, "%0", "c");
 }
 
 static __always_inline int constant_test_bit(long nr, const volatile unsigned long *addr)
diff --git a/arch/x86/include/asm/local.h b/arch/x86/include/asm/local.h
index 5b23e60..4ad6560 100644
--- a/arch/x86/include/asm/local.h
+++ b/arch/x86/include/asm/local.h
@@ -52,7 +52,7 @@ static inline void local_sub(long i, local_t *l)
  */
 static inline int local_sub_and_test(long i, local_t *l)
 {
-	GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, i, "%0", "e");
+	GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, "er", i, "%0", "e");
 }
 
 /**
@@ -92,7 +92,7 @@ static inline int local_inc_and_test(local_t *l)
  */
 static inline int local_add_negative(long i, local_t *l)
 {
-	GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, i, "%0", "s");
+	GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, "er", i, "%0", "s");
 }
 
 /**
diff --git a/arch/x86/include/asm/rmwcc.h b/arch/x86/include/asm/rmwcc.h
index 1ff990f..8f7866a 100644
--- a/arch/x86/include/asm/rmwcc.h
+++ b/arch/x86/include/asm/rmwcc.h
@@ -16,8 +16,8 @@ cc_label:								\
 #define GEN_UNARY_RMWcc(op, var, arg0, cc)				\
 	__GEN_RMWcc(op " " arg0, var, cc)
 
-#define GEN_BINARY_RMWcc(op, var, val, arg0, cc)			\
-	__GEN_RMWcc(op " %1, " arg0, var, cc, "er" (val))
+#define GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc)			\
+	__GEN_RMWcc(op " %1, " arg0, var, cc, vcon (val))
 
 #else /* !CC_HAVE_ASM_GOTO */
 
@@ -33,8 +33,8 @@ do {									\
 #define GEN_UNARY_RMWcc(op, var, arg0, cc)				\
 	__GEN_RMWcc(op " " arg0, var, cc)
 
-#define GEN_BINARY_RMWcc(op, var, val, arg0, cc)			\
-	__GEN_RMWcc(op " %2, " arg0, var, cc, "er" (val))
+#define GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc)			\
+	__GEN_RMWcc(op " %2, " arg0, var, cc, vcon (val))
 
 #endif /* CC_HAVE_ASM_GOTO */
 
From 0c44c2d0f459cd7e275242b72f500137c4fa834d Mon Sep 17 00:00:00 2001
From: Peter Zijlstra <peterz@infradead.org>
Date: Wed, 11 Sep 2013 13:19:24 +0000
Subject: x86: Use asm goto to implement better modify_and_test() functions

Linus suggested using asm goto to get rid of the typical SETcc + TEST
instruction pair -- which also clobbers an extra register -- for our
typical modify_and_test() functions.

Because asm goto doesn't allow output fields, it has to include an
unconditional memory clobber when it changes a memory variable, to
force a reload.

Luckily all atomic ops already imply a compiler barrier to go along
with their memory barrier semantics.

Suggested-by: Linus Torvalds <torvalds@linux-foundation.org>
Signed-off-by: Peter Zijlstra <peterz@infradead.org>
Link: http://lkml.kernel.org/n/tip-0mtn9siwbeo1d33bap1422se@git.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
---
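As a sketch of the mechanism (modeled on the rmwcc.h this patch adds;
the exact macro body here is illustrative and assumes CC_HAVE_ASM_GOTO),
asm goto branches directly on the CPU condition code, so the SETcc +
TEST pair and its temporary register disappear. Since asm goto permits
no output operands, the unconditional "memory" clobber stands in for
them and forces a reload of anything the instruction may have changed:

#define __GEN_RMWcc(fullop, var, cc, ...)				\
do {									\
	asm volatile goto (fullop "; j" cc " %l[cc_label]"		\
			: : "m" (var), ## __VA_ARGS__			\
			: "memory" : cc_label);				\
	return 0;							\
cc_label:								\
	return 1;							\
} while (0)

With this, e.g. atomic_dec_and_test() becomes a lock decl followed by a
je straight to the return-1 path, instead of lock decl; sete into a
temporary that the caller must then test and branch on.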
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 722aa3b..da31c8b 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -6,6 +6,7 @@
 #include <asm/processor.h>
 #include <asm/alternative.h>
 #include <asm/cmpxchg.h>
+#include <asm/rmwcc.h>
 
 /*
  * Atomic operations that C can't guarantee us.  Useful for
@@ -76,12 +77,7 @@ static inline void atomic_sub(int i, atomic_t *v)
  */
 static inline int atomic_sub_and_test(int i, atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : "ir" (i) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, i, "%0", "e");
 }
 
 /**
@@ -118,12 +114,7 @@ static inline void atomic_dec(atomic_t *v)
  */
 static inline int atomic_dec_and_test(atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "decl %0; sete %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
 }
 
 /**
@@ -136,12 +127,7 @@ static inline int atomic_dec_and_test(atomic_t *v)
  */
 static inline int atomic_inc_and_test(atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "incl %0; sete %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
 }
 
 /**
@@ -155,12 +141,7 @@ static inline int atomic_inc_and_test(atomic_t *v)
  */
 static inline int atomic_add_negative(int i, atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : "ir" (i) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, i, "%0", "s");
 }
 
 /**
diff --git a/arch/x86/include/asm/atomic64_64.h b/arch/x86/include/asm/atomic64_64.h
index 0e1cbfc..3f065c9 100644
--- a/arch/x86/include/asm/atomic64_64.h
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -72,12 +72,7 @@ static inline void atomic64_sub(long i, atomic64_t *v)
  */
 static inline int atomic64_sub_and_test(long i, atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "er" (i), "m" (v->counter) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, i, "%0", "e");
 }
 
 /**
@@ -116,12 +111,7 @@ static inline void atomic64_dec(atomic64_t *v)
  */
 static inline int atomic64_dec_and_test(atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "decq %0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "m" (v->counter) : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", "e");
 }
 
 /**
@@ -134,12 +124,7 @@ static inline int atomic64_dec_and_test(atomic64_t *v)
  */
 static inline int atomic64_inc_and_test(atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "incq %0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "m" (v->counter) : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", "e");
 }
 
 /**
@@ -153,12 +138,7 @@ static inline int atomic64_inc_and_test(atomic64_t *v)
  */
 static inline int atomic64_add_negative(long i, atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "er" (i), "m" (v->counter) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, i, "%0", "s");
 }
 
 /**
diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
index 41639ce..6d76d09 100644
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -14,6 +14,7 @@
 
 #include <linux/compiler.h>
 #include <asm/alternative.h>
+#include <asm/rmwcc.h>
 
 #if BITS_PER_LONG == 32
 # define _BITOPS_LONG_SHIFT 5
@@ -204,12 +205,7 @@ static inline void change_bit(long nr, volatile unsigned long *addr)
  */
 static inline int test_and_set_bit(long nr, volatile unsigned long *addr)
 {
-	int oldbit;
-
-	asm volatile(LOCK_PREFIX "bts %2,%1\n\t"
-		     "sbb %0,%0" : "=r" (oldbit), ADDR : "Ir" (nr) : "memory");
-
-	return oldbit;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, nr, "%0", "c");
 }
 
 /**
@@ -255,13 +251,7 @@ static inline int __test_and_set_bit(long nr, volatile unsigned long *addr)
  */
 static inline int test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
-	int oldbit;
-
-	asm volatile(LOCK_PREFIX "btr %2,%1\n\t"
-		     "sbb %0,%0"
-		     : "=r" (oldbit), ADDR : "Ir" (nr) : "memory");
-
-	return oldbit;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, nr, "%0", "c");
 }
 
 /**
@@ -314,13 +304,7 @@ static inline int __test_and_change_bit(long nr, volatile unsigned long *addr)
  */
 static inline int test_and_change_bit(long nr, volatile unsigned long *addr)
 {
-	int oldbit;
-
-	asm volatile(LOCK_PREFIX "btc %2,%1\n\t"
-		     "sbb %0,%0"
-		     : "=r" (oldbit), ADDR : "Ir" (nr) : "memory");
-
-	return oldbit;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, nr, "%0", "c");
 }
 
 static __always_inline int constant_test_bit(long nr, const volatile unsigned long *addr)
diff --git a/arch/x86/include/asm/local.h b/arch/x86/include/asm/local.h
index 2d89e39..5b23e60 100644
--- a/arch/x86/include/asm/local.h
+++ b/arch/x86/include/asm/local.h
@@ -52,12 +52,7 @@ static inline void local_sub(long i, local_t *l)
  */
 static inline int local_sub_and_test(long i, local_t *l)
 {
-	unsigned char c;
-
-	asm volatile(_ASM_SUB "%2,%0; sete %1"
-		     : "+m" (l->a.counter), "=qm" (c)
-		     : "ir" (i) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, i, "%0", "e");
 }
 
 /**
@@ -70,12 +65,7 @@ static inline int local_sub_and_test(long i, local_t *l)
  */
 static inline int local_dec_and_test(local_t *l)
 {
-	unsigned char c;
-
-	asm volatile(_ASM_DEC "%0; sete %1"
-		     : "+m" (l->a.counter), "=qm" (c)
-		     : : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, "%0", "e");
 }
 
 /**
@@ -88,12 +78,7 @@ static inline int local_dec_and_test(local_t *l)
  */
 static inline int local_inc_and_test(local_t *l)
 {
-	unsigned char c;
-
-	asm volatile(_ASM_INC "%0; sete %1"
-		     : "+m" (l->a.counter), "=qm" (c)
-		     : : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, "%0", "e");
 }
 
 /**
@@ -107,12 +92,7 @@ static inline int local_inc_and_test(local_t *l)
  */
 static inline int local_add_negative(long i, local_t *l)
 {
-	unsigned char c;
-
-	asm volatile(_ASM_ADD "%2,%0; sets %1"
-		     : "+m" (l->a.counter), "=qm" (c)
-		     : "ir" (i) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, i, "%0", "s");
 }
 
 /**