Mirror of https://github.com/Fishwaldo/Star64_linux.git (synced 2025-06-19 13:11:14 +00:00)
arch: Remove __ARCH_HAVE_CMPXCHG
We removed the only user of this define in the rtmutex code. Get rid of it.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Cc: Sebastian Andrzej Siewior <bigeasy@linutronix.de>
commit a22e5f579b
parent cede88418b
17 changed files with 0 additions and 31 deletions
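Background for the diff below: __HAVE_ARCH_CMPXCHG let generic code test at compile time whether the architecture supplies an efficient cmpxchg(). Per the commit message, its only remaining user lived in the rtmutex code and has already been removed, so the per-arch definitions are dead weight. The fragment below is only a hedged sketch of how such a feature-test define is typically consumed; the rt_mutex_cmpxchg() helper and the CONFIG_DEBUG_RT_MUTEXES interaction are illustrative approximations, not a quote of the removed kernel code.

/*
 * Sketch only: the usual consumption pattern for a feature-test
 * define like __HAVE_ARCH_CMPXCHG. The names below mirror the
 * rtmutex style but are illustrative, not the exact removed code.
 */
#if defined(__HAVE_ARCH_CMPXCHG) && !defined(CONFIG_DEBUG_RT_MUTEXES)
/* Architecture has a real cmpxchg(): try the lockless fast path. */
# define rt_mutex_cmpxchg(l, c, n)	(cmpxchg(&(l)->owner, c, n) == c)
#else
/* No efficient cmpxchg (or debugging enabled): always take the slow path. */
# define rt_mutex_cmpxchg(l, c, n)	(0)
#endif

With the last such consumer gone, every hunk below simply deletes the define from an arch header.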
@@ -66,6 +66,4 @@
 #undef __ASM__MB
 #undef ____cmpxchg
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 #endif /* _ALPHA_CMPXCHG_H */
@@ -70,8 +70,6 @@ extern unsigned long __cmpxchg_u64_unsupported_on_32bit_kernels(
    if something tries to do an invalid cmpxchg(). */
 extern void __cmpxchg_called_with_bad_pointer(void);
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 				      unsigned long new, int size)
 {
@@ -64,7 +64,6 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
  * looks just like atomic_cmpxchg on our arch currently with a bunch of
  * variable casting.
  */
-#define __HAVE_ARCH_CMPXCHG 1
 
 #define cmpxchg(ptr, old, new) \
 ({ \
@@ -61,8 +61,6 @@ extern void ia64_xchg_called_with_bad_pointer(void);
  * indicated by comparing RETURN with OLD.
  */
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 /*
  * This function doesn't exist, so you'll get a linker error
  * if something tries to do an invalid cmpxchg().
@@ -107,8 +107,6 @@ __xchg_local(unsigned long x, volatile void *ptr, int size)
 	((__typeof__(*(ptr)))__xchg_local((unsigned long)(x), (ptr), \
 			sizeof(*(ptr))))
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 static inline unsigned long
 __cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
 {
@@ -90,7 +90,6 @@ extern unsigned long __invalid_cmpxchg_size(volatile void *,
  * indicated by comparing RETURN with OLD.
  */
 #ifdef CONFIG_RMW_INSNS
-#define __HAVE_ARCH_CMPXCHG 1
 
 static inline unsigned long __cmpxchg(volatile void *p, unsigned long old,
 				      unsigned long new, int size)
@@ -51,8 +51,6 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 	return old;
 }
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 #define cmpxchg(ptr, o, n) \
 ({ \
 	__typeof__(*(ptr)) _o_ = (o); \
@@ -138,8 +138,6 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
 		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr)))); \
 })
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 #define __cmpxchg_asm(ld, st, m, old, new) \
 ({ \
 	__typeof(*(m)) __ret; \
@@ -46,8 +46,6 @@ __xchg(unsigned long x, __volatile__ void *ptr, int size)
 #define xchg(ptr, x) \
 	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 /* bug catcher for when unsupported size is used - won't link */
 extern void __cmpxchg_called_with_bad_pointer(void);
 
@@ -144,7 +144,6 @@ __xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
  * Compare and exchange - if *p == old, set it to new,
  * and return the old value of *p.
  */
-#define __HAVE_ARCH_CMPXCHG 1
 
 static __always_inline unsigned long
 __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
@@ -32,8 +32,6 @@
 	__old; \
 })
 
-#define __HAVE_ARCH_CMPXCHG
-
 #define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn) \
 ({ \
 	register __typeof__(*(p1)) __old1 asm("2") = (o1); \
@@ -42,8 +42,6 @@ static inline unsigned long __cmpxchg(volatile unsigned long *m,
 			(unsigned long)(o), \
 			(unsigned long)(n)))
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 #include <asm-generic/cmpxchg-local.h>
 
 #endif /* _ASM_SCORE_CMPXCHG_H */
@@ -46,8 +46,6 @@ extern void __xchg_called_with_bad_pointer(void);
  * if something tries to do an invalid cmpxchg(). */
 extern void __cmpxchg_called_with_bad_pointer(void);
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 static inline unsigned long __cmpxchg(volatile void * ptr, unsigned long old,
 		unsigned long new, int size)
 {
@@ -34,7 +34,6 @@ static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr, int
  *
  * Cribbed from <asm-parisc/atomic.h>
  */
-#define __HAVE_ARCH_CMPXCHG 1
 
 /* bug catcher for when unsupported size is used - won't link */
 void __cmpxchg_called_with_bad_pointer(void);
@@ -65,8 +65,6 @@ static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr,
 
 #include <asm-generic/cmpxchg-local.h>
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 static inline unsigned long
 __cmpxchg_u32(volatile int *m, int old, int new)
 {
@@ -105,9 +105,6 @@ static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
 
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
-/* Define this to indicate that cmpxchg is an efficient operation. */
-#define __HAVE_ARCH_CMPXCHG
-
 #endif /* !__ASSEMBLY__ */
 
 #endif /* _ASM_TILE_ATOMIC_64_H */
@@ -4,8 +4,6 @@
 #include <linux/compiler.h>
 #include <asm/alternative.h> /* Provides LOCK_PREFIX */
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 /*
  * Non-existant functions to indicate usage errors at link time
  * (or compile-time if the compiler implements __compiletime_error().