Renamed LOCK -> ATOMIC_SMP_LOCK

This commit is contained in:
Jaroslav Kysela 2003-02-20 19:24:01 +00:00
parent 1c908a75c1
commit 8d2eecb547

View file

@ -8,7 +8,7 @@
 * resource counting etc..
 */
/* x86 bus-lock instruction prefix applied to the atomic operations below. */
#define ATOMIC_SMP_LOCK "lock ; "
/*
 * Make sure gcc doesn't try to be clever and move things around
@ -49,7 +49,7 @@ typedef struct { volatile int counter; } atomic_t;
/**
 * atomic_add - atomically add @i to the counter at @v
 *
 * Emits an x86 "addl" carrying the ATOMIC_SMP_LOCK prefix so the
 * read-modify-write on v->counter is a single locked bus operation.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		ATOMIC_SMP_LOCK "addl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
@ -65,7 +65,7 @@ static __inline__ void atomic_add(int i, atomic_t *v)
/**
 * atomic_sub - atomically subtract @i from the counter at @v
 *
 * Locked x86 "subl"; mirrors atomic_add with the opposite operation.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		ATOMIC_SMP_LOCK "subl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
@ -85,7 +85,7 @@ static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
unsigned char c; unsigned char c;
__asm__ __volatile__( __asm__ __volatile__(
LOCK "subl %2,%0; sete %1" ATOMIC_SMP_LOCK "subl %2,%0; sete %1"
:"=m" (v->counter), "=qm" (c) :"=m" (v->counter), "=qm" (c)
:"ir" (i), "m" (v->counter) : "memory"); :"ir" (i), "m" (v->counter) : "memory");
return c; return c;
@ -101,7 +101,7 @@ static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
/**
 * atomic_inc - atomically increment the counter at @v by one
 *
 * Locked x86 "incl" on v->counter.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		ATOMIC_SMP_LOCK "incl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}
@ -116,7 +116,7 @@ static __inline__ void atomic_inc(atomic_t *v)
/**
 * atomic_dec - atomically decrement the counter at @v by one
 *
 * Locked x86 "decl" on v->counter.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		ATOMIC_SMP_LOCK "decl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}
@ -135,7 +135,7 @@ static __inline__ int atomic_dec_and_test(atomic_t *v)
unsigned char c; unsigned char c;
__asm__ __volatile__( __asm__ __volatile__(
LOCK "decl %0; sete %1" ATOMIC_SMP_LOCK "decl %0; sete %1"
:"=m" (v->counter), "=qm" (c) :"=m" (v->counter), "=qm" (c)
:"m" (v->counter) : "memory"); :"m" (v->counter) : "memory");
return c != 0; return c != 0;
@ -155,7 +155,7 @@ static __inline__ int atomic_inc_and_test(atomic_t *v)
unsigned char c; unsigned char c;
__asm__ __volatile__( __asm__ __volatile__(
LOCK "incl %0; sete %1" ATOMIC_SMP_LOCK "incl %0; sete %1"
:"=m" (v->counter), "=qm" (c) :"=m" (v->counter), "=qm" (c)
:"m" (v->counter) : "memory"); :"m" (v->counter) : "memory");
return c != 0; return c != 0;
@ -176,7 +176,7 @@ static __inline__ int atomic_add_negative(int i, atomic_t *v)
unsigned char c; unsigned char c;
__asm__ __volatile__( __asm__ __volatile__(
LOCK "addl %2,%0; sets %1" ATOMIC_SMP_LOCK "addl %2,%0; sets %1"
:"=m" (v->counter), "=qm" (c) :"=m" (v->counter), "=qm" (c)
:"ir" (i), "m" (v->counter) : "memory"); :"ir" (i), "m" (v->counter) : "memory");
return c; return c;
@ -184,11 +184,11 @@ static __inline__ int atomic_add_negative(int i, atomic_t *v)
/* These are x86-specific, used by some header files */
/* Atomically clear the bits of 'mask' in the int at *addr (locked "andl"). */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__(ATOMIC_SMP_LOCK "andl %0,%1" \
: : "r" (~(mask)),"m" (*addr) : "memory")
/* Atomically set the bits of 'mask' in the int at *addr (locked "orl"). */
#define atomic_set_mask(mask, addr) \
__asm__ __volatile__(ATOMIC_SMP_LOCK "orl %0,%1" \
: : "r" (mask),"m" (*addr) : "memory")
/*
@ -216,6 +216,8 @@ __asm__ __volatile__(LOCK "orl %0,%1" \
/* Write memory barrier: "sfence" orders all prior stores before later ones. */
#define wmb() asm volatile("sfence":::"memory")
#endif #endif
#undef ATOMIC_SMP_LOCK
/* Mark the i386 atomic implementation as provided. */
#define IATOMIC_DEFINED 1

#endif /* __i386__ */