2004-05-05 19:19:47 +00:00
|
|
|
/*
|
|
|
|
* $HEADER$
|
|
|
|
*/
|
|
|
|
|
2004-06-07 15:33:53 +00:00
|
|
|
#ifndef OMPI_SYS_ATOMIC_H_INCLUDED
|
|
|
|
#define OMPI_SYS_ATOMIC_H_INCLUDED
|
2004-05-05 19:19:47 +00:00
|
|
|
|
2004-05-05 23:19:32 +00:00
|
|
|
/*
|
|
|
|
* On ia32, we use cmpxchg.
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_SMP
/* On SMP builds, prefix read-modify-write instructions with the x86
 * "lock" prefix so they are atomic with respect to other processors. */
#define LOCK "lock; "
/* Compiler-only barrier: prevents the compiler from reordering memory
 * accesses across this point (no fence instruction is emitted). */
#define MB() __asm__ __volatile__("": : :"memory")
#else
/* Uniprocessor build: the lock prefix and barriers are unnecessary. */
#define LOCK
#define MB()
#endif
|
|
|
|
|
|
|
|
|
2004-06-07 15:33:53 +00:00
|
|
|
/**
 * Full memory barrier (compiler barrier only on ia32; see MB()).
 *
 * Fix: the original declaration omitted the return type, relying on
 * implicit int — invalid since C99 and wrong for a barrier anyway.
 */
static inline void ompi_atomic_mb(void)
{
    MB();
}
|
|
|
|
|
|
|
|
|
2004-06-07 15:33:53 +00:00
|
|
|
/**
 * Read memory barrier (compiler barrier only on ia32; see MB()).
 *
 * Fix: added the missing void return type (implicit int is invalid
 * since C99).
 */
static inline void ompi_atomic_rmb(void)
{
    MB();
}
|
|
|
|
|
|
|
|
|
2004-06-07 15:33:53 +00:00
|
|
|
/**
 * Write memory barrier (compiler barrier only on ia32; see MB()).
 *
 * Fix: added the missing void return type (implicit int is invalid
 * since C99).
 */
static inline void ompi_atomic_wmb(void)
{
    MB();
}
|
|
|
|
|
2004-05-05 19:19:47 +00:00
|
|
|
|
2004-06-07 15:33:53 +00:00
|
|
|
static inline int ompi_atomic_cmpset_32(volatile uint32_t *addr,
|
2004-05-05 23:19:32 +00:00
|
|
|
uint32_t old,
|
|
|
|
uint32_t new)
|
|
|
|
{
|
|
|
|
uint32_t ret = old;
|
2004-05-05 19:19:47 +00:00
|
|
|
|
2004-05-05 23:19:32 +00:00
|
|
|
__asm__ __volatile (
|
|
|
|
LOCK "cmpxchgl %1,%2 \n\
|
|
|
|
setz %%al \n\
|
|
|
|
movzbl %%al,%0 \n"
|
|
|
|
: "+a" (ret)
|
|
|
|
: "r" (new), "m" (*addr)
|
|
|
|
: "memory");
|
|
|
|
|
|
|
|
return (ret == old);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2004-06-07 15:33:53 +00:00
|
|
|
/**
 * 32-bit compare-and-swap with acquire semantics.
 *
 * On ia32 a locked cmpxchg is already a full barrier, so this simply
 * delegates to the plain compare-and-swap.
 */
static inline int ompi_atomic_cmpset_acq_32(volatile uint32_t *addr,
                                            uint32_t old,
                                            uint32_t new)
{
    return ompi_atomic_cmpset_32(addr, old, new);
}
|
|
|
|
|
|
|
|
|
2004-06-07 15:33:53 +00:00
|
|
|
/**
 * 32-bit compare-and-swap with release semantics.
 *
 * On ia32 a locked cmpxchg is already a full barrier, so this simply
 * delegates to the plain compare-and-swap.
 */
static inline int ompi_atomic_cmpset_rel_32(volatile uint32_t *addr,
                                            uint32_t old,
                                            uint32_t new)
{
    return ompi_atomic_cmpset_32(addr, old, new);
}
|
|
|
|
|
|
|
|
|
2004-06-07 15:33:53 +00:00
|
|
|
static inline int ompi_atomic_cmpset_64(volatile uint64_t *addr,
|
2004-05-05 23:19:32 +00:00
|
|
|
uint64_t old,
|
|
|
|
uint64_t new)
|
2004-05-05 19:19:47 +00:00
|
|
|
{
|
|
|
|
/*
|
|
|
|
* Compare EDX:EAX with m64. If equal, set ZF and load ECX:EBX into
|
|
|
|
* m64. Else, clear ZF and load m64 into EDX:EAX.
|
|
|
|
*/
|
|
|
|
|
2004-05-05 23:19:32 +00:00
|
|
|
uint64_t ret = old;
|
2004-05-05 19:19:47 +00:00
|
|
|
struct { uint32_t lo; uint32_t hi; } *p = (struct lwords *) &new;
|
|
|
|
|
2004-05-05 23:19:32 +00:00
|
|
|
__asm__ __volatile(
|
|
|
|
LOCK "cmpxchg8b %1\n"
|
|
|
|
: "+A" (ret)
|
|
|
|
: "m" (*addr), "b" (p->lo), "c" (p->hi)
|
|
|
|
: "memory");
|
|
|
|
|
|
|
|
return (ret == old);
|
|
|
|
}
|
2004-05-05 19:19:47 +00:00
|
|
|
|
2004-05-05 23:19:32 +00:00
|
|
|
|
2004-06-07 15:33:53 +00:00
|
|
|
/**
 * 64-bit compare-and-swap with acquire semantics.
 *
 * Fix: the original called "ompi_atomic_cpmset_64" — a transposed-letter
 * typo referencing a function that does not exist; corrected to
 * ompi_atomic_cmpset_64.  On ia32 the locked cmpxchg8b is already a
 * full barrier, so delegation suffices.
 */
static inline int ompi_atomic_cmpset_acq_64(volatile uint64_t *addr,
                                            uint64_t old,
                                            uint64_t new)
{
    return ompi_atomic_cmpset_64(addr, old, new);
}
|
|
|
|
|
|
|
|
|
2004-06-07 15:33:53 +00:00
|
|
|
/**
 * 64-bit compare-and-swap with release semantics.
 *
 * Fix: the original called "ompi_atomic_cpmset_64" — a transposed-letter
 * typo referencing a function that does not exist; corrected to
 * ompi_atomic_cmpset_64.  On ia32 the locked cmpxchg8b is already a
 * full barrier, so delegation suffices.
 */
static inline int ompi_atomic_cmpset_rel_64(volatile uint64_t *addr,
                                            uint64_t old,
                                            uint64_t new)
{
    return ompi_atomic_cmpset_64(addr, old, new);
}
|
|
|
|
|
2004-06-07 15:33:53 +00:00
|
|
|
#endif /* ! OMPI_SYS_ATOMIC_H_INCLUDED */
|