#ifndef UV_ATOMIC_OPS_H_
#define UV_ATOMIC_OPS_H_
#include "internal.h" /* UV_UNUSED */
#if defined(__SUNPRO_C) || defined(__SUNPRO_CC)
#include <atomic.h>
#endif
/* Atomic compare-and-swap on an int: if *ptr == oldval, store newval.
 * Returns the value *ptr held immediately before the operation, so the
 * caller can tell whether the swap happened (return == oldval). */
UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval));

/* Busy-wait hint for spin loops; a no-op on architectures without one. */
UV_UNUSED(static void cpu_relax(void));
/* Atomically compare-and-swap *ptr: if it equals oldval, replace it with
 * newval. Always returns the previous value of *ptr; on success that value
 * equals oldval. Each platform branch below provides full-barrier semantics
 * via its native CAS primitive. */
UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval)) {
#if defined(__i386__) || defined(__x86_64__)
  int out;
  /* LOCK CMPXCHG: compares EAX (preloaded with oldval via the "0"
   * constraint) against *ptr; on match stores newval, otherwise loads the
   * current *ptr into EAX. Either way "out" ends up holding the old value.
   * "memory" clobber makes this a compiler barrier as well. */
  __asm__ __volatile__ ("lock; cmpxchg %2, %1;"
                        : "=a" (out), "+m" (*(volatile int*) ptr)
                        : "r" (newval), "0" (oldval)
                        : "memory");
  return out;
#elif defined(_AIX) && defined(__xlC__)
  /* NOTE(review): the snapshot read and the CAS are two separate steps
   * here, so "out" may not be the exact value the CAS observed if another
   * thread intervenes — presumably acceptable to callers; verify. */
  const int out = (*(volatile int*) ptr);
  __compare_and_swap(ptr, &oldval, newval);
  return out;
#elif defined(__MVS__)
  /* z/OS PLO compare-and-swap: on failure op4 receives the current value
   * of *ptr; on success the old value is, by definition, oldval. */
  unsigned int op4;
  if (__plo_CSST(ptr, (unsigned int*) &oldval, newval,
                (unsigned int*) ptr, *ptr, &op4))
    return oldval;
  else
    return op4;
#elif defined(__SUNPRO_C) || defined(__SUNPRO_CC)
  /* Solaris Studio: atomic_cas_uint returns the former value of *ptr. */
  return atomic_cas_uint((uint_t *)ptr, (uint_t)oldval, (uint_t)newval);
#else
  /* GCC/Clang builtin; full memory barrier, returns the prior value. */
  return __sync_val_compare_and_swap(ptr, oldval, newval);
#endif
}
/* Tell the processor we are spinning so it can throttle the loop.
 * On x86 this is "rep; nop", the encoding of the PAUSE instruction;
 * elsewhere it compiles to nothing. */
UV_UNUSED(static void cpu_relax(void)) {
#if defined(__i386__) || defined(__x86_64__)
  __asm__ __volatile__ ("rep; nop");  /* PAUSE */
#endif
}
#endif /* UV_ATOMIC_OPS_H_ */