1/* Copyright (c) 2013, Ben Noordhuis <[email protected]>
2 *
3 * Permission to use, copy, modify, and/or distribute this software for any
4 * purpose with or without fee is hereby granted, provided that the above
5 * copyright notice and this permission notice appear in all copies.
6 *
7 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
8 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
9 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
10 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
11 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
12 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
13 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
14 */
15
16#ifndef UV_ATOMIC_OPS_H_
17#define UV_ATOMIC_OPS_H_
18
19#include "internal.h" /* UV_UNUSED */
20
21#if defined(__SUNPRO_C) || defined(__SUNPRO_CC)
22#include <atomic.h>
23#endif
24
/* Forward declarations.  UV_UNUSED() (presumably defined in internal.h, per
 * the include comment above) marks these static helpers so translation units
 * that include this header but use neither function do not trigger
 * unused-function warnings -- confirm against internal.h. */
UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval));
UV_UNUSED(static void cpu_relax(void));
27
28/* Prefer hand-rolled assembly over the gcc builtins because the latter also
29 * issue full memory barriers.
30 */
31UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval)) {
32#if defined(__i386__) || defined(__x86_64__)
33 int out;
34 __asm__ __volatile__ ("lock; cmpxchg %2, %1;"
35 : "=a" (out), "+m" (*(volatile int*) ptr)
36 : "r" (newval), "0" (oldval)
37 : "memory");
38 return out;
39#elif defined(_AIX) && defined(__xlC__)
40 const int out = (*(volatile int*) ptr);
41 __compare_and_swap(ptr, &oldval, newval);
42 return out;
43#elif defined(__MVS__)
44 unsigned int op4;
45 if (__plo_CSST(ptr, (unsigned int*) &oldval, newval,
46 (unsigned int*) ptr, *ptr, &op4))
47 return oldval;
48 else
49 return op4;
50#elif defined(__SUNPRO_C) || defined(__SUNPRO_CC)
51 return atomic_cas_uint((uint_t *)ptr, (uint_t)oldval, (uint_t)newval);
52#else
53 return __sync_val_compare_and_swap(ptr, oldval, newval);
54#endif
55}
56
/* Hint to the processor that the caller is in a spin-wait loop.  On x86 the
 * "rep; nop" sequence decodes as the PAUSE instruction; this spelling also
 * assembles on toolchains that predate the PAUSE mnemonic.  On all other
 * architectures this compiles to nothing. */
UV_UNUSED(static void cpu_relax(void)) {
#if defined(__i386__) || defined(__x86_64__)
  __asm__ __volatile__ ("rep; nop"); /* a.k.a. PAUSE */
#endif
}
62
63#endif /* UV_ATOMIC_OPS_H_ */
64