c89 + atomic_ops = libuv
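Two files: a small test driver, followed by uv-atomic.h, a header-only sketch of C89-friendly atomic operations (load, store, exchange, compare-and-swap, and fetch-and-add/sub/and/or/xor over fixed-width integer and pointer types), backed here by GCC inline assembly for x86-64.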
/* Copyright (c) 2013, Ben Noordhuis <info@bnoordhuis.nl>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "uv-atomic.h"
#include <stdlib.h>
#include <assert.h>
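/* Smoke test: exercises the pointer helpers (store, load, exchange,
 * fetch_add, fetch_sub) and a few of the integer helpers (weak
 * compare-and-swap, fetch_and, fetch_or, fetch_xor) from uv-atomic.h. */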
int main(void)
{
  {
    void *a = (void *) &a;
    void *b = (void *) &b;
    uv_store_ptr(&a, NULL);
    assert(a == NULL);
    b = uv_load_ptr(&a);
    assert(b == NULL);
    b = uv_exchange_ptr(&a, (void *) 0xDEADBEEF);
    assert(a == (void *) 0xDEADBEEF);
    assert(b == NULL);
  }

  {
    int8_t a = 1;
    int8_t b = 2;
    int rc = uv_compare_exchange_weak_int8(&a, &b, 2);
    assert(rc == 0);
    assert(a == 1);
    assert(b == 1);
    rc = uv_compare_exchange_weak_int8(&a, &b, 2);
    assert(a == 2);
    assert(b == 1);
  }

  {
    uint16_t a = 1;
    uint16_t b = 2;
    int rc = uv_compare_exchange_weak_uint16(&a, &b, 2);
    assert(rc == 0);
    assert(a == 1);
    assert(b == 1);
    rc = uv_compare_exchange_weak_uint16(&a, &b, 2);
    assert(a == 2);
    assert(b == 1);
  }

  {
    char s[] = "ab";
    char *a = s;
    char *b = uv_fetch_add_ptr((void **) &a, 1);
    assert(a == s + 1);
    assert(b == s);
  }

  {
    char s[] = "ab";
    char *a = s + 1;
    char *b = uv_fetch_sub_ptr((void **) &a, 1);
    assert(a == s);
    assert(b == s + 1);
  }

  {
    int16_t a = 0x1010;
    int16_t b = uv_fetch_and_int16(&a, 0x0101);
    assert(a == 0x0000);
    assert(b == 0x1010);
  }

  {
    int16_t a = 0x1010;
    int16_t b = uv_fetch_or_int16(&a, 0x0101);
    assert(a == 0x1111);
    assert(b == 0x1010);
  }

  {
    int16_t a = 0x1010;
    int16_t b = uv_fetch_xor_int16(&a, 0x0101);
    assert(a == 0x1111);
    assert(b == 0x1010);
  }

  {
    int16_t a = 0x1100;
    int16_t b = uv_fetch_xor_int16(&a, 0x0110);
    assert(a == 0x1010);
    assert(b == 0x1100);
  }

  return 0;
}
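uv-atomic.h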
/* Copyright (c) 2013, Ben Noordhuis <info@bnoordhuis.nl>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef UV_ATOMIC_H_
#define UV_ATOMIC_H_
#include <stddef.h>
#include <stdint.h>
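/* Target detection: UV_ARCH and UV_COMPILER select the code paths below,
 * and UV_POINTER_BITS selects the integer type that backs the pointer
 * operations. */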
#define UV_ARCH_UNKNOWN 0
#define UV_ARCH_IA32 1
#define UV_ARCH_X64 2
#if defined(__i386__)
#define UV_ARCH UV_ARCH_IA32
#define UV_POINTER_BITS 32
#elif defined(__x86_64__)
#define UV_ARCH UV_ARCH_X64
#define UV_POINTER_BITS 64
#else
#define UV_ARCH UV_ARCH_UNKNOWN
#define UV_POINTER_BITS 0
#endif
#define UV_COMPILER_UNKNOWN 0
#define UV_COMPILER_GCC 1
#define UV_COMPILER_MSVC 2
#if defined(__GNUC__)
#define UV_COMPILER UV_COMPILER_GCC
#elif defined(_MSC_VER)
#define UV_COMPILER UV_COMPILER_MSVC
#else
#define UV_COMPILER UV_COMPILER_UNKNOWN
#endif
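/* Strict C89 has no `inline`, so the helpers are plain `static` functions;
 * GCC builds tag them __attribute__((unused)) to silence warnings about
 * helpers a translation unit never calls, and compilers that do not define
 * __STDC__ get `inline static`. */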
#if !defined(__STDC__)
#define UV_INLINE(declaration) inline static declaration
#elif UV_COMPILER == UV_COMPILER_GCC
#define UV_INLINE(declaration) __attribute__((unused)) static declaration
#else
#define UV_INLINE(declaration) static declaration
#endif
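/* UV_ACCESS_ONCE and UV_ACCESS_ONCE_PTR force exactly one load or store by
 * going through a volatile-qualified lvalue, preventing the compiler from
 * merging, caching, or re-reading the access. */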
#define UV_ACCESS_ONCE(type, ptr) \
(* (volatile type *) (ptr))
#define UV_ACCESS_ONCE_PTR(type, ptr) \
(* (type * volatile *) (ptr))
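/* X-macro tables: each map expands V(...) once per memory order, per integer
 * type, or per (type, order) pair; they are used below to stamp out the
 * declarations and definitions. */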
#define UV_ATOMIC_MEMORY_ORDER_MAP(V, arg) \
V(relaxed, arg) \
V(consume, arg) \
V(acquire, arg) \
V(release, arg) \
V(acq_rel, arg) \
V(seq_cst, arg)
#define UV_ATOMIC_INTEGER_TYPE_MAP(V, arg) \
V(int8, arg) \
V(uint8, arg) \
V(int16, arg) \
V(uint16, arg) \
V(int32, arg) \
V(uint32, arg) \
V(int64, arg) \
V(uint64, arg)
#define UV_ATOMIC_INTEGER_TYPE_AND_MEMORY_ORDER_MAP(V, arg) \
V(int8, relaxed, arg) \
V(int8, consume, arg) \
V(int8, acquire, arg) \
V(int8, release, arg) \
V(int8, acq_rel, arg) \
V(int8, seq_cst, arg) \
V(uint8, relaxed, arg) \
V(uint8, consume, arg) \
V(uint8, acquire, arg) \
V(uint8, release, arg) \
V(uint8, acq_rel, arg) \
V(uint8, seq_cst, arg) \
V(int16, relaxed, arg) \
V(int16, consume, arg) \
V(int16, acquire, arg) \
V(int16, release, arg) \
V(int16, acq_rel, arg) \
V(int16, seq_cst, arg) \
V(uint16, relaxed, arg) \
V(uint16, consume, arg) \
V(uint16, acquire, arg) \
V(uint16, release, arg) \
V(uint16, acq_rel, arg) \
V(uint16, seq_cst, arg) \
V(int32, relaxed, arg) \
V(int32, consume, arg) \
V(int32, acquire, arg) \
V(int32, release, arg) \
V(int32, acq_rel, arg) \
V(int32, seq_cst, arg) \
V(uint32, relaxed, arg) \
V(uint32, consume, arg) \
V(uint32, acquire, arg) \
V(uint32, release, arg) \
V(uint32, acq_rel, arg) \
V(uint32, seq_cst, arg) \
V(int64, relaxed, arg) \
V(int64, consume, arg) \
V(int64, acquire, arg) \
V(int64, release, arg) \
V(int64, acq_rel, arg) \
V(int64, seq_cst, arg) \
V(uint64, relaxed, arg) \
V(uint64, consume, arg) \
V(uint64, acquire, arg) \
V(uint64, release, arg) \
V(uint64, acq_rel, arg) \
V(uint64, seq_cst, arg)
UV_INLINE(void *uv_load_ptr(void **ptr));
UV_INLINE(void uv_store_ptr(void **ptr, void *val));
UV_INLINE(void *uv_exchange_ptr(void **ptr, void *val));
UV_INLINE(int uv_compare_exchange_ptr_weak(void **ptr,
void **oldval,
void *newval));
UV_INLINE(int uv_compare_exchange_ptr_strong(void **ptr,
void **oldval,
void *newval));
UV_INLINE(void *uv_fetch_add_ptr(void **ptr, ptrdiff_t diff));
UV_INLINE(void *uv_fetch_sub_ptr(void **ptr, ptrdiff_t diff));
#define V(model, _) \
UV_INLINE(void *uv_load_ptr_ ## model(void **ptr)); \
UV_INLINE(void uv_store_ptr_ ## model(void **ptr, void *val)); \
UV_INLINE(void *uv_exchange_ptr_ ## model(void **ptr, void *val)); \
UV_INLINE(int uv_compare_exchange_weak_ptr_ ## model(void **ptr, \
void **oldval, \
void *newval)); \
UV_INLINE(int uv_compare_exchange_strong_ptr_ ## model(void **ptr, \
void **oldval, \
void *newval)); \
UV_INLINE(void *uv_fetch_add_ptr_ ## model(void **ptr, ptrdiff_t diff)); \
UV_INLINE(void *uv_fetch_sub_ptr_ ## model(void **ptr, ptrdiff_t diff));
UV_ATOMIC_MEMORY_ORDER_MAP(V, _)
#undef V
#define V(type, _) \
UV_INLINE(type ## _t uv_load_ ## type(type ## _t *ptr)); \
UV_INLINE(void uv_store_ ## type(type ## _t *ptr, type ## _t val)); \
UV_INLINE(type ## _t uv_exchange_ ## type(type ## _t *ptr, type ## _t val));\
UV_INLINE(int uv_compare_exchange_weak_ ## type(type ## _t *ptr, \
type ## _t *oldval, \
type ## _t newval)); \
UV_INLINE(int uv_compare_exchange_strong_ ## type(type ## _t *ptr, \
type ## _t *oldval, \
type ## _t newval)); \
UV_INLINE(type ## _t uv_fetch_add_ ## type(type ## _t *ptr, \
type ## _t diff)); \
UV_INLINE(type ## _t uv_fetch_sub_ ## type(type ## _t *ptr, \
type ## _t diff)); \
UV_INLINE(type ## _t uv_fetch_and_ ## type(type ## _t *ptr, \
type ## _t val)); \
UV_INLINE(type ## _t uv_fetch_or_ ## type(type ## _t *ptr, \
type ## _t val)); \
UV_INLINE(type ## _t uv_fetch_xor_ ## type(type ## _t *ptr, \
type ## _t val));
UV_ATOMIC_INTEGER_TYPE_MAP(V, _)
#undef V
#define V(type, model, _) \
UV_INLINE(type ## _t uv_load_ ## type ## _ ## model(type ## _t *ptr)); \
UV_INLINE(void uv_store_ ## type ## _ ## model(type ## _t *ptr, \
type ## _t val)); \
UV_INLINE(type ## _t uv_exchange_ ## type ## _ ## model(type ## _t *ptr, \
type ## _t val)); \
UV_INLINE(int uv_compare_exchange_weak_ ## type ## _ ## model( \
type ## _t *ptr, \
type ## _t *oldval, \
type ## _t newval)); \
UV_INLINE(int uv_compare_exchange_strong_ ## type ## _ ## model( \
type ## _t *ptr, \
type ## _t *oldval, \
type ## _t newval)); \
UV_INLINE(type ## _t uv_fetch_add_ ## type ## _ ## model(type ## _t *ptr, \
type ## _t diff)); \
UV_INLINE(type ## _t uv_fetch_sub_ ## type ## _ ## model(type ## _t *ptr, \
type ## _t diff)); \
UV_INLINE(type ## _t uv_fetch_and_ ## type ## _ ## model(type ## _t *ptr, \
type ## _t val)); \
UV_INLINE(type ## _t uv_fetch_or_ ## type ## _ ## model(type ## _t *ptr, \
type ## _t val)); \
UV_INLINE(type ## _t uv_fetch_xor_ ## type ## _ ## model(type ## _t *ptr, \
type ## _t val));
UV_ATOMIC_INTEGER_TYPE_AND_MEMORY_ORDER_MAP(V, _)
#undef V
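/* The pointer operations are thin wrappers around the integer helpers of
 * pointer width: uint32 on 32-bit targets, uint64 on 64-bit targets. */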
#define V(type) \
UV_INLINE(void *uv_load_ptr(void **ptr)) \
{ \
return (void *) uv_load_ ## type((type ## _t *) ptr); \
} \
UV_INLINE(void uv_store_ptr(void **ptr, void *val)) \
{ \
uv_store_ ## type((type ## _t *) ptr, (type ## _t ) val); \
} \
UV_INLINE(void *uv_exchange_ptr(void **ptr, void *val)) \
{ \
return (void *) uv_exchange_ ## type((type ## _t *) ptr, \
(type ## _t) val); \
} \
UV_INLINE(int uv_compare_exchange_ptr_weak(void **ptr, \
void **oldval, \
void *newval)) \
{ \
return uv_compare_exchange_weak_ ## type((type ## _t *) ptr, \
(type ## _t *) oldval, \
(type ## _t) newval); \
} \
UV_INLINE(int uv_compare_exchange_ptr_strong(void **ptr, \
void **oldval, \
void *newval)) \
{ \
return uv_compare_exchange_strong_ ## type((type ## _t *) ptr, \
(type ## _t *) oldval, \
(type ## _t) newval); \
} \
UV_INLINE(void *uv_fetch_add_ptr(void **ptr, ptrdiff_t diff)) \
{ \
return (void *) uv_fetch_add_ ## type((type ## _t *) ptr, diff); \
} \
UV_INLINE(void *uv_fetch_sub_ptr(void **ptr, ptrdiff_t diff)) \
{ \
return (void *) uv_fetch_sub_ ## type((type ## _t *) ptr, diff); \
}
#if UV_POINTER_BITS == 32
V(uint32)
#elif UV_POINTER_BITS == 64
V(uint64)
#endif
#undef V
#define V(model, type) \
UV_INLINE(void *uv_load_ptr_ ## model(void **ptr)) \
{ \
return (void *) uv_load_ ## type ## _ ## model((type ## _t *) ptr); \
} \
UV_INLINE(void uv_store_ptr_ ## model(void **ptr, void *val)) \
{ \
uv_store_ ## type ## _ ## model((type ## _t *) ptr, (type ## _t) val); \
} \
UV_INLINE(void *uv_exchange_ptr_ ## model(void **ptr, void *val)) \
{ \
return (void *) uv_exchange_ ## type ## _ ## model((type ## _t *) ptr, \
(type ## _t) val); \
} \
UV_INLINE(int uv_compare_exchange_weak_ptr_ ## model(void **ptr, \
void **oldval, \
void *newval)) \
{ \
return uv_compare_exchange_weak_ ## type ## _ ## model( \
(type ## _t *) ptr, \
(type ## _t *) oldval, \
(type ## _t) newval); \
} \
UV_INLINE(int uv_compare_exchange_strong_ptr_ ## model(void **ptr, \
void **oldval, \
void *newval)) \
{ \
return uv_compare_exchange_strong_ ## type ## _ ## model( \
(type ## _t *) ptr, \
(type ## _t *) oldval, \
(type ## _t) newval); \
} \
UV_INLINE(void *uv_fetch_add_ptr_ ## model(void **ptr, ptrdiff_t diff)) \
{ \
return (void *) uv_fetch_add_ ## type ## _ ## model((type ## _t *) ptr, \
diff); \
} \
UV_INLINE(void *uv_fetch_sub_ptr_ ## model(void **ptr, ptrdiff_t diff)) \
{ \
return (void *) uv_fetch_sub_ ## type ## _ ## model((type ## _t *) ptr, \
diff); \
}
#if UV_POINTER_BITS == 32
UV_ATOMIC_MEMORY_ORDER_MAP(V, uint32)
#elif UV_POINTER_BITS == 64
UV_ATOMIC_MEMORY_ORDER_MAP(V, uint64)
#endif
#undef V
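/* The unsuffixed integer operations default to the sequentially consistent
 * variants. */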
#define V(type, _) \
UV_INLINE(type ## _t uv_load_ ## type(type ## _t *ptr)) \
{ \
return uv_load_ ## type ## _seq_cst(ptr); \
} \
UV_INLINE(void uv_store_ ## type(type ## _t *ptr, type ## _t val)) \
{ \
uv_store_ ## type ## _seq_cst(ptr, val); \
} \
UV_INLINE(type ## _t uv_exchange_ ## type(type ## _t *ptr, type ## _t val)) \
{ \
return uv_exchange_ ## type ## _seq_cst(ptr, val); \
} \
UV_INLINE(int uv_compare_exchange_weak_ ## type(type ## _t *ptr, \
type ## _t *oldval, \
type ## _t newval)) \
{ \
return uv_compare_exchange_weak_ ## type ## _seq_cst(ptr, \
oldval, \
newval); \
} \
UV_INLINE(int uv_compare_exchange_strong_ ## type(type ## _t *ptr, \
type ## _t *oldval, \
type ## _t newval)) \
{ \
return uv_compare_exchange_strong_ ## type ## _seq_cst(ptr, \
oldval, \
newval); \
}
UV_ATOMIC_INTEGER_TYPE_MAP(V, _)
#undef V
#define V(type, op) \
UV_INLINE(type ## _t uv_fetch_ ## op ## _ ## type(type ## _t *ptr, \
type ## _t val)) \
{ \
return uv_fetch_ ## op ## _ ## type ## _seq_cst(ptr, val); \
}
UV_ATOMIC_INTEGER_TYPE_MAP(V, add)
UV_ATOMIC_INTEGER_TYPE_MAP(V, sub)
UV_ATOMIC_INTEGER_TYPE_MAP(V, and)
UV_ATOMIC_INTEGER_TYPE_MAP(V, or)
UV_ATOMIC_INTEGER_TYPE_MAP(V, xor)
#undef V
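/* Loads and stores: every memory-order variant expands to the same plain
 * volatile access; the ordering suffix is accepted but not otherwise acted
 * on in this sketch. */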
#define V(type, model, _) \
UV_INLINE(type ## _t uv_load_ ## type ## _ ## model(type ## _t *ptr)) \
{ \
return UV_ACCESS_ONCE(type ## _t, ptr); \
} \
UV_INLINE(void uv_store_ ## type ## _ ## model(type ## _t *ptr, \
type ## _t val)) \
{ \
UV_ACCESS_ONCE(type ## _t, ptr) = val; \
}
UV_ATOMIC_INTEGER_TYPE_AND_MEMORY_ORDER_MAP(V, _)
#undef V
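/* x86-64 backend (GCC inline assembly): exchange uses xchg, which is
 * implicitly locked when one operand is memory; compare-and-swap uses
 * lock cmpxchg; fetch_add uses lock xadd. fetch_sub is fetch_add of the
 * negated value, and the weak compare-and-swap simply calls the strong one.
 * Other targets currently get declarations only. */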
#if UV_ARCH == UV_ARCH_X64
#define V(type, model, _) \
UV_INLINE(type ## _t uv_exchange_ ## type ## _ ## model(type ## _t *ptr, \
type ## _t val)) \
{ \
__asm__ __volatile__ ("xchg %0, %2;" \
: "=a" (val) \
: "a" (val), "m" (*ptr) \
: "memory"); \
return val; \
} \
UV_INLINE(int uv_compare_exchange_weak_ ## type ## _ ## model( \
type ## _t *ptr, \
type ## _t *oldval, \
type ## _t newval)) \
{ \
return uv_compare_exchange_strong_ ## type ## _ ## model(ptr, \
oldval, \
newval); \
} \
UV_INLINE(int uv_compare_exchange_strong_ ## type ## _ ## model( \
type ## _t *ptr, \
type ## _t *oldval, \
type ## _t newval)) \
{ \
type ## _t out; \
__asm__ __volatile__ ("lock; cmpxchg %2, %1;" \
: "=a" (out), "+m" (*ptr) \
: "r" (newval), "0" (*oldval) \
: "memory"); \
return (*oldval == out) || (*oldval = out, 0); \
} \
UV_INLINE(type ## _t uv_fetch_add_ ## type ## _ ## model(type ## _t *ptr, \
type ## _t diff)) \
{ \
type ## _t out; \
__asm__ __volatile__ ("lock; xadd %0, %2" \
: "=a" (out) \
: "a" (diff), "m" (*ptr) \
: "memory"); \
return out; \
} \
UV_INLINE(type ## _t uv_fetch_sub_ ## type ## _ ## model(type ## _t *ptr, \
type ## _t diff)) \
{ \
return uv_fetch_add_ ## type ## _ ## model(ptr, -diff); \
}
UV_ATOMIC_INTEGER_TYPE_AND_MEMORY_ORDER_MAP(V, _)
#undef V
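/* fetch_and / fetch_or / fetch_xor are built as compare-and-swap loops:
 * load the current value, apply the operation in a register, and retry
 * until the strong compare-and-swap succeeds; the old value is returned. */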
#define V(type, model, op) \
UV_INLINE(type ## _t uv_fetch_ ## op ## _ ## type ## _ ## model( \
type ## _t *ptr, \
type ## _t val)) \
{ \
type ## _t oldval; \
type ## _t newval; \
do { \
oldval = newval = uv_load_ ## type ## _ ## model(ptr); \
__asm__ __volatile__ (#op " %1, %0" \
: "=r" (newval) \
: "g" (val), "0" (newval)); \
} \
while (0 == uv_compare_exchange_strong_ ## type ## _ ## model(ptr, \
&oldval, \
newval)); \
return oldval; \
}
UV_ATOMIC_INTEGER_TYPE_AND_MEMORY_ORDER_MAP(V, and)
UV_ATOMIC_INTEGER_TYPE_AND_MEMORY_ORDER_MAP(V, or)
UV_ATOMIC_INTEGER_TYPE_AND_MEMORY_ORDER_MAP(V, xor)
#undef V
#endif /* UV_ARCH == UV_ARCH_X64 */
#undef UV_ATOMIC_INTEGER_TYPE_AND_MEMORY_ORDER_MAP
#undef UV_ATOMIC_INTEGER_TYPE_MAP
#undef UV_ATOMIC_MEMORY_ORDER_MAP
#undef UV_ACCESS_ONCE_PTR
#undef UV_ACCESS_ONCE
#undef UV_POINTER_BITS
#undef UV_COMPILER_MSVC
#undef UV_COMPILER_GCC
#undef UV_COMPILER_UNKNOWN
#undef UV_COMPILER
#undef UV_ARCH_X64
#undef UV_ARCH_IA32
#undef UV_ARCH_UNKNOWN
#undef UV_ARCH
#endif /* UV_ATOMIC_H_ */