# upstream trunk commits r1907442, r1907441
--- apr.orig/atomic/unix/builtins64.c
+++ apr/atomic/unix/builtins64.c
@@ -27,7 +27,7 @@
 
 APR_DECLARE(apr_uint64_t) apr_atomic_read64(volatile apr_uint64_t *mem)
 {
-#if HAVE__ATOMIC_BUILTINS
+#if HAVE__ATOMIC_BUILTINS64
     return __atomic_load_n(mem, __ATOMIC_SEQ_CST);
 #elif WEAK_MEMORY_ORDERING || APR_SIZEOF_VOIDP < 8
     /* No __sync_load() available => apr_atomic_add64(mem, 0) */
@@ -39,7 +39,7 @@ APR_DECLARE(apr_uint64_t) apr_atomic_rea
 
 APR_DECLARE(void) apr_atomic_set64(volatile apr_uint64_t *mem, apr_uint64_t val)
 {
-#if HAVE__ATOMIC_BUILTINS
+#if HAVE__ATOMIC_BUILTINS64
     __atomic_store_n(mem, val, __ATOMIC_SEQ_CST);
 #elif WEAK_MEMORY_ORDERING || APR_SIZEOF_VOIDP < 8
     /* No __sync_store() available => apr_atomic_xchg64(mem, val) */
@@ -52,7 +52,7 @@ APR_DECLARE(void) apr_atomic_set64(volat
 
 APR_DECLARE(apr_uint64_t) apr_atomic_add64(volatile apr_uint64_t *mem, apr_uint64_t val)
 {
-#if HAVE__ATOMIC_BUILTINS
+#if HAVE__ATOMIC_BUILTINS64
     return __atomic_fetch_add(mem, val, __ATOMIC_SEQ_CST);
 #else
     return __sync_fetch_and_add(mem, val);
@@ -61,7 +61,7 @@ APR_DECLARE(apr_uint64_t) apr_atomic_add
 
 APR_DECLARE(void) apr_atomic_sub64(volatile apr_uint64_t *mem, apr_uint64_t val)
 {
-#if HAVE__ATOMIC_BUILTINS
+#if HAVE__ATOMIC_BUILTINS64
     __atomic_fetch_sub(mem, val, __ATOMIC_SEQ_CST);
 #else
     __sync_fetch_and_sub(mem, val);
@@ -70,7 +70,7 @@ APR_DECLARE(void) apr_atomic_sub64(volat
 
 APR_DECLARE(apr_uint64_t) apr_atomic_inc64(volatile apr_uint64_t *mem)
 {
-#if HAVE__ATOMIC_BUILTINS
+#if HAVE__ATOMIC_BUILTINS64
     return __atomic_fetch_add(mem, 1, __ATOMIC_SEQ_CST);
 #else
     return __sync_fetch_and_add(mem, 1);
@@ -79,7 +79,7 @@ APR_DECLARE(apr_uint64_t) apr_atomic_inc
 
 APR_DECLARE(int) apr_atomic_dec64(volatile apr_uint64_t *mem)
 {
-#if HAVE__ATOMIC_BUILTINS
+#if HAVE__ATOMIC_BUILTINS64
     return __atomic_sub_fetch(mem, 1, __ATOMIC_SEQ_CST);
 #else
     return __sync_sub_and_fetch(mem, 1);
@@ -89,7 +89,7 @@ APR_DECLARE(int) apr_atomic_dec64(volati
 APR_DECLARE(apr_uint64_t) apr_atomic_cas64(volatile apr_uint64_t *mem, apr_uint64_t val,
                                            apr_uint64_t cmp)
 {
-#if HAVE__ATOMIC_BUILTINS
+#if HAVE__ATOMIC_BUILTINS64
     __atomic_compare_exchange_n(mem, &cmp, val, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
     return cmp;
 #else
@@ -99,7 +99,7 @@ APR_DECLARE(apr_uint64_t) apr_atomic_cas
 
 APR_DECLARE(apr_uint64_t) apr_atomic_xchg64(volatile apr_uint64_t *mem, apr_uint64_t val)
 {
-#if HAVE__ATOMIC_BUILTINS
+#if HAVE__ATOMIC_BUILTINS64
     return __atomic_exchange_n(mem, val, __ATOMIC_SEQ_CST);
 #else
     __sync_synchronize();