author     Daniel Baumann <daniel.baumann@progress-linux.org>  2021-07-23 11:24:09 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2021-07-23 11:24:09 +0000
commit     e36b37583bebd229102f46c4ed7d2f6fad8697d4 (patch)
tree       73937b6f051fcaaa1ccbdfbaa9f3a1f36bbedb9e /include/gcc/arm
parent     Initial commit. (diff)
download   ck-e36b37583bebd229102f46c4ed7d2f6fad8697d4.tar.xz
           ck-e36b37583bebd229102f46c4ed7d2f6fad8697d4.zip
Adding upstream version 0.6.0. (tag: upstream/0.6.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'include/gcc/arm')
-rw-r--r--  include/gcc/arm/ck_f_pr.h  162
-rw-r--r--  include/gcc/arm/ck_pr.h    563
2 files changed, 725 insertions, 0 deletions
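
Note: the headers added by this commit (full diff below) are only the ARM backend; applications consume them through the architecture-independent <ck_pr.h> front end, which selects this port on arm targets. The following is a small, illustrative usage sketch, not part of the commit, that assumes a Concurrency Kit installation on the include path. The counter/ceiling variables and helper names are hypothetical; ck_pr_faa_uint, ck_pr_load_uint, ck_pr_cas_uint_value and ck_pr_fence_memory are the documented ck_pr operations that ck_f_pr.h below advertises for this port.

/*
 * Usage sketch (illustrative only, not part of this commit).
 * Build roughly as: cc demo.c -o demo
 */
#include <stdio.h>
#include <ck_pr.h>

static unsigned int counter = 0;    /* shared event counter          */
static unsigned int ceiling = 0;    /* shared monotonic maximum seen */

/* Atomically bump the counter; returns the value before the increment. */
static unsigned int
counter_bump(void)
{

    return ck_pr_faa_uint(&counter, 1);
}

/* Raise the ceiling to candidate unless another thread already went higher. */
static void
ceiling_raise(unsigned int candidate)
{
    unsigned int snapshot = ck_pr_load_uint(&ceiling);

    /* CAS retry loop: on failure the *_value variant refreshes snapshot. */
    while (snapshot < candidate) {
        if (ck_pr_cas_uint_value(&ceiling, snapshot, candidate, &snapshot))
            break;
    }

    /* Full fence so subsequent operations observe the published values. */
    ck_pr_fence_memory();
    return;
}

int
main(void)
{

    counter_bump();
    counter_bump();
    ceiling_raise(42);
    printf("counter=%u ceiling=%u\n",
        ck_pr_load_uint(&counter), ck_pr_load_uint(&ceiling));
    return 0;
}

On ARMv7 these calls expand to the ldrex/strex loops and dmb barriers defined in ck_pr.h below; on ARMv6 the barriers fall back to the equivalent CP15 mcr instructions.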
diff --git a/include/gcc/arm/ck_f_pr.h b/include/gcc/arm/ck_f_pr.h
new file mode 100644
index 0000000..c508f85
--- /dev/null
+++ b/include/gcc/arm/ck_f_pr.h
@@ -0,0 +1,162 @@
+/* DO NOT EDIT. This is auto-generated from feature.sh */
+#define CK_F_PR_ADD_16
+#define CK_F_PR_ADD_32
+#define CK_F_PR_ADD_8
+#define CK_F_PR_ADD_CHAR
+#define CK_F_PR_ADD_INT
+#define CK_F_PR_ADD_PTR
+#define CK_F_PR_ADD_SHORT
+#define CK_F_PR_ADD_UINT
+#define CK_F_PR_AND_16
+#define CK_F_PR_AND_32
+#define CK_F_PR_AND_8
+#define CK_F_PR_AND_CHAR
+#define CK_F_PR_AND_INT
+#define CK_F_PR_AND_PTR
+#define CK_F_PR_AND_SHORT
+#define CK_F_PR_AND_UINT
+#define CK_F_PR_BARRIER
+#define CK_F_PR_CAS_16
+#define CK_F_PR_CAS_16_VALUE
+#define CK_F_PR_CAS_32
+#define CK_F_PR_CAS_32_VALUE
+#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)
+#define CK_F_PR_CAS_64
+#define CK_F_PR_CAS_64_VALUE
+#define CK_F_PR_CAS_DOUBLE
+#define CK_F_PR_CAS_DOUBLE_VALUE
+#endif
+#define CK_F_PR_CAS_8
+#define CK_F_PR_CAS_8_VALUE
+#define CK_F_PR_CAS_CHAR
+#define CK_F_PR_CAS_CHAR_VALUE
+#define CK_F_PR_CAS_INT
+#define CK_F_PR_CAS_INT_VALUE
+#define CK_F_PR_CAS_PTR
+#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)
+#define CK_F_PR_CAS_PTR_2
+#define CK_F_PR_CAS_PTR_2_VALUE
+#endif
+#define CK_F_PR_CAS_PTR_VALUE
+#define CK_F_PR_CAS_SHORT
+#define CK_F_PR_CAS_SHORT_VALUE
+#define CK_F_PR_CAS_UINT
+#define CK_F_PR_CAS_UINT_VALUE
+#define CK_F_PR_DEC_16
+#define CK_F_PR_DEC_32
+#define CK_F_PR_DEC_8
+#define CK_F_PR_DEC_CHAR
+#define CK_F_PR_DEC_INT
+#define CK_F_PR_DEC_PTR
+#define CK_F_PR_DEC_SHORT
+#define CK_F_PR_DEC_UINT
+#define CK_F_PR_FAA_16
+#define CK_F_PR_FAA_32
+#define CK_F_PR_FAA_8
+#define CK_F_PR_FAA_CHAR
+#define CK_F_PR_FAA_INT
+#define CK_F_PR_FAA_PTR
+#define CK_F_PR_FAA_SHORT
+#define CK_F_PR_FAA_UINT
+#define CK_F_PR_FAS_16
+#define CK_F_PR_FAS_32
+#define CK_F_PR_FAS_8
+#define CK_F_PR_FAS_CHAR
+#define CK_F_PR_FAS_INT
+#define CK_F_PR_FAS_PTR
+#define CK_F_PR_FAS_SHORT
+#define CK_F_PR_FAS_UINT
+#define CK_F_PR_FENCE_ATOMIC
+#define CK_F_PR_FENCE_ATOMIC_LOAD
+#define CK_F_PR_FENCE_ATOMIC_STORE
+#define CK_F_PR_FENCE_LOAD
+#define CK_F_PR_FENCE_LOAD_ATOMIC
+#define CK_F_PR_FENCE_LOAD_DEPENDS
+#define CK_F_PR_FENCE_LOAD_STORE
+#define CK_F_PR_FENCE_MEMORY
+#define CK_F_PR_FENCE_STORE
+#define CK_F_PR_FENCE_STORE_ATOMIC
+#define CK_F_PR_FENCE_STORE_LOAD
+#define CK_F_PR_FENCE_STRICT_ATOMIC
+#define CK_F_PR_FENCE_STRICT_ATOMIC_LOAD
+#define CK_F_PR_FENCE_STRICT_ATOMIC_STORE
+#define CK_F_PR_FENCE_STRICT_LOAD
+#define CK_F_PR_FENCE_STRICT_LOAD_ATOMIC
+#define CK_F_PR_FENCE_STRICT_LOAD_STORE
+#define CK_F_PR_FENCE_STRICT_MEMORY
+#define CK_F_PR_FENCE_STRICT_STORE
+#define CK_F_PR_FENCE_STRICT_STORE_ATOMIC
+#define CK_F_PR_FENCE_STRICT_STORE_LOAD
+#define CK_F_PR_INC_16
+#define CK_F_PR_INC_32
+#define CK_F_PR_INC_8
+#define CK_F_PR_INC_CHAR
+#define CK_F_PR_INC_INT
+#define CK_F_PR_INC_PTR
+#define CK_F_PR_INC_SHORT
+#define CK_F_PR_INC_UINT
+#define CK_F_PR_LOAD_16
+#define CK_F_PR_LOAD_32
+#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)
+#define CK_F_PR_LOAD_64
+#define CK_F_PR_LOAD_DOUBLE
+#endif
+#define CK_F_PR_LOAD_8
+#define CK_F_PR_LOAD_CHAR
+#define CK_F_PR_LOAD_INT
+#define CK_F_PR_LOAD_PTR
+#define CK_F_PR_LOAD_SHORT
+#define CK_F_PR_LOAD_UINT
+#define CK_F_PR_NEG_16
+#define CK_F_PR_NEG_32
+#define CK_F_PR_NEG_8
+#define CK_F_PR_NEG_CHAR
+#define CK_F_PR_NEG_INT
+#define CK_F_PR_NEG_PTR
+#define CK_F_PR_NEG_SHORT
+#define CK_F_PR_NEG_UINT
+#define CK_F_PR_NOT_16
+#define CK_F_PR_NOT_32
+#define CK_F_PR_NOT_8
+#define CK_F_PR_NOT_CHAR
+#define CK_F_PR_NOT_INT
+#define CK_F_PR_NOT_PTR
+#define CK_F_PR_NOT_SHORT
+#define CK_F_PR_NOT_UINT
+#define CK_F_PR_OR_16
+#define CK_F_PR_OR_32
+#define CK_F_PR_OR_8
+#define CK_F_PR_OR_CHAR
+#define CK_F_PR_OR_INT
+#define CK_F_PR_OR_PTR
+#define CK_F_PR_OR_SHORT
+#define CK_F_PR_OR_UINT
+#define CK_F_PR_STALL
+#define CK_F_PR_STORE_16
+#define CK_F_PR_STORE_32
+#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)
+#define CK_F_PR_STORE_64
+#define CK_F_PR_STORE_DOUBLE
+#endif
+#define CK_F_PR_STORE_8
+#define CK_F_PR_STORE_CHAR
+#define CK_F_PR_STORE_INT
+#define CK_F_PR_STORE_PTR
+#define CK_F_PR_STORE_SHORT
+#define CK_F_PR_STORE_UINT
+#define CK_F_PR_SUB_16
+#define CK_F_PR_SUB_32
+#define CK_F_PR_SUB_8
+#define CK_F_PR_SUB_CHAR
+#define CK_F_PR_SUB_INT
+#define CK_F_PR_SUB_PTR
+#define CK_F_PR_SUB_SHORT
+#define CK_F_PR_SUB_UINT
+#define CK_F_PR_XOR_16
+#define CK_F_PR_XOR_32
+#define CK_F_PR_XOR_8
+#define CK_F_PR_XOR_CHAR
+#define CK_F_PR_XOR_INT
+#define CK_F_PR_XOR_PTR
+#define CK_F_PR_XOR_SHORT
+#define CK_F_PR_XOR_UINT
diff --git a/include/gcc/arm/ck_pr.h b/include/gcc/arm/ck_pr.h
new file mode 100644
index 0000000..841ca21
--- /dev/null
+++ b/include/gcc/arm/ck_pr.h
@@ -0,0 +1,563 @@
+/*
+ * Copyright 2009-2015 Samy Al Bahra.
+ * Copyright 2013-2015 Olivier Houchard.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#ifndef CK_PR_ARM_H
+#define CK_PR_ARM_H
+
+#ifndef CK_PR_H
+#error Do not include this file directly, use ck_pr.h
+#endif
+
+#include <ck_cc.h>
+#include <ck_md.h>
+
+/*
+ * The following represent supported atomic operations.
+ * These operations may be emulated.
+ */
+#include "ck_f_pr.h"
+
+/*
+ * Minimum interface requirement met.
+ */
+#define CK_F_PR
+
+CK_CC_INLINE static void
+ck_pr_stall(void)
+{
+
+    __asm__ __volatile__("" ::: "memory");
+    return;
+}
+
+#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)
+#define CK_ISB __asm __volatile("isb" : : "r" (0) : "memory")
+#define CK_DMB __asm __volatile("dmb" : : "r" (0) : "memory")
+#define CK_DSB __asm __volatile("dsb" : : "r" (0) : "memory")
+/* FreeBSD's toolchain doesn't accept dmb st, so use the opcode instead */
+#ifdef __FreeBSD__
+#define CK_DMB_ST __asm __volatile(".word 0xf57ff05e" : : "r" (0) : "memory")
+#else
+#define CK_DMB_ST __asm __volatile("dmb st" : : "r" (0) : "memory")
+#endif /* __FreeBSD__ */
+#else
+/* armv6 doesn't have dsb/dmb/isb, and no way to wait only for stores */
+#define CK_ISB \
+    __asm __volatile("mcr p15, 0, %0, c7, c5, 4" : : "r" (0) : "memory")
+#define CK_DSB \
+    __asm __volatile("mcr p15, 0, %0, c7, c10, 4" : : "r" (0) : "memory")
+#define CK_DMB \
+    __asm __volatile("mcr p15, 0, %0, c7, c10, 5" : : "r" (0) : "memory")
+#define CK_DMB_ST CK_DMB
+#endif
+
+#define CK_PR_FENCE(T, I) \
+    CK_CC_INLINE static void \
+    ck_pr_fence_strict_##T(void) \
+    { \
+        I; \
+    }
+
+CK_PR_FENCE(atomic, CK_DMB_ST)
+CK_PR_FENCE(atomic_store, CK_DMB_ST)
+CK_PR_FENCE(atomic_load, CK_DMB_ST)
+CK_PR_FENCE(store_atomic, CK_DMB_ST)
+CK_PR_FENCE(load_atomic, CK_DMB)
+CK_PR_FENCE(store, CK_DMB_ST)
+CK_PR_FENCE(store_load, CK_DMB)
+CK_PR_FENCE(load, CK_DMB)
+CK_PR_FENCE(load_store, CK_DMB)
+CK_PR_FENCE(memory, CK_DMB)
+CK_PR_FENCE(acquire, CK_DMB)
+CK_PR_FENCE(release, CK_DMB)
+CK_PR_FENCE(acqrel, CK_DMB)
+CK_PR_FENCE(lock, CK_DMB)
+CK_PR_FENCE(unlock, CK_DMB)
+
+#undef CK_PR_FENCE
+
+#undef CK_ISB
+#undef CK_DSB
+#undef CK_DMB
+#undef CK_DMB_ST
+
+#define CK_PR_LOAD(S, M, T, C, I) \
+    CK_CC_INLINE static T \
+    ck_pr_md_load_##S(const M *target) \
+    { \
+        long r = 0; \
+        __asm__ __volatile__(I " %0, [%1];" \
+            : "=r" (r) \
+            : "r" (target) \
+            : "memory"); \
+        return ((T)r); \
+    }
+
+CK_PR_LOAD(ptr, void, void *, uint32_t, "ldr")
+
+#define CK_PR_LOAD_S(S, T, I) CK_PR_LOAD(S, T, T, T, I)
+
+CK_PR_LOAD_S(32, uint32_t, "ldr")
+CK_PR_LOAD_S(16, uint16_t, "ldrh")
+CK_PR_LOAD_S(8, uint8_t, "ldrb")
+CK_PR_LOAD_S(uint, unsigned int, "ldr")
+CK_PR_LOAD_S(int, int, "ldr")
+CK_PR_LOAD_S(short, short, "ldrh")
+CK_PR_LOAD_S(char, char, "ldrb")
+
+#undef CK_PR_LOAD_S
+#undef CK_PR_LOAD
+
+#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)
+
+#define CK_PR_DOUBLE_LOAD(T, N) \
+CK_CC_INLINE static T \
+ck_pr_md_load_##N(const T *target) \
+{ \
+    register T ret; \
+ \
+    __asm __volatile("ldrexd %0, [%1]" \
+        : "=&r" (ret) \
+        : "r" (target) \
+        : "memory", "cc"); \
+    return (ret); \
+}
+
+CK_PR_DOUBLE_LOAD(uint64_t, 64)
+#ifndef CK_PR_DISABLE_DOUBLE
+CK_PR_DOUBLE_LOAD(double, double)
+#endif
+#undef CK_PR_DOUBLE_LOAD
+#endif
+
+#define CK_PR_STORE(S, M, T, C, I) \
+    CK_CC_INLINE static void \
+    ck_pr_md_store_##S(M *target, T v) \
+    { \
+        __asm__ __volatile__(I " %1, [%0]" \
+            : \
+            : "r" (target), \
+              "r" (v) \
+            : "memory"); \
+        return; \
+    }
+
+CK_PR_STORE(ptr, void, const void *, uint32_t, "str")
+
+#define CK_PR_STORE_S(S, T, I) CK_PR_STORE(S, T, T, T, I)
+
+CK_PR_STORE_S(32, uint32_t, "str")
+CK_PR_STORE_S(16, uint16_t, "strh")
+CK_PR_STORE_S(8, uint8_t, "strb")
+CK_PR_STORE_S(uint, unsigned int, "str")
+CK_PR_STORE_S(int, int, "str")
+CK_PR_STORE_S(short, short, "strh")
+CK_PR_STORE_S(char, char, "strb")
+
+#undef CK_PR_STORE_S
+#undef CK_PR_STORE
+
+#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)
+
+#define CK_PR_DOUBLE_STORE(T, N) \
+CK_CC_INLINE static void \
+ck_pr_md_store_##N(const T *target, T value) \
+{ \
+    T tmp; \
+    uint32_t flag; \
+    __asm __volatile("1: \n" \
+        "ldrexd %0, [%2]\n" \
+        "strexd %1, %3, [%2]\n" \
+        "teq %1, #0\n" \
+        "it ne \n" \
+        "bne 1b\n" \
+        : "=&r" (tmp), "=&r" (flag) \
+        : "r" (target), "r" (value) \
+        : "memory", "cc"); \
+}
+
+CK_PR_DOUBLE_STORE(uint64_t, 64)
+#ifndef CK_PR_DISABLE_DOUBLE
+CK_PR_DOUBLE_STORE(double, double)
+#endif
+
+#undef CK_PR_DOUBLE_STORE
+
+#define CK_PR_DOUBLE_CAS_VALUE(T, N) \
+CK_CC_INLINE static bool \
+ck_pr_cas_##N##_value(T *target, T compare, T set, T *value) \
+{ \
+    T previous; \
+    int tmp; \
+ \
+    __asm__ __volatile__("1:" \
+        "ldrexd %0, [%4];" \
+        "cmp %Q0, %Q2;" \
+        "ittt eq;" \
+        "cmpeq %R0, %R2;" \
+        "strexdeq %1, %3, [%4];" \
+        "cmpeq %1, #1;" \
+        "beq 1b;" \
+        : "=&r" (previous), "=&r" (tmp) \
+        : "r" (compare), "r" (set), \
+          "r" (target) \
+        : "memory", "cc"); \
+    *value = previous; \
+    return (*value == compare); \
+}
+
+CK_PR_DOUBLE_CAS_VALUE(uint64_t, 64)
+#ifndef CK_PR_DISABLE_DOUBLE
+CK_PR_DOUBLE_CAS_VALUE(double, double)
+#endif
+
+#undef CK_PR_DOUBLE_CAS_VALUE
+
+CK_CC_INLINE static bool
+ck_pr_cas_ptr_2_value(void *target, void *compare, void *set, void *value)
+{
+    uint32_t *_compare = CK_CPP_CAST(uint32_t *, compare);
+    uint32_t *_set = CK_CPP_CAST(uint32_t *, set);
+    uint64_t __compare = ((uint64_t)_compare[0]) | ((uint64_t)_compare[1] << 32);
+    uint64_t __set = ((uint64_t)_set[0]) | ((uint64_t)_set[1] << 32);
+
+    return (ck_pr_cas_64_value(CK_CPP_CAST(uint64_t *, target),
+                               __compare,
+                               __set,
+                               CK_CPP_CAST(uint64_t *, value)));
+}
+
+#define CK_PR_DOUBLE_CAS(T, N) \
+CK_CC_INLINE static bool \
+ck_pr_cas_##N(T *target, T compare, T set) \
+{ \
+    int ret; \
+    T tmp; \
+ \
+    __asm__ __volatile__("1:" \
+        "mov %0, #0;" \
+        "ldrexd %1, [%4];" \
+        "cmp %Q1, %Q2;" \
+        "itttt eq;" \
+        "cmpeq %R1, %R2;" \
+        "strexdeq %1, %3, [%4];" \
+        "moveq %0, #1;" \
+        "cmpeq %1, #1;" \
+        "beq 1b;" \
+        : "=&r" (ret), "=&r" (tmp) \
+        : "r" (compare), "r" (set), \
+          "r" (target) \
+        : "memory", "cc"); \
+ \
+    return (ret); \
+}
+
+CK_PR_DOUBLE_CAS(uint64_t, 64)
+#ifndef CK_PR_DISABLE_DOUBLE
+CK_PR_DOUBLE_CAS(double, double)
+#endif
+
+CK_CC_INLINE static bool
+ck_pr_cas_ptr_2(void *target, void *compare, void *set)
+{
+    uint32_t *_compare = CK_CPP_CAST(uint32_t *, compare);
+    uint32_t *_set = CK_CPP_CAST(uint32_t *, set);
+    uint64_t __compare = ((uint64_t)_compare[0]) | ((uint64_t)_compare[1] << 32);
+    uint64_t __set = ((uint64_t)_set[0]) | ((uint64_t)_set[1] << 32);
+    return (ck_pr_cas_64(CK_CPP_CAST(uint64_t *, target),
+                         __compare,
+                         __set));
+}
+
+#endif
+
+CK_CC_INLINE static bool
+ck_pr_cas_ptr_value(void *target, void *compare, void *set, void *value)
+{
+    void *previous, *tmp;
+    __asm__ __volatile__("1:"
+        "ldrex %0, [%2];"
+        "cmp %0, %4;"
+        "itt eq;"
+        "strexeq %1, %3, [%2];"
+        "cmpeq %1, #1;"
+        "beq 1b;"
+        : "=&r" (previous),
+          "=&r" (tmp)
+        : "r" (target),
+          "r" (set),
+          "r" (compare)
+        : "memory", "cc");
+    *(void **)value = previous;
+    return (previous == compare);
+}
+
+CK_CC_INLINE static bool
+ck_pr_cas_ptr(void *target, void *compare, void *set)
+{
+    void *previous, *tmp;
+    __asm__ __volatile__("1:"
+        "ldrex %0, [%2];"
+        "cmp %0, %4;"
+        "itt eq;"
+        "strexeq %1, %3, [%2];"
+        "cmpeq %1, #1;"
+        "beq 1b;"
+        : "=&r" (previous),
+          "=&r" (tmp)
+        : "r" (target),
+          "r" (set),
+          "r" (compare)
+        : "memory", "cc");
+    return (previous == compare);
+}
+
+#define CK_PR_CAS(N, T, W) \
+    CK_CC_INLINE static bool \
+    ck_pr_cas_##N##_value(T *target, T compare, T set, T *value) \
+    { \
+        T previous = 0, tmp = 0; \
+        __asm__ __volatile__("1:" \
+            "ldrex" W " %0, [%2];" \
+            "cmp %0, %4;" \
+            "itt eq;" \
+            "strex" W "eq %1, %3, [%2];" \
+            "cmpeq %1, #1;" \
+            "beq 1b;" \
+            /* \
+             * Using "+&" instead of "=&" to avoid bogus \
+             * clang warnings. \
+             */ \
+            : "+&r" (previous), \
+              "+&r" (tmp) \
+            : "r" (target), \
+              "r" (set), \
+              "r" (compare) \
+            : "memory", "cc"); \
+        *value = previous; \
+        return (previous == compare); \
+    } \
+    CK_CC_INLINE static bool \
+    ck_pr_cas_##N(T *target, T compare, T set) \
+    { \
+        T previous = 0, tmp = 0; \
+        __asm__ __volatile__("1:" \
+            "ldrex" W " %0, [%2];" \
+            "cmp %0, %4;" \
+            "itt eq;" \
+            "strex" W "eq %1, %3, [%2];" \
+            "cmpeq %1, #1;" \
+            "beq 1b;" \
+            : "+&r" (previous), \
+              "+&r" (tmp) \
+            : "r" (target), \
+              "r" (set), \
+              "r" (compare) \
+            : "memory", "cc"); \
+        return (previous == compare); \
+    }
+
+CK_PR_CAS(32, uint32_t, "")
+CK_PR_CAS(uint, unsigned int, "")
+CK_PR_CAS(int, int, "")
+CK_PR_CAS(16, uint16_t, "h")
+CK_PR_CAS(8, uint8_t, "b")
+CK_PR_CAS(short, short, "h")
+CK_PR_CAS(char, char, "b")
+
+
+#undef CK_PR_CAS
+
+#define CK_PR_FAS(N, M, T, W) \
+    CK_CC_INLINE static T \
+    ck_pr_fas_##N(M *target, T v) \
+    { \
+        T previous = 0; \
+        T tmp = 0; \
+        __asm__ __volatile__("1:" \
+            "ldrex" W " %0, [%2];" \
+            "strex" W " %1, %3, [%2];" \
+            "cmp %1, #0;" \
+            "bne 1b;" \
+            : "+&r" (previous), \
+              "+&r" (tmp) \
+            : "r" (target), \
+              "r" (v) \
+            : "memory", "cc"); \
+        return (previous); \
+    }
+
+CK_PR_FAS(32, uint32_t, uint32_t, "")
+CK_PR_FAS(ptr, void, void *, "")
+CK_PR_FAS(int, int, int, "")
+CK_PR_FAS(uint, unsigned int, unsigned int, "")
+CK_PR_FAS(16, uint16_t, uint16_t, "h")
+CK_PR_FAS(8, uint8_t, uint8_t, "b")
+CK_PR_FAS(short, short, short, "h")
+CK_PR_FAS(char, char, char, "b")
+
+
+#undef CK_PR_FAS
+
+#define CK_PR_UNARY(O, N, M, T, I, W) \
+    CK_CC_INLINE static void \
+    ck_pr_##O##_##N(M *target) \
+    { \
+        T previous = 0; \
+        T tmp = 0; \
+        __asm__ __volatile__("1:" \
+            "ldrex" W " %0, [%2];" \
+            I ";" \
+            "strex" W " %1, %0, [%2];" \
+            "cmp %1, #0;" \
+            "bne 1b;" \
+            : "+&r" (previous), \
+              "+&r" (tmp) \
+            : "r" (target) \
+            : "memory", "cc"); \
+        return; \
+    }
+
+CK_PR_UNARY(inc, ptr, void, void *, "add %0, %0, #1", "")
+CK_PR_UNARY(dec, ptr, void, void *, "sub %0, %0, #1", "")
+CK_PR_UNARY(not, ptr, void, void *, "mvn %0, %0", "")
+CK_PR_UNARY(neg, ptr, void, void *, "neg %0, %0", "")
+
+#define CK_PR_UNARY_S(S, T, W) \
+    CK_PR_UNARY(inc, S, T, T, "add %0, %0, #1", W) \
+    CK_PR_UNARY(dec, S, T, T, "sub %0, %0, #1", W) \
+    CK_PR_UNARY(not, S, T, T, "mvn %0, %0", W) \
+    CK_PR_UNARY(neg, S, T, T, "neg %0, %0", W) \
+
+CK_PR_UNARY_S(32, uint32_t, "")
+CK_PR_UNARY_S(uint, unsigned int, "")
+CK_PR_UNARY_S(int, int, "")
+CK_PR_UNARY_S(16, uint16_t, "h")
+CK_PR_UNARY_S(8, uint8_t, "b")
+CK_PR_UNARY_S(short, short, "h")
+CK_PR_UNARY_S(char, char, "b")
+
+#undef CK_PR_UNARY_S
+#undef CK_PR_UNARY
+
+#define CK_PR_BINARY(O, N, M, T, I, W) \
+    CK_CC_INLINE static void \
+    ck_pr_##O##_##N(M *target, T delta) \
+    { \
+        T previous = 0; \
+        T tmp = 0; \
+        __asm__ __volatile__("1:" \
+            "ldrex" W " %0, [%2];" \
+            I " %0, %0, %3;" \
+            "strex" W " %1, %0, [%2];" \
+            "cmp %1, #0;" \
+            "bne 1b;" \
+            : "+&r" (previous), \
+              "+&r" (tmp) \
+            : "r" (target), \
+              "r" (delta) \
+            : "memory", "cc"); \
+        return; \
+    }
+
+CK_PR_BINARY(and, ptr, void, uintptr_t, "and", "")
+CK_PR_BINARY(add, ptr, void, uintptr_t, "add", "")
+CK_PR_BINARY(or, ptr, void, uintptr_t, "orr", "")
+CK_PR_BINARY(sub, ptr, void, uintptr_t, "sub", "")
+CK_PR_BINARY(xor, ptr, void, uintptr_t, "eor", "")
+
+#define CK_PR_BINARY_S(S, T, W) \
+    CK_PR_BINARY(and, S, T, T, "and", W) \
+    CK_PR_BINARY(add, S, T, T, "add", W) \
+    CK_PR_BINARY(or, S, T, T, "orr", W) \
+    CK_PR_BINARY(sub, S, T, T, "sub", W) \
+    CK_PR_BINARY(xor, S, T, T, "eor", W)
+
+CK_PR_BINARY_S(32, uint32_t, "")
+CK_PR_BINARY_S(uint, unsigned int, "")
+CK_PR_BINARY_S(int, int, "")
+CK_PR_BINARY_S(16, uint16_t, "h")
+CK_PR_BINARY_S(8, uint8_t, "b")
+CK_PR_BINARY_S(short, short, "h")
+CK_PR_BINARY_S(char, char, "b")
+
+#undef CK_PR_BINARY_S
+#undef CK_PR_BINARY
+
+CK_CC_INLINE static void *
+ck_pr_faa_ptr(void *target, uintptr_t delta)
+{
+    uintptr_t previous, r, tmp;
+
+    __asm__ __volatile__("1:"
+        "ldrex %0, [%3];"
+        "add %1, %4, %0;"
+        "strex %2, %1, [%3];"
+        "cmp %2, #0;"
+        "bne 1b;"
+        : "=&r" (previous),
+          "=&r" (r),
+          "=&r" (tmp)
+        : "r" (target),
+          "r" (delta)
+        : "memory", "cc");
+
+    return (void *)(previous);
+}
+
+#define CK_PR_FAA(S, T, W) \
+    CK_CC_INLINE static T \
+    ck_pr_faa_##S(T *target, T delta) \
+    { \
+        T previous = 0, r = 0, tmp = 0; \
+        __asm__ __volatile__("1:" \
+            "ldrex" W " %0, [%3];" \
+            "add %1, %4, %0;" \
+            "strex" W " %2, %1, [%3];" \
+            "cmp %2, #0;" \
+            "bne 1b;" \
+            : "+&r" (previous), \
+              "+&r" (r), \
+              "+&r" (tmp) \
+            : "r" (target), \
+              "r" (delta) \
+            : "memory", "cc"); \
+        return (previous); \
+    }
+
+CK_PR_FAA(32, uint32_t, "")
+CK_PR_FAA(uint, unsigned int, "")
+CK_PR_FAA(int, int, "")
+CK_PR_FAA(16, uint16_t, "h")
+CK_PR_FAA(8, uint8_t, "b")
+CK_PR_FAA(short, short, "h")
+CK_PR_FAA(char, char, "b")
+
+#undef CK_PR_FAA
+
+#endif /* CK_PR_ARM_H */