Diffstat
-rwxr-xr-x  scripts/atomic/fallbacks/acquire              |  9
-rwxr-xr-x  scripts/atomic/fallbacks/add_negative         | 16
-rwxr-xr-x  scripts/atomic/fallbacks/add_unless           | 16
-rwxr-xr-x  scripts/atomic/fallbacks/andnot               |  7
-rwxr-xr-x  scripts/atomic/fallbacks/dec                  |  7
-rwxr-xr-x  scripts/atomic/fallbacks/dec_and_test         | 15
-rwxr-xr-x  scripts/atomic/fallbacks/dec_if_positive      | 15
-rwxr-xr-x  scripts/atomic/fallbacks/dec_unless_positive  | 14
-rwxr-xr-x  scripts/atomic/fallbacks/fence                | 11
-rwxr-xr-x  scripts/atomic/fallbacks/fetch_add_unless     | 23
-rwxr-xr-x  scripts/atomic/fallbacks/inc                  |  7
-rwxr-xr-x  scripts/atomic/fallbacks/inc_and_test         | 15
-rwxr-xr-x  scripts/atomic/fallbacks/inc_not_zero         | 14
-rwxr-xr-x  scripts/atomic/fallbacks/inc_unless_negative  | 14
-rwxr-xr-x  scripts/atomic/fallbacks/read_acquire         | 16
-rwxr-xr-x  scripts/atomic/fallbacks/release              |  8
-rwxr-xr-x  scripts/atomic/fallbacks/set_release          | 12
-rwxr-xr-x  scripts/atomic/fallbacks/sub_and_test         | 16
-rwxr-xr-x  scripts/atomic/fallbacks/try_cmpxchg          | 11
19 files changed, 246 insertions, 0 deletions
diff --git a/scripts/atomic/fallbacks/acquire b/scripts/atomic/fallbacks/acquire
new file mode 100755
index 000000000..ef764085c
--- /dev/null
+++ b/scripts/atomic/fallbacks/acquire
@@ -0,0 +1,9 @@
+cat <<EOF
+static __always_inline ${ret}
+arch_${atomic}_${pfx}${name}${sfx}_acquire(${params})
+{
+	${ret} ret = arch_${atomic}_${pfx}${name}${sfx}_relaxed(${args});
+	__atomic_acquire_fence();
+	return ret;
+}
+EOF
diff --git a/scripts/atomic/fallbacks/add_negative b/scripts/atomic/fallbacks/add_negative
new file mode 100755
index 000000000..15caa2eb2
--- /dev/null
+++ b/scripts/atomic/fallbacks/add_negative
@@ -0,0 +1,16 @@
+cat <<EOF
+/**
+ * arch_${atomic}_add_negative - add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type ${atomic}_t
+ *
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+static __always_inline bool
+arch_${atomic}_add_negative(${int} i, ${atomic}_t *v)
+{
+	return arch_${atomic}_add_return(i, v) < 0;
+}
+EOF
diff --git a/scripts/atomic/fallbacks/add_unless b/scripts/atomic/fallbacks/add_unless
new file mode 100755
index 000000000..9e5159c2c
--- /dev/null
+++ b/scripts/atomic/fallbacks/add_unless
@@ -0,0 +1,16 @@
+cat << EOF
+/**
+ * arch_${atomic}_add_unless - add unless the number is already a given value
+ * @v: pointer of type ${atomic}_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, if @v was not already @u.
+ * Returns true if the addition was done.
+ */
+static __always_inline bool
+arch_${atomic}_add_unless(${atomic}_t *v, ${int} a, ${int} u)
+{
+	return arch_${atomic}_fetch_add_unless(v, a, u) != u;
+}
+EOF
diff --git a/scripts/atomic/fallbacks/andnot b/scripts/atomic/fallbacks/andnot
new file mode 100755
index 000000000..5a42f54a3
--- /dev/null
+++ b/scripts/atomic/fallbacks/andnot
@@ -0,0 +1,7 @@
+cat <<EOF
+static __always_inline ${ret}
+arch_${atomic}_${pfx}andnot${sfx}${order}(${int} i, ${atomic}_t *v)
+{
+	${retstmt}arch_${atomic}_${pfx}and${sfx}${order}(~i, v);
+}
+EOF
diff --git a/scripts/atomic/fallbacks/dec b/scripts/atomic/fallbacks/dec
new file mode 100755
index 000000000..8c144c818
--- /dev/null
+++ b/scripts/atomic/fallbacks/dec
@@ -0,0 +1,7 @@
+cat <<EOF
+static __always_inline ${ret}
+arch_${atomic}_${pfx}dec${sfx}${order}(${atomic}_t *v)
+{
+	${retstmt}arch_${atomic}_${pfx}sub${sfx}${order}(1, v);
+}
+EOF
diff --git a/scripts/atomic/fallbacks/dec_and_test b/scripts/atomic/fallbacks/dec_and_test
new file mode 100755
index 000000000..8549f359b
--- /dev/null
+++ b/scripts/atomic/fallbacks/dec_and_test
@@ -0,0 +1,15 @@
+cat <<EOF
+/**
+ * arch_${atomic}_dec_and_test - decrement and test
+ * @v: pointer of type ${atomic}_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+static __always_inline bool
+arch_${atomic}_dec_and_test(${atomic}_t *v)
+{
+	return arch_${atomic}_dec_return(v) == 0;
+}
+EOF
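These files are shell fragments that emit C through here-documents; the generator substitutes the ${...} variables. As an illustration only, if the acquire template above were instantiated with atomic=atomic, int=int, pfx=fetch_, name=add and an empty sfx (an assumed invocation, not something shown in this patch), it would emit roughly:

static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
	/* Do the operation with relaxed ordering... */
	int ret = arch_atomic_fetch_add_relaxed(i, v);
	/* ...then upgrade it with an acquire barrier after the access. */
	__atomic_acquire_fence();
	return ret;
}
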
diff --git a/scripts/atomic/fallbacks/dec_if_positive b/scripts/atomic/fallbacks/dec_if_positive
new file mode 100755
index 000000000..86bdced34
--- /dev/null
+++ b/scripts/atomic/fallbacks/dec_if_positive
@@ -0,0 +1,15 @@
+cat <<EOF
+static __always_inline ${ret}
+arch_${atomic}_dec_if_positive(${atomic}_t *v)
+{
+	${int} dec, c = arch_${atomic}_read(v);
+
+	do {
+		dec = c - 1;
+		if (unlikely(dec < 0))
+			break;
+	} while (!arch_${atomic}_try_cmpxchg(v, &c, dec));
+
+	return dec;
+}
+EOF
diff --git a/scripts/atomic/fallbacks/dec_unless_positive b/scripts/atomic/fallbacks/dec_unless_positive
new file mode 100755
index 000000000..c531d5afe
--- /dev/null
+++ b/scripts/atomic/fallbacks/dec_unless_positive
@@ -0,0 +1,14 @@
+cat <<EOF
+static __always_inline bool
+arch_${atomic}_dec_unless_positive(${atomic}_t *v)
+{
+	${int} c = arch_${atomic}_read(v);
+
+	do {
+		if (unlikely(c > 0))
+			return false;
+	} while (!arch_${atomic}_try_cmpxchg(v, &c, c - 1));
+
+	return true;
+}
+EOF
diff --git a/scripts/atomic/fallbacks/fence b/scripts/atomic/fallbacks/fence
new file mode 100755
index 000000000..07757d8e3
--- /dev/null
+++ b/scripts/atomic/fallbacks/fence
@@ -0,0 +1,11 @@
+cat <<EOF
+static __always_inline ${ret}
+arch_${atomic}_${pfx}${name}${sfx}(${params})
+{
+	${ret} ret;
+	__atomic_pre_full_fence();
+	ret = arch_${atomic}_${pfx}${name}${sfx}_relaxed(${args});
+	__atomic_post_full_fence();
+	return ret;
+}
+EOF
diff --git a/scripts/atomic/fallbacks/fetch_add_unless b/scripts/atomic/fallbacks/fetch_add_unless
new file mode 100755
index 000000000..68ce13c8b
--- /dev/null
+++ b/scripts/atomic/fallbacks/fetch_add_unless
@@ -0,0 +1,23 @@
+cat << EOF
+/**
+ * arch_${atomic}_fetch_add_unless - add unless the number is already a given value
+ * @v: pointer of type ${atomic}_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as @v was not already @u.
+ * Returns original value of @v
+ */
+static __always_inline ${int}
+arch_${atomic}_fetch_add_unless(${atomic}_t *v, ${int} a, ${int} u)
+{
+	${int} c = arch_${atomic}_read(v);
+
+	do {
+		if (unlikely(c == u))
+			break;
+	} while (!arch_${atomic}_try_cmpxchg(v, &c, c + a));
+
+	return c;
+}
+EOF
diff --git a/scripts/atomic/fallbacks/inc b/scripts/atomic/fallbacks/inc
new file mode 100755
index 000000000..3c2c37391
--- /dev/null
+++ b/scripts/atomic/fallbacks/inc
@@ -0,0 +1,7 @@
+cat <<EOF
+static __always_inline ${ret}
+arch_${atomic}_${pfx}inc${sfx}${order}(${atomic}_t *v)
+{
+	${retstmt}arch_${atomic}_${pfx}add${sfx}${order}(1, v);
+}
+EOF
diff --git a/scripts/atomic/fallbacks/inc_and_test b/scripts/atomic/fallbacks/inc_and_test
new file mode 100755
index 000000000..0cf23fe1e
--- /dev/null
+++ b/scripts/atomic/fallbacks/inc_and_test
@@ -0,0 +1,15 @@
+cat <<EOF
+/**
+ * arch_${atomic}_inc_and_test - increment and test
+ * @v: pointer of type ${atomic}_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+static __always_inline bool
+arch_${atomic}_inc_and_test(${atomic}_t *v)
+{
+	return arch_${atomic}_inc_return(v) == 0;
+}
+EOF
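Several of the templates above (dec_if_positive, dec_unless_positive, fetch_add_unless) share the same read-then-try_cmpxchg retry loop. As a sketch, assuming the usual atomic=atomic, int=int instantiation (an assumption about how the generator drives the template, not part of this hunk), fetch_add_unless would expand to roughly:

static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	/* Snapshot the current value without any ordering guarantees. */
	int c = arch_atomic_read(v);

	do {
		/* Bail out without writing once the forbidden value is seen. */
		if (unlikely(c == u))
			break;
		/* A failed try_cmpxchg refreshes c, so the loop retries with
		 * the value another CPU just stored. */
	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
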
diff --git a/scripts/atomic/fallbacks/inc_not_zero b/scripts/atomic/fallbacks/inc_not_zero
new file mode 100755
index 000000000..ed8a1f562
--- /dev/null
+++ b/scripts/atomic/fallbacks/inc_not_zero
@@ -0,0 +1,14 @@
+cat <<EOF
+/**
+ * arch_${atomic}_inc_not_zero - increment unless the number is zero
+ * @v: pointer of type ${atomic}_t
+ *
+ * Atomically increments @v by 1, if @v is non-zero.
+ * Returns true if the increment was done.
+ */
+static __always_inline bool
+arch_${atomic}_inc_not_zero(${atomic}_t *v)
+{
+	return arch_${atomic}_add_unless(v, 1, 0);
+}
+EOF
diff --git a/scripts/atomic/fallbacks/inc_unless_negative b/scripts/atomic/fallbacks/inc_unless_negative
new file mode 100755
index 000000000..95d8ce482
--- /dev/null
+++ b/scripts/atomic/fallbacks/inc_unless_negative
@@ -0,0 +1,14 @@
+cat <<EOF
+static __always_inline bool
+arch_${atomic}_inc_unless_negative(${atomic}_t *v)
+{
+	${int} c = arch_${atomic}_read(v);
+
+	do {
+		if (unlikely(c < 0))
+			return false;
+	} while (!arch_${atomic}_try_cmpxchg(v, &c, c + 1));
+
+	return true;
+}
+EOF
diff --git a/scripts/atomic/fallbacks/read_acquire b/scripts/atomic/fallbacks/read_acquire
new file mode 100755
index 000000000..a0ea1d26e
--- /dev/null
+++ b/scripts/atomic/fallbacks/read_acquire
@@ -0,0 +1,16 @@
+cat <<EOF
+static __always_inline ${ret}
+arch_${atomic}_read_acquire(const ${atomic}_t *v)
+{
+	${int} ret;
+
+	if (__native_word(${atomic}_t)) {
+		ret = smp_load_acquire(&(v)->counter);
+	} else {
+		ret = arch_${atomic}_read(v);
+		__atomic_acquire_fence();
+	}
+
+	return ret;
+}
+EOF
diff --git a/scripts/atomic/fallbacks/release b/scripts/atomic/fallbacks/release
new file mode 100755
index 000000000..b46feb56d
--- /dev/null
+++ b/scripts/atomic/fallbacks/release
@@ -0,0 +1,8 @@
+cat <<EOF
+static __always_inline ${ret}
+arch_${atomic}_${pfx}${name}${sfx}_release(${params})
+{
+	__atomic_release_fence();
+	${retstmt}arch_${atomic}_${pfx}${name}${sfx}_relaxed(${args});
+}
+EOF
diff --git a/scripts/atomic/fallbacks/set_release b/scripts/atomic/fallbacks/set_release
new file mode 100755
index 000000000..05cdb7f42
--- /dev/null
+++ b/scripts/atomic/fallbacks/set_release
@@ -0,0 +1,12 @@
+cat <<EOF
+static __always_inline void
+arch_${atomic}_set_release(${atomic}_t *v, ${int} i)
+{
+	if (__native_word(${atomic}_t)) {
+		smp_store_release(&(v)->counter, i);
+	} else {
+		__atomic_release_fence();
+		arch_${atomic}_set(v, i);
+	}
+}
+EOF
diff --git a/scripts/atomic/fallbacks/sub_and_test b/scripts/atomic/fallbacks/sub_and_test
new file mode 100755
index 000000000..260f37341
--- /dev/null
+++ b/scripts/atomic/fallbacks/sub_and_test
@@ -0,0 +1,16 @@
+cat <<EOF
+/**
+ * arch_${atomic}_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer of type ${atomic}_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+static __always_inline bool
+arch_${atomic}_sub_and_test(${int} i, ${atomic}_t *v)
+{
+	return arch_${atomic}_sub_return(i, v) == 0;
+}
+EOF
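The read_acquire and set_release templates above choose between a single load-acquire/store-release and a plain access paired with a separate fence, depending on whether the type is a native machine word. Assuming the same atomic=atomic, int=int instantiation as before (an assumption for illustration), read_acquire would emit roughly:

static __always_inline int
arch_atomic_read_acquire(const atomic_t *v)
{
	int ret;

	if (__native_word(atomic_t)) {
		/* Native word size: one load-acquire is sufficient. */
		ret = smp_load_acquire(&(v)->counter);
	} else {
		/* Otherwise fall back to a plain read plus an acquire fence. */
		ret = arch_atomic_read(v);
		__atomic_acquire_fence();
	}

	return ret;
}
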
diff --git a/scripts/atomic/fallbacks/try_cmpxchg b/scripts/atomic/fallbacks/try_cmpxchg
new file mode 100755
index 000000000..890f850ed
--- /dev/null
+++ b/scripts/atomic/fallbacks/try_cmpxchg
@@ -0,0 +1,11 @@
+cat <<EOF
+static __always_inline bool
+arch_${atomic}_try_cmpxchg${order}(${atomic}_t *v, ${int} *old, ${int} new)
+{
+	${int} r, o = *old;
+	r = arch_${atomic}_cmpxchg${order}(v, o, new);
+	if (unlikely(r != o))
+		*old = r;
+	return likely(r == o);
+}
+EOF
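The try_cmpxchg fallback wraps cmpxchg so that a failed exchange writes the observed value back through *old and returns false, which is what lets the loops in the earlier templates retry without re-reading. A hypothetical caller (the function name and the limit policy are made up for illustration) might use it like this:

static inline bool example_inc_below(atomic_t *v, int limit)
{
	int c = arch_atomic_read(v);

	do {
		/* Hypothetical policy: refuse to go past the limit. */
		if (c >= limit)
			return false;
		/* On failure, c now holds the latest value, so just loop. */
	} while (!arch_atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}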