From 2c3c1048746a4622d8c89a29670120dc8fab93c4 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Sun, 7 Apr 2024 20:49:45 +0200
Subject: Adding upstream version 6.1.76.

Signed-off-by: Daniel Baumann
---
 arch/mips/include/asm/local.h | 175 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 175 insertions(+)
 create mode 100644 arch/mips/include/asm/local.h

diff --git a/arch/mips/include/asm/local.h b/arch/mips/include/asm/local.h
new file mode 100644
index 000000000..08366b1fd
--- /dev/null
+++ b/arch/mips/include/asm/local.h
@@ -0,0 +1,175 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef _ARCH_MIPS_LOCAL_H
+#define _ARCH_MIPS_LOCAL_H
+
+#include <linux/percpu.h>
+#include <linux/bitops.h>
+#include <linux/atomic.h>
+#include <asm/asm.h>
+#include <asm/cmpxchg.h>
+#include <asm/war.h>
+
+typedef struct
+{
+	atomic_long_t a;
+} local_t;
+
+#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }
+
+#define local_read(l)	atomic_long_read(&(l)->a)
+#define local_set(l, i)	atomic_long_set(&(l)->a, (i))
+
+#define local_add(i, l)	atomic_long_add((i), (&(l)->a))
+#define local_sub(i, l)	atomic_long_sub((i), (&(l)->a))
+#define local_inc(l)	atomic_long_inc(&(l)->a)
+#define local_dec(l)	atomic_long_dec(&(l)->a)
+
+/*
+ * Same as above, but return the result value
+ */
+static __inline__ long local_add_return(long i, local_t * l)
+{
+	unsigned long result;
+
+	if (kernel_uses_llsc) {
+		unsigned long temp;
+
+		__asm__ __volatile__(
+		"	.set	push					\n"
+		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
+			__SYNC(full, loongson3_war) "			\n"
+		"1:"	__stringify(LONG_LL)	"	%1, %2		\n"
+			__stringify(LONG_ADDU)	"	%0, %1, %3	\n"
+			__stringify(LONG_SC)	"	%0, %2		\n"
+			__stringify(SC_BEQZ)	"	%0, 1b		\n"
+			__stringify(LONG_ADDU)	"	%0, %1, %3	\n"
+		"	.set	pop					\n"
+		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
+		: "Ir" (i), "m" (l->a.counter)
+		: "memory");
+	} else {
+		unsigned long flags;
+
+		local_irq_save(flags);
+		result = l->a.counter;
+		result += i;
+		l->a.counter = result;
+		local_irq_restore(flags);
+	}
+
+	return result;
+}
+
+static __inline__ long local_sub_return(long i, local_t * l)
+{
+	unsigned long result;
+
+	if (kernel_uses_llsc) {
+		unsigned long temp;
+
+		__asm__ __volatile__(
+		"	.set	push					\n"
+		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
+			__SYNC(full, loongson3_war) "			\n"
+		"1:"	__stringify(LONG_LL)	"	%1, %2		\n"
+			__stringify(LONG_SUBU)	"	%0, %1, %3	\n"
+			__stringify(LONG_SUBU)	"	%0, %1, %3	\n"
+			__stringify(LONG_SC)	"	%0, %2		\n"
+			__stringify(SC_BEQZ)	"	%0, 1b		\n"
+			__stringify(LONG_SUBU)	"	%0, %1, %3	\n"
+		"	.set	pop					\n"
+		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
+		: "Ir" (i), "m" (l->a.counter)
+		: "memory");
+	} else {
+		unsigned long flags;
+
+		local_irq_save(flags);
+		result = l->a.counter;
+		result -= i;
+		l->a.counter = result;
+		local_irq_restore(flags);
+	}
+
+	return result;
+}
+
+#define local_cmpxchg(l, o, n) \
+	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
+#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
+
+/**
+ * local_add_unless - add unless the number is a given value
+ * @l: pointer of type local_t
+ * @a: the amount to add to l...
+ * @u: ...unless l is equal to u.
+ *
+ * Atomically adds @a to @l, so long as it was not @u.
+ * Returns non-zero if @l was not @u, and zero otherwise.
+ */
+#define local_add_unless(l, a, u)				\
+({								\
+	long c, old;						\
+	c = local_read(l);					\
+	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
+		c = old;					\
+	c != (u);						\
+})
+#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
+
+#define local_dec_return(l) local_sub_return(1, (l))
+#define local_inc_return(l) local_add_return(1, (l))
+
+/*
+ * local_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @l: pointer of type local_t
+ *
+ * Atomically subtracts @i from @l and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)
+
+/*
+ * local_inc_and_test - increment and test
+ * @l: pointer of type local_t
+ *
+ * Atomically increments @l by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+#define local_inc_and_test(l) (local_inc_return(l) == 0)
+
+/*
+ * local_dec_and_test - decrement by 1 and test
+ * @l: pointer of type local_t
+ *
+ * Atomically decrements @l by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)
+
+/*
+ * local_add_negative - add and test if negative
+ * @l: pointer of type local_t
+ * @i: integer value to add
+ *
+ * Atomically adds @i to @l and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)
+
+/* Use these for per-cpu local_t variables: on some archs they are
+ * much more efficient than these naive implementations. Note they take
+ * a variable, not an address.
+ */
+
+#define __local_inc(l)		((l)->a.counter++)
+#define __local_dec(l)		((l)->a.counter--)
+#define __local_add(i, l)	((l)->a.counter+=(i))
+#define __local_sub(i, l)	((l)->a.counter-=(i))
+
+#endif /* _ARCH_MIPS_LOCAL_H */
--
cgit v1.2.3
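Usage note (editor's sketch, not part of the patch): the local_t API above is normally consumed as a per-CPU counter that only the owning CPU ever modifies. The snippet below assumes kernel code called from a context already bound to one CPU (IRQ, softirq, or with preemption disabled); the names pkt_count, count_packet() and total_packets() are invented for illustration and do not appear in the patch.

#include <linux/percpu.h>
#include <linux/smp.h>
#include <asm/local.h>

/* One counter per CPU; each CPU touches only its own instance. */
static DEFINE_PER_CPU(local_t, pkt_count) = LOCAL_INIT(0);

/* Fast path: runs with preemption disabled (e.g. softirq), so
 * this_cpu_ptr() is stable and local_inc() needs no lock; it is
 * atomic against interrupts on this CPU via the LL/SC loop (or the
 * IRQ-disable fallback) defined in the header above. */
static void count_packet(void)
{
	local_inc(this_cpu_ptr(&pkt_count));
}

/* Slow path: sum all per-CPU counters for reporting; readers
 * tolerate a slightly stale total, so no synchronization is used. */
static long total_packets(void)
{
	long sum = 0;
	int cpu;

	for_each_possible_cpu(cpu)
		sum += local_read(per_cpu_ptr(&pkt_count, cpu));

	return sum;
}

The point of local_t over a shared atomic_long_t is that the update never has to win a cross-CPU race: only interrupts on the owning CPU matter, which is why the header can fall back to a plain IRQ-disabled read-modify-write when LL/SC is not used.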