Diffstat
-rw-r--r-- | arch/csky/lib/Makefile       |   6
-rw-r--r-- | arch/csky/lib/delay.c        |  39
-rw-r--r-- | arch/csky/lib/error-inject.c |  10
-rw-r--r-- | arch/csky/lib/string.c       | 134
-rw-r--r-- | arch/csky/lib/usercopy.c     | 214
5 files changed, 403 insertions, 0 deletions
diff --git a/arch/csky/lib/Makefile b/arch/csky/lib/Makefile new file mode 100644 index 000000000..d0ce6e2d7 --- /dev/null +++ b/arch/csky/lib/Makefile @@ -0,0 +1,6 @@ +# SPDX-License-Identifier: GPL-2.0-only +lib-y := usercopy.o delay.o +obj-$(CONFIG_FUNCTION_ERROR_INJECTION) += error-inject.o +ifneq ($(CONFIG_HAVE_EFFICIENT_UNALIGNED_STRING_OPS), y) +lib-y += string.o +endif diff --git a/arch/csky/lib/delay.c b/arch/csky/lib/delay.c new file mode 100644 index 000000000..22570b079 --- /dev/null +++ b/arch/csky/lib/delay.c @@ -0,0 +1,39 @@ +// SPDX-License-Identifier: GPL-2.0 +// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd. +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/init.h> +#include <linux/delay.h> + +void __delay(unsigned long loops) +{ + asm volatile ( + "mov r0, r0\n" + "1:declt %0\n" + "bf 1b" + : "=r"(loops) + : "0"(loops)); +} +EXPORT_SYMBOL(__delay); + +void __const_udelay(unsigned long xloops) +{ + unsigned long long loops; + + loops = (unsigned long long)xloops * loops_per_jiffy * HZ; + + __delay(loops >> 32); +} +EXPORT_SYMBOL(__const_udelay); + +void __udelay(unsigned long usecs) +{ + __const_udelay(usecs * 0x10C7UL); /* 2**32 / 1000000 (rounded up) */ +} +EXPORT_SYMBOL(__udelay); + +void __ndelay(unsigned long nsecs) +{ + __const_udelay(nsecs * 0x5UL); /* 2**32 / 1000000000 (rounded up) */ +} +EXPORT_SYMBOL(__ndelay); diff --git a/arch/csky/lib/error-inject.c b/arch/csky/lib/error-inject.c new file mode 100644 index 000000000..c15fb36fe --- /dev/null +++ b/arch/csky/lib/error-inject.c @@ -0,0 +1,10 @@ +// SPDX-License-Identifier: GPL-2.0 + +#include <linux/error-injection.h> +#include <linux/kprobes.h> + +void override_function_with_return(struct pt_regs *regs) +{ + instruction_pointer_set(regs, regs->lr); +} +NOKPROBE_SYMBOL(override_function_with_return); diff --git a/arch/csky/lib/string.c b/arch/csky/lib/string.c new file mode 100644 index 000000000..d65626fca --- /dev/null +++ b/arch/csky/lib/string.c @@ -0,0 +1,134 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * String functions optimized for hardware which doesn't + * handle unaligned memory accesses efficiently. + * + * Copyright (C) 2021 Matteo Croce + */ + +#include <linux/types.h> +#include <linux/module.h> + +/* Minimum size for a word copy to be convenient */ +#define BYTES_LONG sizeof(long) +#define WORD_MASK (BYTES_LONG - 1) +#define MIN_THRESHOLD (BYTES_LONG * 2) + +/* convenience union to avoid cast between different pointer types */ +union types { + u8 *as_u8; + unsigned long *as_ulong; + uintptr_t as_uptr; +}; + +union const_types { + const u8 *as_u8; + unsigned long *as_ulong; + uintptr_t as_uptr; +}; + +void *memcpy(void *dest, const void *src, size_t count) +{ + union const_types s = { .as_u8 = src }; + union types d = { .as_u8 = dest }; + int distance = 0; + + if (count < MIN_THRESHOLD) + goto copy_remainder; + + /* Copy a byte at time until destination is aligned. */ + for (; d.as_uptr & WORD_MASK; count--) + *d.as_u8++ = *s.as_u8++; + + distance = s.as_uptr & WORD_MASK; + + if (distance) { + unsigned long last, next; + + /* + * s is distance bytes ahead of d, and d just reached + * the alignment boundary. Move s backward to word align it + * and shift data to compensate for distance, in order to do + * word-by-word copy. 
+ */ + s.as_u8 -= distance; + + next = s.as_ulong[0]; + for (; count >= BYTES_LONG; count -= BYTES_LONG) { + last = next; + next = s.as_ulong[1]; + + d.as_ulong[0] = last >> (distance * 8) | + next << ((BYTES_LONG - distance) * 8); + + d.as_ulong++; + s.as_ulong++; + } + + /* Restore s with the original offset. */ + s.as_u8 += distance; + } else { + /* + * If the source and dest lower bits are the same, do a simple + * 32/64 bit wide copy. + */ + for (; count >= BYTES_LONG; count -= BYTES_LONG) + *d.as_ulong++ = *s.as_ulong++; + } + +copy_remainder: + while (count--) + *d.as_u8++ = *s.as_u8++; + + return dest; +} +EXPORT_SYMBOL(memcpy); + +/* + * Simply check if the buffer overlaps an call memcpy() in case, + * otherwise do a simple one byte at time backward copy. + */ +void *memmove(void *dest, const void *src, size_t count) +{ + if (dest < src || src + count <= dest) + return memcpy(dest, src, count); + + if (dest > src) { + const char *s = src + count; + char *tmp = dest + count; + + while (count--) + *--tmp = *--s; + } + return dest; +} +EXPORT_SYMBOL(memmove); + +void *memset(void *s, int c, size_t count) +{ + union types dest = { .as_u8 = s }; + + if (count >= MIN_THRESHOLD) { + unsigned long cu = (unsigned long)c; + + /* Compose an ulong with 'c' repeated 4/8 times */ + cu |= cu << 8; + cu |= cu << 16; + /* Suppress warning on 32 bit machines */ + cu |= (cu << 16) << 16; + + for (; count && dest.as_uptr & WORD_MASK; count--) + *dest.as_u8++ = c; + + /* Copy using the largest size allowed */ + for (; count >= BYTES_LONG; count -= BYTES_LONG) + *dest.as_ulong++ = cu; + } + + /* copy the remainder */ + while (count--) + *dest.as_u8++ = c; + + return s; +} +EXPORT_SYMBOL(memset); diff --git a/arch/csky/lib/usercopy.c b/arch/csky/lib/usercopy.c new file mode 100644 index 000000000..3c01c5442 --- /dev/null +++ b/arch/csky/lib/usercopy.c @@ -0,0 +1,214 @@ +// SPDX-License-Identifier: GPL-2.0 +// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd. 
+ +#include <linux/uaccess.h> +#include <linux/types.h> + +unsigned long raw_copy_from_user(void *to, const void *from, + unsigned long n) +{ + int tmp, nsave; + + __asm__ __volatile__( + "0: cmpnei %1, 0 \n" + " bf 7f \n" + " mov %3, %1 \n" + " or %3, %2 \n" + " andi %3, 3 \n" + " cmpnei %3, 0 \n" + " bf 1f \n" + " br 5f \n" + "1: cmplti %0, 16 \n" + " bt 3f \n" + "2: ldw %3, (%2, 0) \n" + "10: ldw %4, (%2, 4) \n" + " stw %3, (%1, 0) \n" + " stw %4, (%1, 4) \n" + "11: ldw %3, (%2, 8) \n" + "12: ldw %4, (%2, 12) \n" + " stw %3, (%1, 8) \n" + " stw %4, (%1, 12) \n" + " addi %2, 16 \n" + " addi %1, 16 \n" + " subi %0, 16 \n" + " br 1b \n" + "3: cmplti %0, 4 \n" + " bt 5f \n" + "4: ldw %3, (%2, 0) \n" + " stw %3, (%1, 0) \n" + " addi %2, 4 \n" + " addi %1, 4 \n" + " subi %0, 4 \n" + " br 3b \n" + "5: cmpnei %0, 0 \n" + " bf 7f \n" + "6: ldb %3, (%2, 0) \n" + " stb %3, (%1, 0) \n" + " addi %2, 1 \n" + " addi %1, 1 \n" + " subi %0, 1 \n" + " br 5b \n" + "8: stw %3, (%1, 0) \n" + " subi %0, 4 \n" + " bf 7f \n" + "9: subi %0, 8 \n" + " bf 7f \n" + "13: stw %3, (%1, 8) \n" + " subi %0, 12 \n" + " bf 7f \n" + ".section __ex_table, \"a\" \n" + ".align 2 \n" + ".long 2b, 7f \n" + ".long 4b, 7f \n" + ".long 6b, 7f \n" + ".long 10b, 8b \n" + ".long 11b, 9b \n" + ".long 12b,13b \n" + ".previous \n" + "7: \n" + : "=r"(n), "=r"(to), "=r"(from), "=r"(nsave), + "=r"(tmp) + : "0"(n), "1"(to), "2"(from) + : "memory"); + + return n; +} +EXPORT_SYMBOL(raw_copy_from_user); + +unsigned long raw_copy_to_user(void *to, const void *from, + unsigned long n) +{ + int w0, w1, w2, w3; + + __asm__ __volatile__( + "0: cmpnei %1, 0 \n" + " bf 8f \n" + " mov %3, %1 \n" + " or %3, %2 \n" + " andi %3, 3 \n" + " cmpnei %3, 0 \n" + " bf 1f \n" + " br 5f \n" + "1: cmplti %0, 16 \n" /* 4W */ + " bt 3f \n" + " ldw %3, (%2, 0) \n" + " ldw %4, (%2, 4) \n" + " ldw %5, (%2, 8) \n" + " ldw %6, (%2, 12) \n" + "2: stw %3, (%1, 0) \n" + "9: stw %4, (%1, 4) \n" + "10: stw %5, (%1, 8) \n" + "11: stw %6, (%1, 12) \n" + " addi %2, 16 \n" + " addi %1, 16 \n" + " subi %0, 16 \n" + " br 1b \n" + "3: cmplti %0, 4 \n" /* 1W */ + " bt 5f \n" + " ldw %3, (%2, 0) \n" + "4: stw %3, (%1, 0) \n" + " addi %2, 4 \n" + " addi %1, 4 \n" + " subi %0, 4 \n" + " br 3b \n" + "5: cmpnei %0, 0 \n" /* 1B */ + " bf 13f \n" + " ldb %3, (%2, 0) \n" + "6: stb %3, (%1, 0) \n" + " addi %2, 1 \n" + " addi %1, 1 \n" + " subi %0, 1 \n" + " br 5b \n" + "7: subi %0, 4 \n" + "8: subi %0, 4 \n" + "12: subi %0, 4 \n" + " br 13f \n" + ".section __ex_table, \"a\" \n" + ".align 2 \n" + ".long 2b, 13f \n" + ".long 4b, 13f \n" + ".long 6b, 13f \n" + ".long 9b, 12b \n" + ".long 10b, 8b \n" + ".long 11b, 7b \n" + ".previous \n" + "13: \n" + : "=r"(n), "=r"(to), "=r"(from), "=r"(w0), + "=r"(w1), "=r"(w2), "=r"(w3) + : "0"(n), "1"(to), "2"(from) + : "memory"); + + return n; +} +EXPORT_SYMBOL(raw_copy_to_user); + +/* + * __clear_user: - Zero a block of memory in user space, with less checking. + * @to: Destination address, in user space. + * @n: Number of bytes to zero. + * + * Zero a block of memory in user space. Caller must check + * the specified block with access_ok() before calling this function. + * + * Returns number of bytes that could not be cleared. + * On success, this will be zero. 
+ */ +unsigned long +__clear_user(void __user *to, unsigned long n) +{ + int data, value, tmp; + + __asm__ __volatile__( + "0: cmpnei %1, 0 \n" + " bf 7f \n" + " mov %3, %1 \n" + " andi %3, 3 \n" + " cmpnei %3, 0 \n" + " bf 1f \n" + " br 5f \n" + "1: cmplti %0, 32 \n" /* 4W */ + " bt 3f \n" + "8: stw %2, (%1, 0) \n" + "10: stw %2, (%1, 4) \n" + "11: stw %2, (%1, 8) \n" + "12: stw %2, (%1, 12) \n" + "13: stw %2, (%1, 16) \n" + "14: stw %2, (%1, 20) \n" + "15: stw %2, (%1, 24) \n" + "16: stw %2, (%1, 28) \n" + " addi %1, 32 \n" + " subi %0, 32 \n" + " br 1b \n" + "3: cmplti %0, 4 \n" /* 1W */ + " bt 5f \n" + "4: stw %2, (%1, 0) \n" + " addi %1, 4 \n" + " subi %0, 4 \n" + " br 3b \n" + "5: cmpnei %0, 0 \n" /* 1B */ + "9: bf 7f \n" + "6: stb %2, (%1, 0) \n" + " addi %1, 1 \n" + " subi %0, 1 \n" + " br 5b \n" + ".section __ex_table,\"a\" \n" + ".align 2 \n" + ".long 8b, 9b \n" + ".long 10b, 9b \n" + ".long 11b, 9b \n" + ".long 12b, 9b \n" + ".long 13b, 9b \n" + ".long 14b, 9b \n" + ".long 15b, 9b \n" + ".long 16b, 9b \n" + ".long 4b, 9b \n" + ".long 6b, 9b \n" + ".previous \n" + "7: \n" + : "=r"(n), "=r" (data), "=r"(value), "=r"(tmp) + : "0"(n), "1"(to), "2"(0) + : "memory"); + + return n; +} +EXPORT_SYMBOL(__clear_user); |
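Note on the constants in delay.c: 0x10C7 is 2^32 / 1000000 rounded up (4295) and 0x5 is 2^32 / 1000000000 rounded up, so after __const_udelay() shifts the 64-bit product right by 32 the loop count works out to usecs * loops_per_jiffy * HZ / 1000000 without any runtime division. Below is a minimal user-space sketch of that fixed-point math; it is not part of the patch, and the lpj and hz values are made-up examples.

/*
 * Stand-alone sketch of the __udelay()/__const_udelay() scaling above.
 * Not kernel code: lpj and hz are invented example values.
 * 0x10C7 == ceil(2^32 / 1000000), so the ">> 32" cancels the 2^32
 * factor and no division is needed at run time.
 */
#include <stdio.h>

int main(void)
{
	unsigned long lpj = 4980736;	/* example loops_per_jiffy */
	unsigned long hz = 100;		/* example HZ */
	unsigned long usecs = 250;

	unsigned long xloops = usecs * 0x10C7UL;
	unsigned long long loops = ((unsigned long long)xloops * lpj * hz) >> 32;

	/* Reference value computed with an exact 64-bit division. */
	unsigned long long exact = (unsigned long long)usecs * lpj * hz / 1000000;

	printf("fixed-point: %llu loops, exact: %llu loops\n", loops, exact);
	return 0;
}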
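The less obvious part of string.c is the misaligned path of memcpy(): when source and destination end up at different offsets within a word, the source pointer is rounded down to a word boundary and each destination word is merged from two neighbouring source words with a pair of shifts. The stand-alone sketch below illustrates that shift-and-merge loop; it assumes a little-endian machine (as csky is), and the helper name and test data are invented for illustration only.

/*
 * Illustration of the shift-and-merge word copy used by the misaligned
 * branch of memcpy() above. Little-endian assumed; not kernel code.
 */
#include <stdio.h>
#include <stdint.h>
#include <string.h>

#define BYTES_LONG	sizeof(unsigned long)
#define WORD_MASK	(BYTES_LONG - 1)

/* d must be word aligned; s may have any alignment. */
static void copy_words_shifted(unsigned char *d, const unsigned char *s,
			       size_t words)
{
	size_t distance = (uintptr_t)s & WORD_MASK;
	const unsigned long *sw;
	unsigned long *dw = (unsigned long *)d;
	unsigned long last, next;

	if (!distance) {	/* already co-aligned: plain word copy */
		memcpy(d, s, words * BYTES_LONG);
		return;
	}

	/* Round s down to a word boundary, then merge adjacent words. */
	sw = (const unsigned long *)(s - distance);
	next = sw[0];
	while (words--) {
		last = next;
		next = sw[1];
		*dw++ = last >> (distance * 8) |
			next << ((BYTES_LONG - distance) * 8);
		sw++;
	}
}

int main(void)
{
	/* Oversized buffers so reading one extra source word stays in bounds. */
	_Alignas(sizeof(unsigned long)) unsigned char src[64] =
		"____Hello, shift-merge word copy!_______";
	_Alignas(sizeof(unsigned long)) unsigned char dst[64] = { 0 };

	/* Copy three words starting at a deliberately misaligned source. */
	copy_words_shifted(dst, src + 1, 3);
	printf("%.*s\n", (int)(3 * BYTES_LONG), dst);
	return 0;
}

The kernel version reaches this loop only when distance is non-zero and goes through the union types helpers instead of the pointer casts, so the shift by BYTES_LONG * 8 bits can never occur there.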