author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 13:54:38 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 13:54:38 +0000
commit    8c1ab65c0f548d20b7f177bdb736daaf603340e1 (patch)
tree      df55b7e75bf43f2bf500845b105afe3ac3a5157e /libc-top-half/musl/src/string/memset.c
parent    Initial commit. (diff)
Adding upstream version 0.0~git20221206.8b7148f. (tag: upstream/0.0_git20221206.8b7148f)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'libc-top-half/musl/src/string/memset.c')
-rw-r--r--  libc-top-half/musl/src/string/memset.c  |  94
1 file changed, 94 insertions(+), 0 deletions(-)
diff --git a/libc-top-half/musl/src/string/memset.c b/libc-top-half/musl/src/string/memset.c
new file mode 100644
index 0000000..f64c9cf
--- /dev/null
+++ b/libc-top-half/musl/src/string/memset.c
@@ -0,0 +1,94 @@
+#include <string.h>
+#include <stdint.h>
+
+void *memset(void *dest, int c, size_t n)
+{
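+ /* When wasm bulk-memory operations are available, hand large
+ * fills to the compiler, which typically lowers __builtin_memset
+ * to a single memory.fill instruction. BULK_MEMORY_THRESHOLD is
+ * not defined in this file; the build is expected to supply it,
+ * e.g. via -DBULK_MEMORY_THRESHOLD=32 on the compiler command
+ * line. */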
+#if defined(__wasm_bulk_memory__)
+ if (n > BULK_MEMORY_THRESHOLD)
+  return __builtin_memset(dest, c, n);
+#endif
+ unsigned char *s = dest;
+ size_t k;
+
+ /* Fill head and tail with minimal branching. Each
+ * conditional ensures that all the subsequently used
+ * offsets are well-defined and in the dest region. */
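+ /* (Worked example, for illustration: with n == 5 the stores
+ * below write offsets 0, 4, then 1, 2, 3, 2, and return at the
+ * n <= 6 check; the overlapping stores are harmless.) */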
+
+ if (!n) return dest;
+ s[0] = c;
+ s[n-1] = c;
+ if (n <= 2) return dest;
+ s[1] = c;
+ s[2] = c;
+ s[n-2] = c;
+ s[n-3] = c;
+ if (n <= 6) return dest;
+ s[3] = c;
+ s[n-4] = c;
+ if (n <= 8) return dest;
+
+ /* Advance pointer to align it at a 4-byte boundary,
+ * and truncate n to a multiple of 4. The previous code
+ * already took care of any head/tail that get cut off
+ * by the alignment. */
+
+ k = -(uintptr_t)s & 3;
+ s += k;
+ n -= k;
+ n &= -4;
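+ /* (Illustration: if (uintptr_t)s ends in 0x5, then k == 3 and
+ * s advances to the next 4-byte boundary; n shrinks by 3 and is
+ * then rounded down to a multiple of 4.) */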
+
+#ifdef __GNUC__
+ typedef uint32_t __attribute__((__may_alias__)) u32;
+ typedef uint64_t __attribute__((__may_alias__)) u64;
+
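+ /* ((u32)-1)/255 is 0x01010101, so c32 is the fill byte
+ * replicated into all four byte lanes: c == 0xAB gives
+ * c32 == 0xABABABAB. */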
+ u32 c32 = ((u32)-1)/255 * (unsigned char)c;
+
+ /* In preparation to copy 32 bytes at a time, aligned on
+ * an 8-byte boundary, fill head/tail up to 28 bytes each.
+ * As in the initial byte-based head/tail fill, each
+ * conditional below ensures that the subsequent offsets
+ * are valid (e.g. !(n<=24) implies n>=28). */
+
+ *(u32 *)(s+0) = c32;
+ *(u32 *)(s+n-4) = c32;
+ if (n <= 8) return dest;
+ *(u32 *)(s+4) = c32;
+ *(u32 *)(s+8) = c32;
+ *(u32 *)(s+n-12) = c32;
+ *(u32 *)(s+n-8) = c32;
+ if (n <= 24) return dest;
+ *(u32 *)(s+12) = c32;
+ *(u32 *)(s+16) = c32;
+ *(u32 *)(s+20) = c32;
+ *(u32 *)(s+24) = c32;
+ *(u32 *)(s+n-28) = c32;
+ *(u32 *)(s+n-24) = c32;
+ *(u32 *)(s+n-20) = c32;
+ *(u32 *)(s+n-16) = c32;
+
+ /* Align to a multiple of 8 so we can fill 64 bits at a time,
+ * and avoid writing the same bytes twice, as much as is
+ * practical without introducing additional branching. */
+
+ k = 24 + ((uintptr_t)s & 4);
+ s += k;
+ n -= k;
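+ /* (s was 4-byte aligned, so adding 24 or 28 bytes, depending
+ * on the s & 4 bit, leaves it 8-byte aligned; the skipped head
+ * bytes were already filled by the u32 stores above.) */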
+
+ /* If this loop is reached, 28 tail bytes have already been
+ * filled, so any remainder when n drops below 32 can be
+ * safely ignored. */
+
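+ /* c64 replicates c32 into both 32-bit halves of a 64-bit word. */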
+ u64 c64 = c32 | ((u64)c32 << 32);
+ for (; n >= 32; n-=32, s+=32) {
+  *(u64 *)(s+0) = c64;
+  *(u64 *)(s+8) = c64;
+  *(u64 *)(s+16) = c64;
+  *(u64 *)(s+24) = c64;
+ }
+#else
+ /* Pure C fallback with no aliasing violations. */
+ for (; n; n--, s++) *s = c;
+#endif
+
+ return dest;
+}
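
As a quick sanity check of the routine above, here is a minimal test harness; it is a sketch, not part of the upstream change, and assumes a hosted toolchain where assert() is available. Sweeping the fill length at a deliberately unaligned offset exercises the byte head/tail path (n <= 8), the u32 head/tail stores, and the u64 bulk loop.

#include <assert.h>
#include <stddef.h>
#include <string.h>

int main(void)
{
	unsigned char buf[128];

	for (size_t n = 0; n <= 96; n++) {
		memset(buf, 0x55, sizeof buf);   /* sentinel everywhere */
		memset(buf + 3, 0xAB, n);        /* region under test */
		for (size_t i = 0; i < sizeof buf; i++) {
			unsigned char want =
				(i >= 3 && i < 3 + n) ? 0xAB : 0x55;
			assert(buf[i] == want);
		}
	}
	return 0;
}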