author    Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-13 12:24:36 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-13 12:24:36 +0000
commit    06eaf7232e9a920468c0f8d74dcf2fe8b555501c (patch)
tree      e2c7b5777f728320e5b5542b6213fd3591ba51e2 /include/atomic/generic-msvc.h
parent    Initial commit. (diff)
Adding upstream version 1:10.11.6. (tag: upstream/1%10.11.6)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'include/atomic/generic-msvc.h')
-rw-r--r--  include/atomic/generic-msvc.h  140
1 file changed, 140 insertions, 0 deletions
diff --git a/include/atomic/generic-msvc.h b/include/atomic/generic-msvc.h
new file mode 100644
index 00000000..ff2a5434
--- /dev/null
+++ b/include/atomic/generic-msvc.h
@@ -0,0 +1,140 @@
+#ifndef ATOMIC_MSC_INCLUDED
+#define ATOMIC_MSC_INCLUDED
+
+/* Copyright (c) 2006, 2014, Oracle and/or its affiliates. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; version 2 of the License.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1335 USA */
+
+#include <windows.h>
+
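+/*
+  Compare-and-swap wrappers: atomically set *a to 'set' if *a equals *cmp.
+  Return nonzero on success; on failure, write the value actually observed
+  in *a back into *cmp, so callers can retry without a separate reload.
+*/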
+static inline int my_atomic_cas32(int32 volatile *a, int32 *cmp, int32 set)
+{
+ int32 initial_cmp= *cmp;
+ int32 initial_a= InterlockedCompareExchange((volatile LONG*)a,
+ set, initial_cmp);
+ int ret= (initial_a == initial_cmp);
+ if (!ret)
+ *cmp= initial_a;
+ return ret;
+}
+
+static inline int my_atomic_cas64(int64 volatile *a, int64 *cmp, int64 set)
+{
+ int64 initial_cmp= *cmp;
+ int64 initial_a= InterlockedCompareExchange64((volatile LONGLONG*)a,
+ (LONGLONG)set,
+ (LONGLONG)initial_cmp);
+ int ret= (initial_a == initial_cmp);
+ if (!ret)
+ *cmp= initial_a;
+ return ret;
+}
+
+static inline int my_atomic_casptr(void * volatile *a, void **cmp, void *set)
+{
+ void *initial_cmp= *cmp;
+ void *initial_a= InterlockedCompareExchangePointer(a, set, initial_cmp);
+ int ret= (initial_a == initial_cmp);
+ if (!ret)
+ *cmp= initial_a;
+ return ret;
+}
+
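+/* Atomic fetch-and-add: add v to *a and return the value *a held before. */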
+static inline int32 my_atomic_add32(int32 volatile *a, int32 v)
+{
+ return (int32)InterlockedExchangeAdd((volatile LONG*)a, v);
+}
+
+static inline int64 my_atomic_add64(int64 volatile *a, int64 v)
+{
+ return (int64)InterlockedExchangeAdd64((volatile LONGLONG*)a, (LONGLONG)v);
+}
+
+
+/*
+ According to MSDN:
+
+ Simple reads and writes to properly-aligned 32-bit variables are atomic
+ operations.
+ ...
+ Simple reads and writes to properly aligned 64-bit variables are atomic on
+ 64-bit Windows. Reads and writes to 64-bit values are not guaranteed to be
+ atomic on 32-bit Windows.
+
+ https://msdn.microsoft.com/en-us/library/windows/desktop/ms684122(v=vs.85).aspx
+*/
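+
+/*
+  Consequently, the loads and stores below use a plain access plus a full
+  MemoryBarrier() where the platform guarantees the access is atomic, and
+  fall back to Interlocked operations for 64-bit values on 32-bit Windows.
+*/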
+
+static inline int32 my_atomic_load32(int32 volatile *a)
+{
+ int32 value= *a;
+ MemoryBarrier();
+ return value;
+}
+
+static inline int64 my_atomic_load64(int64 volatile *a)
+{
+#ifdef _M_X64
+ int64 value= *a;
+ MemoryBarrier();
+ return value;
+#else
+ return (int64) InterlockedCompareExchange64((volatile LONGLONG *) a, 0, 0);
+#endif
+}
+
+static inline void* my_atomic_loadptr(void * volatile *a)
+{
+ void *value= *a;
+ MemoryBarrier();
+ return value;
+}
+
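+/* Fetch-and-store (atomic exchange): store v into *a, return the old value. */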
+static inline int32 my_atomic_fas32(int32 volatile *a, int32 v)
+{
+ return (int32)InterlockedExchange((volatile LONG*)a, v);
+}
+
+static inline int64 my_atomic_fas64(int64 volatile *a, int64 v)
+{
+ return (int64)InterlockedExchange64((volatile LONGLONG*)a, v);
+}
+
+static inline void * my_atomic_fasptr(void * volatile *a, void * v)
+{
+ return InterlockedExchangePointer(a, v);
+}
+
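+/* Stores: a full MemoryBarrier() precedes the plain write where plain
+   writes are atomic; 32-bit Windows needs an atomic exchange for 64 bits. */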
+static inline void my_atomic_store32(int32 volatile *a, int32 v)
+{
+ MemoryBarrier();
+ *a= v;
+}
+
+static inline void my_atomic_store64(int64 volatile *a, int64 v)
+{
+#ifdef _M_X64
+ MemoryBarrier();
+ *a= v;
+#else
+ (void) InterlockedExchange64((volatile LONGLONG *) a, v);
+#endif
+}
+
+static inline void my_atomic_storeptr(void * volatile *a, void *v)
+{
+ MemoryBarrier();
+ *a= v;
+}
+
+#endif /* ATOMIC_MSC_INCLUDED */
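
For context, not part of the commit: the CAS wrappers above are built for
retry loops, because a failed my_atomic_cas32 refreshes *cmp with the value
it actually observed. A minimal sketch, assuming this header and MariaDB's
int32 typedef are in scope (bounded_add32 is a hypothetical helper, not part
of this tree): it adds v to a counter only while the result stays at or
below a cap, using the failed-CAS update of 'expected' to retry with fresh
data.

/* Illustrative sketch only -- bounded_add32 is hypothetical and not part
   of the upstream file. */
static int32 bounded_add32(int32 volatile *counter, int32 v, int32 cap)
{
  int32 expected= my_atomic_load32(counter);
  for (;;)
  {
    if (expected + v > cap)
      return expected;                  /* would exceed cap: leave as-is */
    /* On failure, my_atomic_cas32 writes the observed value back into
       'expected', so the next iteration retries with up-to-date data. */
    if (my_atomic_cas32(counter, &expected, expected + v))
      return expected + v;              /* swap succeeded */
  }
}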