summaryrefslogtreecommitdiffstats
path: root/deps/jemalloc/include/jemalloc/internal/atomic_msvc.h
blob: 67057ce508950be95bbe167353039503ed6ff59e (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
#ifndef JEMALLOC_INTERNAL_ATOMIC_MSVC_H
#define JEMALLOC_INTERNAL_ATOMIC_MSVC_H

/*
 * C11-style atomic operations built on MSVC's _Interlocked* intrinsics,
 * for toolchains without <stdatomic.h>.
 */

#define ATOMIC_INIT(...) {__VA_ARGS__}

/* Mirrors C11's memory_order enumerators (relaxed .. seq_cst). */
typedef enum {
	atomic_memory_order_relaxed,
	atomic_memory_order_acquire,
	atomic_memory_order_release,
	atomic_memory_order_acq_rel,
	atomic_memory_order_seq_cst
} atomic_memory_order_t;

/*
 * Raw storage types indexed by lg(sizeof): char/short/long/__int64 are
 * the operand types of the _InterlockedFoo[8|16||64] intrinsic families
 * (see the ATOMIC_INTERLOCKED_SUFFIX_* mapping below).
 */
typedef char atomic_repr_0_t;
typedef short atomic_repr_1_t;
typedef long atomic_repr_2_t;
typedef __int64 atomic_repr_3_t;

/*
 * Issue the compiler/hardware barriers required by memory order mo.
 * A compiler barrier brackets the (conditional) hardware barrier on
 * both sides so the compiler cannot move accesses across the fence.
 */
ATOMIC_INLINE void
atomic_fence(atomic_memory_order_t mo) {
	_ReadWriteBarrier();
#  if defined(_M_ARM) || defined(_M_ARM64)
	/* ARM needs a hardware barrier for everything but relaxed. */
	int need_hw_barrier = (mo != atomic_memory_order_relaxed);
#  elif defined(_M_IX86) || defined (_M_X64)
	/* x86 needs a hardware barrier only for seq_cst. */
	int need_hw_barrier = (mo == atomic_memory_order_seq_cst);
#  else
#  error "Don't know how to create atomics for this platform for MSVC."
#  endif
	if (need_hw_barrier) {
		MemoryBarrier();
	}
	_ReadWriteBarrier();
}

/* Expand to the raw storage type for a given lg(size), e.g. atomic_repr_2_t. */
#define ATOMIC_INTERLOCKED_REPR(lg_size) atomic_repr_ ## lg_size ## _t

/* Two-level paste so macro arguments are expanded before ## is applied. */
#define ATOMIC_CONCAT(a, b) ATOMIC_RAW_CONCAT(a, b)
#define ATOMIC_RAW_CONCAT(a, b) a ## b

/*
 * Build an intrinsic name from a base and lg(size), e.g.
 * ATOMIC_INTERLOCKED_NAME(_InterlockedExchange, 3) -> _InterlockedExchange64.
 */
#define ATOMIC_INTERLOCKED_NAME(base_name, lg_size) ATOMIC_CONCAT(	\
    base_name, ATOMIC_INTERLOCKED_SUFFIX(lg_size))

#define ATOMIC_INTERLOCKED_SUFFIX(lg_size)				\
    ATOMIC_CONCAT(ATOMIC_INTERLOCKED_SUFFIX_, lg_size)

/*
 * Intrinsic-name suffix per operand size.  The 32-bit (lg_size == 2)
 * suffix is intentionally empty: the unsuffixed intrinsics (e.g.
 * _InterlockedExchange) operate on long, matching atomic_repr_2_t.
 */
#define ATOMIC_INTERLOCKED_SUFFIX_0 8
#define ATOMIC_INTERLOCKED_SUFFIX_1 16
#define ATOMIC_INTERLOCKED_SUFFIX_2
#define ATOMIC_INTERLOCKED_SUFFIX_3 64

/*
 * Define atomic_<short_type>_t plus load/store/exchange/compare-exchange
 * operations for a type stored in ATOMIC_INTERLOCKED_REPR(lg_size).
 *
 * - load/store use plain reads/writes of the repr field, with
 *   atomic_fence() calls supplying acquire/release/seq_cst ordering when
 *   mo is stronger than relaxed.
 * - exchange and compare-exchange delegate to the _InterlockedExchange*
 *   / _InterlockedCompareExchange* intrinsics.
 * - The "weak" CAS never fails spuriously here (the intrinsic is a full
 *   compare-exchange), so the strong variant simply calls the weak one.
 * - On CAS failure, *expected is updated to the value observed, matching
 *   C11 semantics.
 *
 * NOTE(review): the mo / success_mo / failure_mo arguments of the
 * intrinsic-backed ops are ignored; the intrinsics presumably provide
 * at-least-seq_cst ordering on the supported targets.
 */
#define JEMALLOC_GENERATE_ATOMICS(type, short_type, lg_size)		\
typedef struct {							\
	ATOMIC_INTERLOCKED_REPR(lg_size) repr;				\
} atomic_##short_type##_t;						\
									\
ATOMIC_INLINE type							\
atomic_load_##short_type(const atomic_##short_type##_t *a,		\
    atomic_memory_order_t mo) {						\
	ATOMIC_INTERLOCKED_REPR(lg_size) ret = a->repr;			\
	if (mo != atomic_memory_order_relaxed) {			\
		atomic_fence(atomic_memory_order_acquire);		\
	}								\
	return (type) ret;						\
}									\
									\
ATOMIC_INLINE void							\
atomic_store_##short_type(atomic_##short_type##_t *a,			\
    type val, atomic_memory_order_t mo) {				\
	if (mo != atomic_memory_order_relaxed) {			\
		atomic_fence(atomic_memory_order_release);		\
	}								\
	a->repr = (ATOMIC_INTERLOCKED_REPR(lg_size)) val;		\
	if (mo == atomic_memory_order_seq_cst) {			\
		atomic_fence(atomic_memory_order_seq_cst);		\
	}								\
}									\
									\
ATOMIC_INLINE type							\
atomic_exchange_##short_type(atomic_##short_type##_t *a, type val,	\
    atomic_memory_order_t mo) {						\
	return (type)ATOMIC_INTERLOCKED_NAME(_InterlockedExchange,	\
	    lg_size)(&a->repr, (ATOMIC_INTERLOCKED_REPR(lg_size))val);	\
}									\
									\
ATOMIC_INLINE bool							\
atomic_compare_exchange_weak_##short_type(atomic_##short_type##_t *a,	\
    type *expected, type desired, atomic_memory_order_t success_mo,	\
    atomic_memory_order_t failure_mo) {					\
	ATOMIC_INTERLOCKED_REPR(lg_size) e =				\
	    (ATOMIC_INTERLOCKED_REPR(lg_size))*expected;		\
	ATOMIC_INTERLOCKED_REPR(lg_size) d =				\
	    (ATOMIC_INTERLOCKED_REPR(lg_size))desired;			\
	ATOMIC_INTERLOCKED_REPR(lg_size) old =				\
	    ATOMIC_INTERLOCKED_NAME(_InterlockedCompareExchange, 	\
		lg_size)(&a->repr, d, e);				\
	if (old == e) {							\
		return true;						\
	} else {							\
		*expected = (type)old;					\
		return false;						\
	}								\
}									\
									\
ATOMIC_INLINE bool							\
atomic_compare_exchange_strong_##short_type(atomic_##short_type##_t *a,	\
    type *expected, type desired, atomic_memory_order_t success_mo,	\
    atomic_memory_order_t failure_mo) {					\
	/* We implement the weak version with strong semantics. */	\
	return atomic_compare_exchange_weak_##short_type(a, expected,	\
	    desired, success_mo, failure_mo);				\
}


/*
 * Extend JEMALLOC_GENERATE_ATOMICS with the integer read-modify-write
 * operations: fetch_add/sub/and/or/xor, each delegating to the matching
 * _Interlocked* intrinsic and returning the previous value.
 *
 * fetch_sub is fetch_add of the negated operand; the __pragma pair
 * suppresses MSVC warning C4146 (unary minus applied to an unsigned
 * type), which here produces exactly the intended wraparound value
 * (MAX_TYPE + 1 - operand), as noted inline.
 */
#define JEMALLOC_GENERATE_INT_ATOMICS(type, short_type, lg_size)	\
JEMALLOC_GENERATE_ATOMICS(type, short_type, lg_size)			\
									\
ATOMIC_INLINE type							\
atomic_fetch_add_##short_type(atomic_##short_type##_t *a,		\
    type val, atomic_memory_order_t mo) {				\
	return (type)ATOMIC_INTERLOCKED_NAME(_InterlockedExchangeAdd,	\
	    lg_size)(&a->repr, (ATOMIC_INTERLOCKED_REPR(lg_size))val);	\
}									\
									\
ATOMIC_INLINE type							\
atomic_fetch_sub_##short_type(atomic_##short_type##_t *a,		\
    type val, atomic_memory_order_t mo) {				\
	/*								\
	 * MSVC warns on negation of unsigned operands, but for us it	\
	 * gives exactly the right semantics (MAX_TYPE + 1 - operand).	\
	 */								\
	__pragma(warning(push))						\
	__pragma(warning(disable: 4146))				\
	return atomic_fetch_add_##short_type(a, -val, mo);		\
	__pragma(warning(pop))						\
}									\
ATOMIC_INLINE type							\
atomic_fetch_and_##short_type(atomic_##short_type##_t *a,		\
    type val, atomic_memory_order_t mo) {				\
	return (type)ATOMIC_INTERLOCKED_NAME(_InterlockedAnd, lg_size)(	\
	    &a->repr, (ATOMIC_INTERLOCKED_REPR(lg_size))val);		\
}									\
ATOMIC_INLINE type							\
atomic_fetch_or_##short_type(atomic_##short_type##_t *a,		\
    type val, atomic_memory_order_t mo) {				\
	return (type)ATOMIC_INTERLOCKED_NAME(_InterlockedOr, lg_size)(	\
	    &a->repr, (ATOMIC_INTERLOCKED_REPR(lg_size))val);		\
}									\
ATOMIC_INLINE type							\
atomic_fetch_xor_##short_type(atomic_##short_type##_t *a,		\
    type val, atomic_memory_order_t mo) {				\
	return (type)ATOMIC_INTERLOCKED_NAME(_InterlockedXor, lg_size)(	\
	    &a->repr, (ATOMIC_INTERLOCKED_REPR(lg_size))val);		\
}

#endif /* JEMALLOC_INTERNAL_ATOMIC_MSVC_H */