/* SPDX-License-Identifier: GPL-2.0-only */
/*
* Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 */

#ifndef __ASM_ARC_CMPXCHG_H
#define __ASM_ARC_CMPXCHG_H

#include <linux/build_bug.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/smp.h>

#ifdef CONFIG_ARC_HAS_LLSC

/*
 * Atomically:
 *	if (*ptr == @old)
 *		*ptr = @new
 * Returns the original value of *ptr.
 */
#define __cmpxchg(ptr, old, new) \
({ \
__typeof__(*(ptr)) _prev; \
\
__asm__ __volatile__( \
"1: llock %0, [%1] \n" \
" brne %0, %2, 2f \n" \
" scond %3, [%1] \n" \
" bnz 1b \n" \
"2: \n" \
: "=&r"(_prev) /* Early clobber prevents reg reuse */ \
: "r"(ptr), /* Not "m": llock only supports reg */ \
"ir"(old), \
"r"(new) /* Not "ir": scond can't take LIMM */ \
: "cc", \
"memory"); /* gcc knows memory is clobbered */ \
\
_prev; \
})
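
/*
 * Roughly equivalent C for the LL/SC sequence above (illustrative
 * sketch only, not the implementation; store_conditional() is a
 * hypothetical stand-in for scond). The llock/scond pair is what
 * makes the read-compare-write atomic:
 *
 *	do {
 *		prev = *ptr;				// llock
 *		if (prev != old)
 *			break;				// brne -> 2f
 *	} while (!store_conditional(ptr, new));		// scond, bnz 1b
 */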
#define arch_cmpxchg_relaxed(ptr, old, new) \
({ \
__typeof__(ptr) _p_ = (ptr); \
__typeof__(*(ptr)) _o_ = (old); \
__typeof__(*(ptr)) _n_ = (new); \
__typeof__(*(ptr)) _prev_; \
\
switch (sizeof(*(_p_))) { \
case 4: \
_prev_ = __cmpxchg(_p_, _o_, _n_); \
break; \
default: \
BUILD_BUG(); \
} \
_prev_; \
})
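
/*
 * Hedged usage sketch (hypothetical caller, not part of this header):
 * code normally reaches this through the generic cmpxchg_relaxed()
 * wrapper from <linux/atomic.h>, typically in a retry loop such as
 * bumping a 32-bit counter:
 *
 *	u32 old, new;
 *	do {
 *		old = READ_ONCE(*cnt);
 *		new = old + 1;
 *	} while (arch_cmpxchg_relaxed(cnt, old, new) != old);
 */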
#else
#define arch_cmpxchg(ptr, old, new) \
({ \
volatile __typeof__(ptr) _p_ = (ptr); \
__typeof__(*(ptr)) _o_ = (old); \
__typeof__(*(ptr)) _n_ = (new); \
__typeof__(*(ptr)) _prev_; \
unsigned long __flags; \
\
BUILD_BUG_ON(sizeof(*(_p_)) != 4); \
\
/* \
* spin lock/unlock provide the needed smp_mb() before/after \
*/ \
atomic_ops_lock(__flags); \
_prev_ = *_p_; \
if (_prev_ == _o_) \
*_p_ = _n_; \
atomic_ops_unlock(__flags); \
_prev_; \
})
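
/*
 * Note: only the fully ordered arch_cmpxchg() is provided here; the
 * spinlock already implies full barriers, and the generic atomic
 * fallbacks are expected to map the _relaxed/_acquire/_release
 * variants onto it.
 */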
#endif

/*
 * xchg
 */
#ifdef CONFIG_ARC_HAS_LLSC

#define __arch_xchg(ptr, val) \
({ \
__asm__ __volatile__( \
" ex %0, [%1] \n" /* set new value */ \
: "+r"(val) \
: "r"(ptr) \
: "memory"); \
val; /* get old value */ \
})
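
/*
 * What the single EX instruction is relied upon to do, as a sketch
 * (assumption: EX atomically swaps a core register with a memory
 * location, so no retry loop is needed):
 *
 *	tmp  = *ptr;		// old value ends up in the register
 *	*ptr = val;		// new value lands in memory
 *	val  = tmp;
 */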
#define arch_xchg_relaxed(ptr, val) \
({ \
__typeof__(ptr) _p_ = (ptr); \
__typeof__(*(ptr)) _val_ = (val); \
\
switch (sizeof(*(_p_))) { \
case 4: \
_val_ = __arch_xchg(_p_, _val_); \
break; \
default: \
BUILD_BUG(); \
} \
_val_; \
})
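
/*
 * Hedged usage sketch (hypothetical caller): publish a new value and
 * retrieve the old one atomically, normally via the generic
 * xchg_relaxed() wrapper from <linux/atomic.h>:
 *
 *	u32 prev = arch_xchg_relaxed(&flag, 1);
 */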
#else /* !CONFIG_ARC_HAS_LLSC */
/*
 * The EX instruction is baseline, so it is available in !LLSC builds
 * too. But in this regime it still needs to take the @atomic_ops_lock
 * spinlock to allow interop with cmpxchg(), which uses the spinlock in
 * !LLSC (llist.h uses xchg and cmpxchg on the same data).
 */
#define arch_xchg(ptr, val) \
({ \
__typeof__(ptr) _p_ = (ptr); \
__typeof__(*(ptr)) _val_ = (val); \
\
unsigned long __flags; \
\
atomic_ops_lock(__flags); \
\
__asm__ __volatile__( \
" ex %0, [%1] \n" \
: "+r"(_val_) \
: "r"(_p_) \
: "memory"); \
\
atomic_ops_unlock(__flags); \
_val_; \
})
#endif

#endif /* __ASM_ARC_CMPXCHG_H */