/*
 * %CopyrightBegin%
 *
 * Copyright Ericsson AB 2008-2011. All Rights Reserved.
 *
 * The contents of this file are subject to the Erlang Public License,
 * Version 1.1, (the "License"); you may not use this file except in
 * compliance with the License. You should have received a copy of the
 * Erlang Public License along with this software. If not, it can be
 * retrieved online at http://www.erlang.org/.
 *
 * Software distributed under the License is distributed on an "AS IS"
 * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
 * the License for the specific language governing rights and limitations
 * under the License.
 *
 * %CopyrightEnd%
 */

/*
 * Native ethread atomics on TILE64/TILEPro.
 *
 */

#ifndef ETHREAD_TILE_ATOMIC_H
#define ETHREAD_TILE_ATOMIC_H

#define ETHR_HAVE_NATIVE_ATOMIC32 1

#include <atomic.h>
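
/* The atomic_* primitives used below (atomic_exchange_acq(), atomic_add(),
 * atomic_increment(), atomic_decrement(), atomic_exchange_and_add(),
 * atomic_and_val(), atomic_or_val(), atomic_compare_and_exchange_val_acq())
 * are assumed to be provided by the TILE toolchain's <atomic.h>. */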

/* An atomic is an aligned int accessed via locked operations.
 */
typedef struct {
    volatile ethr_sint32_t counter;
} ethr_native_atomic32_t;
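
/* Full memory barrier: __insn_mf() emits the TILE "mf" (memory fence)
 * instruction. */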
#define ETHR_MEMORY_BARRIER __insn_mf()

#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)

static ETHR_INLINE ethr_sint32_t *
ethr_native_atomic32_addr(ethr_native_atomic32_t *var)
{
    return (ethr_sint32_t *) &var->counter;
}

static ETHR_INLINE void
ethr_native_atomic32_init(ethr_native_atomic32_t *var, ethr_sint32_t i)
{
    var->counter = i;
}
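
/* Note: _set() performs its store with atomic_exchange_acq() (the previous
 * value is simply discarded), so the store itself is a locked operation;
 * _read() below is a plain volatile load with no barrier of its own. */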

static ETHR_INLINE void
ethr_native_atomic32_set(ethr_native_atomic32_t *var, ethr_sint32_t i)
{
    atomic_exchange_acq(&var->counter, i);
}

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_read(ethr_native_atomic32_t *var)
{
    return var->counter;
}

static ETHR_INLINE void
ethr_native_atomic32_add(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
    ETHR_MEMORY_BARRIER;
    atomic_add(&var->counter, incr);
    ETHR_MEMORY_BARRIER;
}

static ETHR_INLINE void
ethr_native_atomic32_inc(ethr_native_atomic32_t *var)
{
    ETHR_MEMORY_BARRIER;
    atomic_increment(&var->counter);
    ETHR_MEMORY_BARRIER;
}

static ETHR_INLINE void
ethr_native_atomic32_dec(ethr_native_atomic32_t *var)
{
    ETHR_MEMORY_BARRIER;
    atomic_decrement(&var->counter);
    ETHR_MEMORY_BARRIER;
}
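
/* atomic_exchange_and_add() returns the previous value of the counter, so the
 * new value returned by _add_return() is derived by adding the increment to
 * the returned old value. */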

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_add_return(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
    ethr_sint32_t res;
    ETHR_MEMORY_BARRIER;
    res = atomic_exchange_and_add(&var->counter, incr) + incr;
    ETHR_MEMORY_BARRIER;
    return res;
}

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_inc_return(ethr_native_atomic32_t *var)
{
    return ethr_native_atomic32_add_return(var, 1);
}

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var)
{
    return ethr_native_atomic32_add_return(var, -1);
}

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_and_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
    ethr_sint32_t res;
    ETHR_MEMORY_BARRIER;
    res = atomic_and_val(&var->counter, mask);
    ETHR_MEMORY_BARRIER;
    return res;
}

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_or_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
    ethr_sint32_t res;
    ETHR_MEMORY_BARRIER;
    res = atomic_or_val(&var->counter, mask);
    ETHR_MEMORY_BARRIER;
    return res;
}

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_xchg(ethr_native_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_MEMORY_BARRIER;
    return atomic_exchange_acq(&var->counter, val);
}
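
/* Note the argument order of atomic_compare_and_exchange_val_acq() as used
 * below: the new value is passed before the expected (old) value. The call is
 * assumed to return the value actually read from the counter, which equals
 * `expected` when the exchange succeeded. */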

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var,
                             ethr_sint32_t new,
                             ethr_sint32_t expected)
{
    ETHR_MEMORY_BARRIER;
    return atomic_compare_and_exchange_val_acq(&var->counter, new, expected);
}

/*
 * Atomic ops with at least specified barriers.
 */
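
/* The plain operations above already issue a full memory barrier before (and
 * in most cases after) each locked access, so the *_acqb/*_relb variants
 * below can simply reuse them. Only _read_acqb() and _set_relb() add an
 * explicit barrier of their own, since the plain _read()/_set() do not issue
 * a full barrier themselves. */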

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var)
{
    ethr_sint32_t res = ethr_native_atomic32_read(var);
    ETHR_MEMORY_BARRIER;
    return res;
}

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_inc_return_acqb(ethr_native_atomic32_t *var)
{
    return ethr_native_atomic32_inc_return(var);
}

static ETHR_INLINE void
ethr_native_atomic32_set_relb(ethr_native_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_MEMORY_BARRIER;
    ethr_native_atomic32_set(var, val);
}

static ETHR_INLINE void
ethr_native_atomic32_dec_relb(ethr_native_atomic32_t *var)
{
    ethr_native_atomic32_dec(var);
}

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_dec_return_relb(ethr_native_atomic32_t *var)
{
    return ethr_native_atomic32_dec_return(var);
}

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_cmpxchg_acqb(ethr_native_atomic32_t *var,
                                  ethr_sint32_t new,
                                  ethr_sint32_t exp)
{
    return ethr_native_atomic32_cmpxchg(var, new, exp);
}

static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_cmpxchg_relb(ethr_native_atomic32_t *var,
                                  ethr_sint32_t new,
                                  ethr_sint32_t exp)
{
    return ethr_native_atomic32_cmpxchg(var, new, exp);
}
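
/* Illustrative sketch (not part of this header): a simple reference count
 * built on these natives. free_object() is a hypothetical cleanup routine
 * standing in for caller code; _dec_return_relb() gives at least release
 * ordering on the final decrement.
 *
 *     ethr_native_atomic32_t refc;
 *     ethr_native_atomic32_init(&refc, 1);
 *     ethr_native_atomic32_inc(&refc);
 *     if (ethr_native_atomic32_dec_return_relb(&refc) == 0)
 *         free_object();
 */
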
#endif /* ETHR_TRY_INLINE_FUNCS */
#endif /* ETHREAD_TILE_ATOMIC_H */