Statistics
| Revision:

root / tmp / org.txm.statsengine.r.core.win32 / res / win32 / library / BH / include / boost / atomic / detail / ops_gcc_sparc.hpp @ 2486

History | View | Annotate | Download (7.3 kB)

1
/*
2
 * Distributed under the Boost Software License, Version 1.0.
3
 * (See accompanying file LICENSE_1_0.txt or copy at
4
 * http://www.boost.org/LICENSE_1_0.txt)
5
 *
6
 * Copyright (c) 2010 Helge Bahmann
7
 * Copyright (c) 2013 Tim Blechmann
8
 * Copyright (c) 2014 Andrey Semashev
9
 */
10
/*!
11
 * \file   atomic/detail/ops_gcc_sparc.hpp
12
 *
13
 * This header contains implementation of the \c operations template.
14
 */
15

    
16
#ifndef BOOST_ATOMIC_DETAIL_OPS_GCC_SPARC_HPP_INCLUDED_
17
#define BOOST_ATOMIC_DETAIL_OPS_GCC_SPARC_HPP_INCLUDED_
18

    
19
#include <boost/memory_order.hpp>
20
#include <boost/atomic/detail/config.hpp>
21
#include <boost/atomic/detail/storage_type.hpp>
22
#include <boost/atomic/detail/operations_fwd.hpp>
23
#include <boost/atomic/capabilities.hpp>
24
#include <boost/atomic/detail/ops_cas_based.hpp>
25
#include <boost/atomic/detail/ops_extending_cas_based.hpp>
26

    
27
#ifdef BOOST_HAS_PRAGMA_ONCE
28
#pragma once
29
#endif
30

    
31
namespace boost {
32
namespace atomics {
33
namespace detail {
34

    
35
struct gcc_sparc_cas_base
36
{
37
    static BOOST_FORCEINLINE void fence_before(memory_order order) BOOST_NOEXCEPT
38
    {
39
        if (order == memory_order_seq_cst)
40
            __asm__ __volatile__ ("membar #Sync" ::: "memory");
41
        else if ((order & memory_order_release) != 0)
42
            __asm__ __volatile__ ("membar #StoreStore | #LoadStore" ::: "memory");
43
    }
44

    
45
    static BOOST_FORCEINLINE void fence_after(memory_order order) BOOST_NOEXCEPT
46
    {
47
        if (order == memory_order_seq_cst)
48
            __asm__ __volatile__ ("membar #Sync" ::: "memory");
49
        else if ((order & (memory_order_consume | memory_order_acquire)) != 0)
50
            __asm__ __volatile__ ("membar #StoreStore | #LoadStore" ::: "memory");
51
    }
52

    
53
    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
54
    {
55
        if (order == memory_order_seq_cst)
56
            __asm__ __volatile__ ("membar #Sync" ::: "memory");
57
    }
58
};
59

    
60
template< bool Signed >
61
struct gcc_sparc_cas32 :
62
    public gcc_sparc_cas_base
63
{
64
    typedef typename make_storage_type< 4u, Signed >::type storage_type;
65
    typedef typename make_storage_type< 4u, Signed >::aligned aligned_storage_type;
66

    
67
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
68
    {
69
        fence_before_store(order);
70
        storage = v;
71
        fence_after_store(order);
72
    }
73

    
74
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
75
    {
76
        storage_type v = storage;
77
        fence_after(order);
78
        return v;
79
    }
80

    
81
    static BOOST_FORCEINLINE bool compare_exchange_strong(
82
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
83
    {
84
        fence_before(success_order);
85
        storage_type previous = expected;
86
        __asm__ __volatile__
87
        (
88
            "cas [%1], %2, %0"
89
            : "+r" (desired)
90
            : "r" (&storage), "r" (previous)
91
            : "memory"
92
        );
93
        const bool success = (desired == previous);
94
        if (success)
95
            fence_after(success_order);
96
        else
97
            fence_after(failure_order);
98
        expected = desired;
99
        return success;
100
    }
101

    
102
    static BOOST_FORCEINLINE bool compare_exchange_weak(
103
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
104
    {
105
        return compare_exchange_strong(storage, expected, desired, success_order, failure_order);
106
    }
107

    
108
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
109
    {
110
        base_type::fence_before(order);
111
        __asm__ __volatile__
112
        (
113
            "swap [%1], %0"
114
            : "+r" (v)
115
            : "r" (&storage)
116
            : "memory"
117
        );
118
        base_type::fence_after(order);
119
        return v;
120
    }
121

    
122
    static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
123
    {
124
        return true;
125
    }
126
};
127

    
128
//! Operations for 32-bit (4-byte) storage: all operations are built from the
//! gcc_sparc_cas32 backend by cas_based_operations.
template< bool Signed >
struct operations< 4u, Signed > :
    public cas_based_operations< gcc_sparc_cas32< Signed > >
{
};
133

    
134
//! Operations for 8-bit (1-byte) storage: emulated on top of the 32-bit
//! operations by extending_cas_based_operations, which widens the value
//! and performs the CAS on the containing 32-bit word.
template< bool Signed >
struct operations< 1u, Signed > :
    public extending_cas_based_operations< operations< 4u, Signed >, 1u, Signed >
{
};
139

    
140
//! Operations for 16-bit (2-byte) storage: emulated on top of the 32-bit
//! operations by extending_cas_based_operations, which widens the value
//! and performs the CAS on the containing 32-bit word.
template< bool Signed >
struct operations< 2u, Signed > :
    public extending_cas_based_operations< operations< 4u, Signed >, 2u, Signed >
{
};
145

    
146
template< bool Signed >
147
struct gcc_sparc_cas64 :
148
    public gcc_sparc_cas_base
149
{
150
    typedef typename make_storage_type< 8u, Signed >::type storage_type;
151
    typedef typename make_storage_type< 8u, Signed >::aligned aligned_storage_type;
152

    
153
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
154
    {
155
        fence_before_store(order);
156
        storage = v;
157
        fence_after_store(order);
158
    }
159

    
160
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
161
    {
162
        storage_type v = storage;
163
        fence_after(order);
164
        return v;
165
    }
166

    
167
    static BOOST_FORCEINLINE bool compare_exchange_strong(
168
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
169
    {
170
        fence_before(success_order);
171
        storage_type previous = expected;
172
        __asm__ __volatile__
173
        (
174
            "casx [%1], %2, %0"
175
            : "+r" (desired)
176
            : "r" (&storage), "r" (previous)
177
            : "memory"
178
        );
179
        const bool success = (desired == previous);
180
        if (success)
181
            fence_after(success_order);
182
        else
183
            fence_after(failure_order);
184
        expected = desired;
185
        return success;
186
    }
187

    
188
    static BOOST_FORCEINLINE bool compare_exchange_weak(
189
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
190
    {
191
        return compare_exchange_strong(storage, expected, desired, success_order, failure_order);
192
    }
193

    
194
    static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
195
    {
196
        return true;
197
    }
198
};
199

    
200
//! Operations for 64-bit (8-byte) storage. The gcc_sparc_cas64 backend has no
//! hardware exchange (no 64-bit swap instruction), so cas_based_exchange first
//! synthesizes exchange from CAS before cas_based_operations builds the rest.
template< bool Signed >
struct operations< 8u, Signed > :
    public cas_based_operations< cas_based_exchange< gcc_sparc_cas64< Signed > > >
{
};
205

    
206

    
207
//! Emits the hardware memory barrier corresponding to the given memory order.
//! Relaxed (and any unknown value) emits nothing.
BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order == memory_order_seq_cst)
    {
        __asm__ __volatile__ ("membar #Sync" ::: "memory");
    }
    else if (order == memory_order_acq_rel)
    {
        __asm__ __volatile__ ("membar #LoadLoad | #LoadStore | #StoreStore" ::: "memory");
    }
    else if (order == memory_order_release)
    {
        __asm__ __volatile__ ("membar #StoreStore | #LoadStore" ::: "memory");
    }
    else if (order == memory_order_acquire || order == memory_order_consume)
    {
        __asm__ __volatile__ ("membar #LoadLoad | #LoadStore" ::: "memory");
    }
    // memory_order_relaxed: no barrier required
}
229

    
230
//! Compiler-only fence: prevents the compiler from reordering memory accesses
//! across this point without emitting any machine instruction.
BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order == memory_order_relaxed)
        return;
    __asm__ __volatile__ ("" ::: "memory");
}
235

    
236
} // namespace detail
237
} // namespace atomics
238
} // namespace boost
239

    
240
#endif // BOOST_ATOMIC_DETAIL_OPS_GCC_SPARC_HPP_INCLUDED_