Statistics
| Revision:

root / tmp / org.txm.statsengine.r.core.win32 / res / win32 / library / BH / include / boost / atomic / detail / gcc-armv6plus.hpp @ 2486

History | View | Annotate | Download (7.8 kB)

1
#ifndef BOOST_ATOMIC_DETAIL_GCC_ARMV6PLUS_HPP
2
#define BOOST_ATOMIC_DETAIL_GCC_ARMV6PLUS_HPP
3

    
4
//  Distributed under the Boost Software License, Version 1.0.
5
//  See accompanying file LICENSE_1_0.txt or copy at
6
//  http://www.boost.org/LICENSE_1_0.txt)
7
//
8
//  Copyright (c) 2009 Helge Bahmann
9
//  Copyright (c) 2009 Phil Endecott
10
//  Copyright (c) 2013 Tim Blechmann
11
//  ARM Code by Phil Endecott, based on other architectures.
12

    
13
#include <boost/cstdint.hpp>
14
#include <boost/atomic/detail/config.hpp>
15

    
16
#ifdef BOOST_HAS_PRAGMA_ONCE
17
#pragma once
18
#endif
19

    
20
// From the ARM Architecture Reference Manual for architecture v6:
21
//
22
// LDREX{<cond>} <Rd>, [<Rn>]
23
// <Rd> Specifies the destination register for the memory word addressed by <Rd>
24
// <Rn> Specifies the register containing the address.
25
//
26
// STREX{<cond>} <Rd>, <Rm>, [<Rn>]
27
// <Rd> Specifies the destination register for the returned status value.
28
//      0  if the operation updates memory
29
//      1  if the operation fails to update memory
30
// <Rm> Specifies the register containing the word to be stored to memory.
31
// <Rn> Specifies the register containing the address.
32
// Rd must not be the same register as Rm or Rn.
33
//
34
// ARM v7 is like ARM v6 plus:
35
// There are half-word and byte versions of the LDREX and STREX instructions,
36
// LDREXH, LDREXB, STREXH and STREXB.
37
// There are also double-word versions, LDREXD and STREXD.
38
// (Actually it looks like these are available from version 6k onwards.)
39
// FIXME these are not yet used; should be mostly a matter of copy-and-paste.
40
// I think you can supply an immediate offset to the address.
41
//
42
// A memory barrier is effected using a "co-processor 15" instruction,
43
// though a separate assembler mnemonic is available for it in v7.
44

    
45
namespace boost {
46
namespace atomics {
47
namespace detail {
48

    
49
// "Thumb 1" is a subset of the ARM instruction set that uses a 16-bit encoding.  It
50
// doesn't include all instructions and in particular it doesn't include the co-processor
51
// instruction used for the memory barrier or the load-locked/store-conditional
52
// instructions.  So, if we're compiling in "Thumb 1" mode, we need to wrap all of our
53
// asm blocks with code to temporarily change to ARM mode.
54
//
55
// You can only change between ARM and Thumb modes when branching using the bx instruction.
56
// bx takes an address specified in a register.  The least significant bit of the address
57
// indicates the mode, so 1 is added to indicate that the destination code is Thumb.
58
// A temporary register is needed for the address and is passed as an argument to these
59
// macros.  It must be one of the "low" registers accessible to Thumb code, specified
60
// using the "l" attribute in the asm statement.
61
//
62
// Architecture v7 introduces "Thumb 2", which does include (almost?) all of the ARM
63
// instruction set.  So in v7 we don't need to change to ARM mode; we can write "universal
64
// assembler" which will assemble to Thumb 2 or ARM code as appropriate.  The only thing
65
// we need to do to make this "universal" assembler mode work is to insert "IT" instructions
66
// to annotate the conditional instructions.  These are ignored in other modes (e.g. v6),
67
// so they can always be present.
68

    
69
#if defined(__thumb__) && !defined(__thumb2__)
// "Thumb 1" build: ldrex/strex and the cp15 barrier are ARM-only instructions,
// so every asm block is wrapped in a branch to ARM mode and a branch back.
// TMPREG receives the branch target address for bx; the low bit of that
// address selects the instruction set (0 = ARM, 1 = Thumb), which is why the
// END macro adds 1 to the label address when returning to Thumb code.
#define BOOST_ATOMIC_ARM_ASM_START(TMPREG) "adr " #TMPREG ", 1f\n" "bx " #TMPREG "\n" ".arm\n" ".align 4\n" "1: "
#define BOOST_ATOMIC_ARM_ASM_END(TMPREG)   "adr " #TMPREG ", 1f + 1\n" "bx " #TMPREG "\n" ".thumb\n" ".align 2\n" "1: "
#else
// ARM mode or Thumb 2: no mode switch is required, the macros expand to nothing.
// The tmpreg is wasted in this case, which is non-optimal.
#define BOOST_ATOMIC_ARM_ASM_START(TMPREG)
#define BOOST_ATOMIC_ARM_ASM_END(TMPREG)
#endif
77

    
78
#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7EM__) || defined(__ARM_ARCH_7S__)
// ARMv7 provides a dedicated Data Memory Barrier instruction.
#define BOOST_ATOMIC_ARM_DMB "dmb\n"
#else
// Pre-v7: the barrier is issued via a CP15 coprocessor write
// (c7, c10, opcode2 5 = Data Memory Barrier); the r0 source value is ignored.
#define BOOST_ATOMIC_ARM_DMB "mcr\tp15, 0, r0, c7, c10, 5\n"
#endif
83

    
84
// Emits a full hardware memory barrier: "dmb" on ARMv7, the CP15 c7/c10/5
// coprocessor write on earlier architectures (see BOOST_ATOMIC_ARM_DMB).
// brtmp is a scratch "low" register ("l" constraint) used only by the
// Thumb-1 mode-switch trampoline macros; it is otherwise unused.
inline void
arm_barrier(void) BOOST_NOEXCEPT
{
    int brtmp;
    __asm__ __volatile__
    (
        BOOST_ATOMIC_ARM_ASM_START(%0)
        BOOST_ATOMIC_ARM_DMB
        BOOST_ATOMIC_ARM_ASM_END(%0)
        : "=&l" (brtmp) :: "memory"
    );
}
96

    
97
inline void
98
platform_fence_before(memory_order order) BOOST_NOEXCEPT
99
{
100
    switch(order)
101
    {
102
    case memory_order_release:
103
    case memory_order_acq_rel:
104
    case memory_order_seq_cst:
105
        arm_barrier();
106
    case memory_order_consume:
107
    default:;
108
    }
109
}
110

    
111
inline void
112
platform_fence_after(memory_order order) BOOST_NOEXCEPT
113
{
114
    switch(order)
115
    {
116
    case memory_order_acquire:
117
    case memory_order_acq_rel:
118
    case memory_order_seq_cst:
119
        arm_barrier();
120
    default:;
121
    }
122
}
123

    
124
inline void
125
platform_fence_before_store(memory_order order) BOOST_NOEXCEPT
126
{
127
    platform_fence_before(order);
128
}
129

    
130
inline void
131
platform_fence_after_store(memory_order order) BOOST_NOEXCEPT
132
{
133
    if (order == memory_order_seq_cst)
134
        arm_barrier();
135
}
136

    
137
inline void
138
platform_fence_after_load(memory_order order) BOOST_NOEXCEPT
139
{
140
    platform_fence_after(order);
141
}
142

    
143
// Weak 32-bit compare-and-swap implemented with LDREX/STREX.
// Returns true if *ptr was equal to expected and was replaced by desired.
// Returns false on mismatch OR on a lost exclusive reservation (spurious
// failure — callers must loop); in both cases 'expected' is updated with
// the value actually read from *ptr.
// No memory barriers are issued here; callers pair this with
// platform_fence_before/platform_fence_after.
// The "ittt eq" annotation is required when this assembles as Thumb-2
// "universal" code and is ignored when assembling for ARM mode.
// %2 is an "l" (low) register because the Thumb-1 trampoline macros use it.
template<typename T>
inline bool
platform_cmpxchg32(T & expected, T desired, volatile T * ptr) BOOST_NOEXCEPT
{
    int success;
    int tmp;
    __asm__ __volatile__
    (
        BOOST_ATOMIC_ARM_ASM_START(%2)
        "mov     %1, #0\n"        // success = 0
        "ldrex   %0, %3\n"      // expected' = *(&i)
        "teq     %0, %4\n"        // flags = expected'==expected
        "ittt    eq\n"
        "strexeq %2, %5, %3\n"  // if (flags.equal) *(&i) = desired, tmp = !OK
        "teqeq   %2, #0\n"        // if (flags.equal) flags = tmp==0
        "moveq   %1, #1\n"        // if (flags.equal) success = 1
        BOOST_ATOMIC_ARM_ASM_END(%2)
            : "=&r" (expected),  // %0
            "=&r" (success),   // %1
            "=&l" (tmp),       // %2
            "+Q" (*ptr)          // %3
            : "r" (expected),    // %4
            "r" (desired) // %5
            : "cc"
    );
    return success;
}
170

    
171
}
172
}
173

    
174
#define BOOST_ATOMIC_THREAD_FENCE 2
175
inline void
176
atomic_thread_fence(memory_order order)
177
{
178
    switch(order)
179
    {
180
    case memory_order_acquire:
181
    case memory_order_release:
182
    case memory_order_acq_rel:
183
    case memory_order_seq_cst:
184
        atomics::detail::arm_barrier();
185
    default:;
186
    }
187
}
188

    
189
#define BOOST_ATOMIC_SIGNAL_FENCE 2
// Signal fence only needs to prevent compiler reordering (same thread,
// same core), not CPU reordering, so an empty asm statement with a
// "memory" clobber is sufficient regardless of the requested order.
inline void
atomic_signal_fence(memory_order)
{
    __asm__ __volatile__ ("" ::: "memory");
}
195

    
196
// Lock-free atomic flag built on platform_cmpxchg32 and the platform fences.
class atomic_flag
{
private:
    uint32_t v_;   // 0 = clear, 1 = set

public:
    BOOST_CONSTEXPR atomic_flag(void) BOOST_NOEXCEPT : v_(0) {}

    // Resets the flag to false with the store fences implied by 'order'.
    void
    clear(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        atomics::detail::platform_fence_before_store(order);
        const_cast<volatile uint32_t &>(v_) = 0;
        atomics::detail::platform_fence_after_store(order);
    }

    // Atomically sets the flag and returns its previous value.
    // Loops on the weak CAS (which may fail spuriously); bails out early
    // without storing once the flag is observed already set.
    bool
    test_and_set(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        atomics::detail::platform_fence_before(order);
        uint32_t expected = v_;
        do {
            if (expected == 1)
                break;
        } while (!atomics::detail::platform_cmpxchg32(expected, (uint32_t)1, &v_));
        atomics::detail::platform_fence_after(order);
        return expected;
    }

    BOOST_DELETED_FUNCTION(atomic_flag(const atomic_flag &))
    BOOST_DELETED_FUNCTION(atomic_flag& operator=(const atomic_flag &))
};
228

    
229
#define BOOST_ATOMIC_FLAG_LOCK_FREE 2
230

    
231
}
232

    
233
#undef BOOST_ATOMIC_ARM_ASM_START
234
#undef BOOST_ATOMIC_ARM_ASM_END
235

    
236
#include <boost/atomic/detail/base.hpp>
237

    
238
#if !defined(BOOST_ATOMIC_FORCE_FALLBACK)
239

    
240
#define BOOST_ATOMIC_CHAR_LOCK_FREE 2
241
#define BOOST_ATOMIC_CHAR16_T_LOCK_FREE 2
242
#define BOOST_ATOMIC_CHAR32_T_LOCK_FREE 2
243
#define BOOST_ATOMIC_WCHAR_T_LOCK_FREE 2
244
#define BOOST_ATOMIC_SHORT_LOCK_FREE 2
245
#define BOOST_ATOMIC_INT_LOCK_FREE 2
246
#define BOOST_ATOMIC_LONG_LOCK_FREE 2
247
#define BOOST_ATOMIC_LLONG_LOCK_FREE 0
248
#define BOOST_ATOMIC_POINTER_LOCK_FREE 2
249
#define BOOST_ATOMIC_BOOL_LOCK_FREE 2
250

    
251
#include <boost/atomic/detail/cas32weak.hpp>
252

    
253
#endif /* !defined(BOOST_ATOMIC_FORCE_FALLBACK) */
254

    
255
#endif