Statistics
| Revision:

root / tmp / org.txm.statsengine.r.core.win32 / res / win32 / library / BH / include / boost / atomic / detail / linux-arm.hpp @ 2486

History | View | Annotate | Download (5.2 kB)

1
#ifndef BOOST_ATOMIC_DETAIL_LINUX_ARM_HPP
2
#define BOOST_ATOMIC_DETAIL_LINUX_ARM_HPP
3

    
4
//  Distributed under the Boost Software License, Version 1.0.
5
//  See accompanying file LICENSE_1_0.txt or copy at
6
//  http://www.boost.org/LICENSE_1_0.txt)
7
//
8
//  Copyright (c) 2009, 2011 Helge Bahmann
9
//  Copyright (c) 2009 Phil Endecott
10
//  Copyright (c) 2013 Tim Blechmann
11
//  Linux-specific code by Phil Endecott
12

    
13
// Different ARM processors have different atomic instructions.  In particular,
14
// architecture versions before v6 (which are still in widespread use, e.g. the
15
// Intel/Marvell XScale chips like the one in the NSLU2) have only atomic swap.
16
// On Linux the kernel provides some support that lets us abstract away from
17
// these differences: it provides emulated CAS and barrier functions at special
18
// addresses that are guaranteed not to be interrupted by the kernel.  Using
19
// this facility is slightly slower than inline assembler would be, but much
20
// faster than a system call.
21
//
22
// While this emulated CAS is "strong" in the sense that it does not fail
23
// "spuriously" (i.e.: it never fails to perform the exchange when the value
24
// found equals the value expected), it does not return the found value on
25
// failure. To satisfy the atomic API, compare_exchange_{weak|strong} must
26
// return the found value on failure, and we have to manually load this value
27
// after the emulated CAS reports failure. This in turn introduces a race
28
// between the CAS failing (due to the "wrong" value being found) and subsequently
29
// loading (which might turn up the "right" value). From an application's
30
// point of view this looks like "spurious failure", and therefore the
31
// emulated CAS is only good enough to provide compare_exchange_weak
32
// semantics.
33

    
34
#include <cstddef>
35
#include <boost/cstdint.hpp>
36
#include <boost/memory_order.hpp>
37
#include <boost/atomic/detail/config.hpp>
38

    
39
#ifdef BOOST_HAS_PRAGMA_ONCE
40
#pragma once
41
#endif
42

    
43
namespace boost {
44
namespace atomics {
45
namespace detail {
46

    
47
// Issue a full hardware memory barrier via the kernel-provided user
// helper mapped at the fixed address 0xffff0fa0 (__kernel_memory_barrier,
// see the Linux kernel's kuser helpers ABI). The kernel picks the right
// implementation for the running CPU, which is what lets this file work
// on pre-v6 ARM cores that have no native barrier instruction (see the
// comment block at the top of this file).
inline void
arm_barrier(void)
{
    void (*kernel_dmb)(void) = (void (*)(void)) 0xffff0fa0;
    kernel_dmb();
}
53

    
54
// Emit the barrier required *before* an atomic read-modify-write with the
// given ordering. Release-flavored orderings (release, acq_rel, seq_cst)
// need a preceding barrier; acquire/consume/relaxed need nothing here.
inline void
platform_fence_before(memory_order order)
{
    switch(order) {
        case memory_order_release:
        case memory_order_acq_rel:
        case memory_order_seq_cst:
            arm_barrier();
            // intentional fall-through: nothing more to do for these orders
        case memory_order_consume:
        default:;
    }
}
66

    
67
// Emit the barrier required *after* an atomic operation with the given
// ordering. Acquire-flavored orderings (acquire, acq_rel, seq_cst) need a
// trailing barrier; the fall-through into the empty default covers
// release/consume/relaxed, which need nothing here.
inline void
platform_fence_after(memory_order order)
{
    switch(order) {
        case memory_order_acquire:
        case memory_order_acq_rel:
        case memory_order_seq_cst:
            arm_barrier();
        default:;
    }
}
78

    
79
// A plain store needs exactly the same preceding barrier as any other
// atomic operation with the given ordering, so simply delegate.
inline void
platform_fence_before_store(memory_order order)
{
    platform_fence_before(order);
}
84

    
85
inline void
86
platform_fence_after_store(memory_order order)
87
{
88
    if (order == memory_order_seq_cst)
89
        arm_barrier();
90
}
91

    
92
// A plain load needs exactly the same trailing barrier as any other
// atomic operation with the given ordering, so simply delegate.
inline void
platform_fence_after_load(memory_order order)
{
    platform_fence_after(order);
}
97

    
98
// 32-bit compare-and-swap via the kernel-provided user helper at the fixed
// address 0xffff0fc0 (__kernel_cmpxchg, Linux kuser helpers ABI); it
// returns 0 when the exchange was performed.
//
// On failure 'expected' is updated by reloading '*ptr'. Because that
// reload is a separate step, the location may have changed back to the
// expected value in between, which makes failures look spurious to the
// caller — hence this primitive only backs compare_exchange_weak
// semantics (see the comment block at the top of this file).
//
// Returns true iff the value at 'ptr' was 'expected' and was replaced by
// 'desired'.
template<typename T>
inline bool
platform_cmpxchg32(T & expected, T desired, volatile T * ptr)
{
    typedef T (*kernel_cmpxchg32_t)(T oldval, T newval, volatile T * ptr);

    if (((kernel_cmpxchg32_t) 0xffff0fc0)(expected, desired, ptr) == 0) {
        return true;
    } else {
        expected = *ptr;
        return false;
    }
}
111

    
112
}
113
}
114

    
115
#define BOOST_ATOMIC_THREAD_FENCE 2
116
inline void
117
atomic_thread_fence(memory_order order)
118
{
119
    switch(order) {
120
        case memory_order_acquire:
121
        case memory_order_release:
122
        case memory_order_acq_rel:
123
        case memory_order_seq_cst:
124
            atomics::detail::arm_barrier();
125
        default:;
126
    }
127
}
128

    
129
#define BOOST_ATOMIC_SIGNAL_FENCE 2
130
// Signal fence: only the compiler must be prevented from reordering
// memory accesses across this point; no hardware barrier is needed
// because a signal handler runs on the same core as the thread it
// interrupts. The empty asm with a "memory" clobber is the standard
// GCC compiler-barrier idiom.
inline void
atomic_signal_fence(memory_order)
{
    __asm__ __volatile__ ("" ::: "memory");
}
135

    
136
// Lock-free atomic_flag built on the kernel cmpxchg helper, storing the
// flag state in a uint32_t (0 = clear, 1 = set). Non-copyable, as
// required by the atomic_flag contract.
class atomic_flag
{
private:
    atomic_flag(const atomic_flag &) /* = delete */ ;
    atomic_flag & operator=(const atomic_flag &) /* = delete */ ;
    uint32_t v_;   // flag value: 0 = clear, 1 = set
public:
    BOOST_CONSTEXPR atomic_flag(void) BOOST_NOEXCEPT : v_(0) {}

    // Reset the flag to false with the requested store ordering.
    void
    clear(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        atomics::detail::platform_fence_before_store(order);
        const_cast<volatile uint32_t &>(v_) = 0;
        atomics::detail::platform_fence_after_store(order);
    }

    // Atomically set the flag to true and return its previous value.
    // The CAS loop retries on (possibly spurious) failure; platform_cmpxchg32
    // refreshes 'expected' on each failed attempt, so once a 1 is observed
    // the flag is already set and the loop exits without another CAS.
    bool
    test_and_set(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        atomics::detail::platform_fence_before(order);
        uint32_t expected = v_;
        do {
            if (expected == 1)
                break;
        } while (!atomics::detail::platform_cmpxchg32(expected, (uint32_t)1, &v_));
        atomics::detail::platform_fence_after(order);
        return expected;
    }
};
166

    
167
#define BOOST_ATOMIC_FLAG_LOCK_FREE 2
168

    
169
}
170

    
171
#include <boost/atomic/detail/base.hpp>
172

    
173
#if !defined(BOOST_ATOMIC_FORCE_FALLBACK)
174

    
175
#define BOOST_ATOMIC_CHAR_LOCK_FREE 2
176
#define BOOST_ATOMIC_CHAR16_T_LOCK_FREE 2
177
#define BOOST_ATOMIC_CHAR32_T_LOCK_FREE 2
178
#define BOOST_ATOMIC_WCHAR_T_LOCK_FREE 2
179
#define BOOST_ATOMIC_SHORT_LOCK_FREE 2
180
#define BOOST_ATOMIC_INT_LOCK_FREE 2
181
#define BOOST_ATOMIC_LONG_LOCK_FREE 2
182
#define BOOST_ATOMIC_LLONG_LOCK_FREE 0
183
#define BOOST_ATOMIC_POINTER_LOCK_FREE 2
184
#define BOOST_ATOMIC_BOOL_LOCK_FREE 2
185

    
186
#include <boost/atomic/detail/cas32weak.hpp>
187

    
188
#endif /* !defined(BOOST_ATOMIC_FORCE_FALLBACK) */
189

    
190
#endif