00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029 #ifndef __TBB_atomic_H
00030 #define __TBB_atomic_H
00031
00032 #include "tbb_stddef.h"
00033 #include <cstddef>
00034
00035 #if _MSC_VER
00036 #define __TBB_LONG_LONG __int64
00037 #else
00038 #define __TBB_LONG_LONG long long
00039 #endif
00040
00041 #include "tbb_machine.h"
00042
00043 #if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
00044
00045 #pragma warning (push)
00046 #pragma warning (disable: 4244 4267)
00047 #endif
00048
00049 namespace tbb {
00050
//! Specifies memory semantics for atomic operations.
enum memory_semantics {
    //! Sequential consistency (a full memory fence).
    full_fence,
    //! Acquire semantics.
    acquire,
    //! Release semantics.
    release,
    //! No ordering guarantees.
    relaxed
};
00062
00064 namespace internal {
00065
// Declares field f of type t forced to alignment a, using whichever
// compiler-specific alignment syntax is available.
#if __TBB_ATTRIBUTE_ALIGNED_PRESENT
    // GCC-style attribute syntax.
    #define __TBB_DECL_ATOMIC_FIELD(t,f,a) t f  __attribute__ ((aligned(a)));
#elif __TBB_DECLSPEC_ALIGN_PRESENT
    // MSVC-style declspec syntax.
    #define __TBB_DECL_ATOMIC_FIELD(t,f,a) __declspec(align(a)) t f;
#else
    #error Do not know syntax for forcing alignment.
#endif
00073
//! Maps a size in bytes to a signed integral type of that size.
/** atomic_rep<S>::word is the raw "word" type that the machine-level
    atomic primitives operate on for an S-byte value. */
template<size_t S>
struct atomic_rep;

template<>
struct atomic_rep<1> {      // Specialization for 1-byte values
    typedef int8_t word;
};
template<>
struct atomic_rep<2> {      // Specialization for 2-byte values
    typedef int16_t word;
};
template<>
struct atomic_rep<4> {      // Specialization for 4-byte values
#if _MSC_VER && !_WIN64
    // NOTE(review): on 32-bit MSVC intptr_t (same width as int32_t there) is used
    // instead of int32_t -- presumably to avoid portability warnings. TODO confirm.
    typedef intptr_t word;
#else
    typedef int32_t word;
#endif
};
#if __TBB_64BIT_ATOMICS
template<>
struct atomic_rep<8> {      // Specialization for 8-byte values
    typedef int64_t word;
};
#endif
00100
//! Storage for an atomic value of type value_type, aligned to `size` bytes.
template<typename value_type, size_t size>
struct aligned_storage;

// Partial specialization of aligned_storage for each supported size S.
// When atomic constructors are available (__TBB_ATOMIC_CTORS), a constexpr
// constructor is provided so atomics can be constant-initialized.
#if __TBB_ATOMIC_CTORS
#define ATOMIC_STORAGE_PARTIAL_SPECIALIZATION(S) \
template<typename value_type> \
struct aligned_storage<value_type,S> { \
    __TBB_DECL_ATOMIC_FIELD(value_type,my_value,S) \
    aligned_storage() = default ; \
    constexpr aligned_storage(value_type value):my_value(value){} \
}; \

#else
#define ATOMIC_STORAGE_PARTIAL_SPECIALIZATION(S) \
template<typename value_type> \
struct aligned_storage<value_type,S> { \
    __TBB_DECL_ATOMIC_FIELD(value_type,my_value,S) \
}; \

#endif

// Size 1 needs no forced alignment, so it is a plain (non-macro) specialization.
template<typename value_type>
struct aligned_storage<value_type,1> {
    value_type my_value;
#if __TBB_ATOMIC_CTORS
    aligned_storage() = default ;
    constexpr aligned_storage(value_type value):my_value(value){}
#endif
};

ATOMIC_STORAGE_PARTIAL_SPECIALIZATION(2)
ATOMIC_STORAGE_PARTIAL_SPECIALIZATION(4)
#if __TBB_64BIT_ATOMICS
ATOMIC_STORAGE_PARTIAL_SPECIALIZATION(8)
#endif
00137
//! Dispatches a read-modify-write operation of the given Size and memory
//! semantics M to the corresponding machine-level primitive.
template<size_t Size, memory_semantics M>
struct atomic_traits;

// Defines atomic_traits<S,M> for a single (size, semantics) pair, forwarding
// to the fence-specific machine primitives __TBB_machine_<op><S><M>.
// Used when the machine layer provides per-semantics primitives.
#define __TBB_DECL_FENCED_ATOMIC_PRIMITIVES(S,M) \
    template<> struct atomic_traits<S,M> { \
        typedef atomic_rep<S>::word word; \
        inline static word compare_and_swap( volatile void* location, word new_value, word comparand ) { \
            return __TBB_machine_cmpswp##S##M(location,new_value,comparand); \
        } \
        inline static word fetch_and_add( volatile void* location, word addend ) { \
            return __TBB_machine_fetchadd##S##M(location,addend); \
        } \
        inline static word fetch_and_store( volatile void* location, word value ) { \
            return __TBB_machine_fetchstore##S##M(location,value); \
        } \
    };

// Defines atomic_traits<S,M> for all semantics M at once, forwarding to the
// single (full-fence) machine primitive __TBB_machine_<op><S>.
// Used when the machine layer has no per-semantics variants.
#define __TBB_DECL_ATOMIC_PRIMITIVES(S) \
    template<memory_semantics M> \
    struct atomic_traits<S,M> { \
        typedef atomic_rep<S>::word word; \
        inline static word compare_and_swap( volatile void* location, word new_value, word comparand ) { \
            return __TBB_machine_cmpswp##S(location,new_value,comparand); \
        } \
        inline static word fetch_and_add( volatile void* location, word addend ) { \
            return __TBB_machine_fetchadd##S(location,addend); \
        } \
        inline static word fetch_and_store( volatile void* location, word value ) { \
            return __TBB_machine_fetchstore##S(location,value); \
        } \
    };
00169
//! Dispatches plain atomic load/store with memory semantics M to the
//! machine-level __TBB_load_<M>/__TBB_store_<M> primitives.
template<memory_semantics M>
struct atomic_load_store_traits;

// Defines atomic_load_store_traits<M> for a single memory semantics M.
#define __TBB_DECL_ATOMIC_LOAD_STORE_PRIMITIVES(M) \
    template<> struct atomic_load_store_traits<M> { \
        template <typename T> \
        inline static T load( const volatile T& location ) { \
            return __TBB_load_##M( location ); \
        } \
        template <typename T> \
        inline static void store( volatile T& location, T value ) { \
            __TBB_store_##M( location, value ); \
        } \
    }

#if __TBB_USE_FENCED_ATOMICS
// Machine layer supplies per-semantics primitives: instantiate atomic_traits
// for every supported (size, semantics) combination.
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(1,full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(2,full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(4,full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(1,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(2,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(4,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(1,release)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(2,release)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(4,release)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(1,relaxed)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(2,relaxed)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(4,relaxed)
#if __TBB_64BIT_ATOMICS
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(8,full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(8,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(8,release)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(8,relaxed)
#endif
#else
// Machine layer has only one primitive per size; it serves all semantics.
__TBB_DECL_ATOMIC_PRIMITIVES(1)
__TBB_DECL_ATOMIC_PRIMITIVES(2)
__TBB_DECL_ATOMIC_PRIMITIVES(4)
#if __TBB_64BIT_ATOMICS
__TBB_DECL_ATOMIC_PRIMITIVES(8)
#endif
#endif

// Load/store traits exist for all four memory semantics.
__TBB_DECL_ATOMIC_LOAD_STORE_PRIMITIVES(full_fence);
__TBB_DECL_ATOMIC_LOAD_STORE_PRIMITIVES(acquire);
__TBB_DECL_ATOMIC_LOAD_STORE_PRIMITIVES(release);
__TBB_DECL_ATOMIC_LOAD_STORE_PRIMITIVES(relaxed);
00217
//! Additive inverse of 1 for type T, computed with binary minus
//! (avoids applying unary minus to a possibly-unsigned type).
#define __TBB_MINUS_ONE(T) (T(T(0)-T(1)))
00222
//! Base class that provides basic functionality for atomic<T> without fetch_and_add.
/** Stores the value in aligned storage and implements load/store,
    fetch_and_store, and compare_and_swap by punning the value to the
    same-sized integral word and calling the machine primitives. */
template<typename T>
struct atomic_impl {
protected:
    // The value itself, aligned to its own size so hardware atomics apply.
    aligned_storage<T,sizeof(T)> my_storage;
private:
    //! Union that reinterprets a value of type value_type as the same-sized integral word (and back).
    template<typename value_type>
    union converter {
        typedef typename atomic_rep<sizeof(value_type)>::word bits_type;
        converter(){}
        converter(value_type a_value) : value(a_value) {}
        value_type value;
        bits_type bits;
    };

    //! Union that reinterprets a pointer-to-value as a pointer to the same-sized integral word.
    template<typename value_t>
    union ptr_converter;

    template<typename value_t>
    union ptr_converter<value_t *> {
        typedef typename atomic_rep<sizeof(value_t)>::word * bits_ptr_type;
        ptr_converter(){}
        ptr_converter(value_t* a_value) : value(a_value) {}
        value_t* value;
        bits_ptr_type bits;
    };

    //! Converts a value to its integral bit pattern.
    template<typename value_t>
    static typename converter<value_t>::bits_type to_bits(value_t value){
        return converter<value_t>(value).bits;
    }
    //! Converts an integral bit pattern back to a value of type value_t.
    template<typename value_t>
    static value_t to_value(typename converter<value_t>::bits_type bits){
        converter<value_t> u;
        u.bits = bits;
        return u.value;
    }

    //! Converts a pointer-to-value into a pointer to its same-sized integral word.
    template<typename value_t>
    static typename ptr_converter<value_t*>::bits_ptr_type to_bits_ptr(value_t* value){
        // NOTE(review): Sun C++ gets a direct cast instead of the union route,
        // presumably working around a front-end limitation -- TODO confirm.
#if !__SUNPRO_CC
        return ptr_converter<value_t*>(value).bits;
#else
        return typename ptr_converter<value_t*>::bits_ptr_type (value);
#endif
    }

public:
    typedef T value_type;

#if __TBB_ATOMIC_CTORS
    atomic_impl() = default ;
    constexpr atomic_impl(value_type value):my_storage(value){}
#endif
    //! Atomically replace the stored value with \p value; returns the previous value.
    template<memory_semantics M>
    value_type fetch_and_store( value_type value ) {
        return to_value<value_type>(internal::atomic_traits<sizeof(value_type),M>::fetch_and_store(&my_storage.my_value,to_bits(value)));
    }

    //! fetch_and_store with the default full_fence semantics.
    value_type fetch_and_store( value_type value ) {
        return fetch_and_store<full_fence>(value);
    }

    //! Atomically store \p value if the current value equals \p comparand; returns the previous value.
    template<memory_semantics M>
    value_type compare_and_swap( value_type value, value_type comparand ) {
        return to_value<value_type>(internal::atomic_traits<sizeof(value_type),M>::compare_and_swap(&my_storage.my_value,to_bits(value),to_bits(comparand)));
    }

    //! compare_and_swap with the default full_fence semantics.
    value_type compare_and_swap( value_type value, value_type comparand ) {
        return compare_and_swap<full_fence>(value,comparand);
    }

    //! Implicit read of the value, with acquire semantics.
    operator value_type() const volatile {
        return to_value<value_type>(__TBB_load_with_acquire(*to_bits_ptr(&my_storage.my_value)));
    }

    //! Read the value with the explicitly requested memory semantics.
    template<memory_semantics M>
    value_type load () const {
        return to_value<value_type>(internal::atomic_load_store_traits<M>::load(*to_bits_ptr(&my_storage.my_value)));
    }

    //! Read with the default acquire semantics.
    value_type load () const {
        return load<acquire>();
    }

    //! Write the value with the explicitly requested memory semantics.
    template<memory_semantics M>
    void store ( value_type value ) {
        internal::atomic_load_store_traits<M>::store( *to_bits_ptr(&my_storage.my_value), to_bits(value));
    }

    //! Write with the default release semantics.
    void store ( value_type value ) {
        store<release>( value );
    }

protected:
    //! Store \p rhs with release semantics; returns \p rhs so assignment operators can chain.
    value_type store_with_release( value_type rhs ) {
        __TBB_store_with_release(*to_bits_ptr(&my_storage.my_value),to_bits(rhs));
        return rhs;
    }
};
00334
//! Base class that provides basic functionality for atomic<T> with fetch_and_add.
/** I is the stored (integral or pointer) type, D is the type of addends, and
    StepType is the unit of an addend: each addend is scaled by sizeof(StepType).
    The integral specializations below use StepType=char (scale 1); atomic<T*>
    uses StepType=T so pointer arithmetic steps by sizeof(T). */
template<typename I, typename D, typename StepType>
struct atomic_impl_with_arithmetic: atomic_impl<I> {
public:
    typedef I value_type;
#if __TBB_ATOMIC_CTORS
    atomic_impl_with_arithmetic() = default ;
    constexpr atomic_impl_with_arithmetic(value_type value): atomic_impl<I>(value){}
#endif
    //! Atomically add addend*sizeof(StepType); returns the previous value.
    template<memory_semantics M>
    value_type fetch_and_add( D addend ) {
        return value_type(internal::atomic_traits<sizeof(value_type),M>::fetch_and_add( &this->my_storage.my_value, addend*sizeof(StepType) ));
    }

    //! fetch_and_add with the default full_fence semantics.
    value_type fetch_and_add( D addend ) {
        return fetch_and_add<full_fence>(addend);
    }

    //! Atomically increment; returns the previous value.
    template<memory_semantics M>
    value_type fetch_and_increment() {
        return fetch_and_add<M>(1);
    }

    //! fetch_and_increment with the default full_fence semantics.
    value_type fetch_and_increment() {
        return fetch_and_add(1);
    }

    //! Atomically decrement; returns the previous value.
    template<memory_semantics M>
    value_type fetch_and_decrement() {
        return fetch_and_add<M>(__TBB_MINUS_ONE(D));
    }

    //! fetch_and_decrement with the default full_fence semantics.
    value_type fetch_and_decrement() {
        return fetch_and_add(__TBB_MINUS_ONE(D));
    }

public:
    value_type operator+=( D value ) {
        return fetch_and_add(value)+value;
    }

    value_type operator-=( D value ) {
        // D(0)-value computes the additive inverse with binary minus,
        // avoiding unary minus on a possibly-unsigned D.
        return operator+=(D(0)-value);
    }

    value_type operator++() {
        return fetch_and_add(1)+1;
    }

    value_type operator--() {
        return fetch_and_add(__TBB_MINUS_ONE(D))-1;
    }

    value_type operator++(int) {
        return fetch_and_add(1);
    }

    value_type operator--(int) {
        return fetch_and_add(__TBB_MINUS_ONE(D));
    }
};
00401
00402 }
00404
//! Primary template for atomic<T>: load/store/CAS but no arithmetic.
/** Arithmetic-capable specializations for integral and pointer types follow below. */
template<typename T>
struct atomic: internal::atomic_impl<T> {
#if __TBB_ATOMIC_CTORS
    atomic() = default;
    constexpr atomic(T arg): internal::atomic_impl<T>(arg) {}
#endif
    //! Assignment stores rhs with release semantics and returns it.
    T operator=( T rhs ) {
        // "this->" is required: store_with_release lives in a dependent base class.
        return this->store_with_release(rhs);
    }
    atomic<T>& operator=( const atomic<T>& rhs ) {this->store_with_release(rhs); return *this;}
};
00420
// Expands to a full specialization of atomic<T> for an integral type T,
// with arithmetic support (StepType char, so addends are not scaled).
#if __TBB_ATOMIC_CTORS
#define __TBB_DECL_ATOMIC(T) \
    template<> struct atomic<T>: internal::atomic_impl_with_arithmetic<T,T,char> { \
        atomic() = default; \
        constexpr atomic(T arg): internal::atomic_impl_with_arithmetic<T,T,char>(arg) {} \
        \
        T operator=( T rhs ) {return store_with_release(rhs);} \
        atomic<T>& operator=( const atomic<T>& rhs ) {store_with_release(rhs); return *this;} \
    };
#else
#define __TBB_DECL_ATOMIC(T) \
    template<> struct atomic<T>: internal::atomic_impl_with_arithmetic<T,T,char> { \
        T operator=( T rhs ) {return store_with_release(rhs);} \
        atomic<T>& operator=( const atomic<T>& rhs ) {store_with_release(rhs); return *this;} \
    };
#endif
00437
#if __TBB_64BIT_ATOMICS
// 64-bit atomics are supported on this platform.
__TBB_DECL_ATOMIC(__TBB_LONG_LONG)
__TBB_DECL_ATOMIC(unsigned __TBB_LONG_LONG)
#else
// 64-bit atomics not supported: atomic<long long> is not declared.
#endif
__TBB_DECL_ATOMIC(long)
__TBB_DECL_ATOMIC(unsigned long)

#if _MSC_VER && !_WIN64
#if __TBB_ATOMIC_CTORS
// Variant of __TBB_DECL_ATOMIC whose assignment operator accepts a wider
// type U (size_t/ptrdiff_t) and narrows it explicitly to T.
// NOTE(review): presumably this exists to avoid truncation warnings when
// assigning size_t/ptrdiff_t on 32-bit MSVC -- TODO confirm.
#define __TBB_DECL_ATOMIC_ALT(T,U) \
    template<> struct atomic<T>: internal::atomic_impl_with_arithmetic<T,T,char> {  \
        atomic() = default ; \
        constexpr atomic(T arg): internal::atomic_impl_with_arithmetic<T,T,char>(arg) {} \
        T operator=( U rhs ) {return store_with_release(T(rhs));} \
        atomic<T>& operator=( const atomic<T>& rhs ) {store_with_release(rhs); return *this;} \
    };
#else
#define __TBB_DECL_ATOMIC_ALT(T,U) \
    template<> struct atomic<T>: internal::atomic_impl_with_arithmetic<T,T,char> { \
        T operator=( U rhs ) {return store_with_release(T(rhs));} \
        atomic<T>& operator=( const atomic<T>& rhs ) {store_with_release(rhs); return *this;} \
    };
#endif
__TBB_DECL_ATOMIC_ALT(unsigned,size_t)
__TBB_DECL_ATOMIC_ALT(int,ptrdiff_t)
#else
__TBB_DECL_ATOMIC(unsigned)
__TBB_DECL_ATOMIC(int)
#endif
00485
//! Specialization for atomic<T*>: pointer arithmetic in units of sizeof(T) (StepType=T).
template<typename T> struct atomic<T*>: internal::atomic_impl_with_arithmetic<T*,ptrdiff_t,T> {
#if __TBB_ATOMIC_CTORS
    atomic() = default ;
    constexpr atomic(T* arg): internal::atomic_impl_with_arithmetic<T*,ptrdiff_t,T>(arg) {}
#endif
    //! Assignment stores rhs with release semantics and returns it.
    T* operator=( T* rhs ) {
        // "this->" is required: store_with_release lives in a dependent base class.
        return this->store_with_release(rhs);
    }
    atomic<T*>& operator=( const atomic<T*>& rhs ) {
        this->store_with_release(rhs); return *this;
    }
    //! Member access through the stored pointer (reads it via the implicit conversion).
    T* operator->() const {
        return (*this);
    }
};
00503
//! Specialization for atomic<void*>: no pointer arithmetic, so derives from plain atomic_impl.
template<> struct atomic<void*>: internal::atomic_impl<void*> {
#if __TBB_ATOMIC_CTORS
    atomic() = default ;
    constexpr atomic(void* arg): internal::atomic_impl<void*>(arg) {}
#endif
    //! Assignment stores rhs with release semantics and returns it.
    void* operator=( void* rhs ) {
        // "this->" is required: store_with_release lives in a dependent base class.
        return this->store_with_release(rhs);
    }
    atomic<void*>& operator=( const atomic<void*>& rhs ) {
        this->store_with_release(rhs); return *this;
    }
};
00518
00519
00520
00521
//! Free-function form of atomic load with explicit memory semantics.
template <memory_semantics M, typename T>
T load ( const atomic<T>& a ) { return a.template load<M>(); }

//! Free-function form of atomic store with explicit memory semantics.
template <memory_semantics M, typename T>
void store ( atomic<T>& a, T value ) { return a.template store<M>(value); }
00527
00528 namespace interface6{
00530 template<typename T>
00531 atomic<T> make_atomic(T t) {
00532 atomic<T> a;
00533 store<relaxed>(a,t);
00534 return a;
00535 }
00536 }
00537 using interface6::make_atomic;
00538
00539 namespace internal {
00540
00541
00542 template<typename T>
00543 inline atomic<T>& as_atomic( T& t ) {
00544 return (atomic<T>&)t;
00545 }
00546 }
00547
00548 }
00549
00550 #if _MSC_VER && !__INTEL_COMPILER
00551 #pragma warning (pop)
00552 #endif // warnings 4244, 4267 are back
00553
00554 #endif