00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029 #ifndef __TBB_queuing_mutex_H
00030 #define __TBB_queuing_mutex_H
00031
00032 #include "tbb_config.h"
00033
00034 #if !TBB_USE_EXCEPTIONS && _MSC_VER
00035
00036 #pragma warning (push)
00037 #pragma warning (disable: 4530)
00038 #endif
00039
00040 #include <cstring>
00041
00042 #if !TBB_USE_EXCEPTIONS && _MSC_VER
00043 #pragma warning (pop)
00044 #endif
00045
00046 #include "atomic.h"
00047 #include "tbb_profiling.h"
00048
00049 namespace tbb {
00050
00052
//! Queuing mutex with local-only spinning.
/** A non-recursive mutex whose traits declare it fair (is_fair_mutex==true).
    Waiting scoped_lock objects are chained through their 'next' fields off
    q_tail, and each waiter owns a 'going' word to spin on — the classic
    MCS-style queue lock layout (NOTE(review): the acquire/release logic is
    out-of-line in the TBB library, so the spinning protocol itself is not
    visible here; confirm against the implementation).
    @ingroup synchronization */
class queuing_mutex {
public:
    //! Construct unacquired mutex.
    queuing_mutex() {
        q_tail = NULL;   // empty queue: no owner, no waiters
#if TBB_USE_THREADING_TOOLS
        internal_construct();   // register with threading tools (e.g. Intel Inspector)
#endif
    }

    //! The scoped locking pattern
    /** RAII lock holder: releases in the destructor if still held.
        Each scoped_lock also serves as the queue node for its thread
        while waiting for, or holding, a queuing_mutex. */
    class scoped_lock: internal::no_copy {
        //! Reset fields to mean "this scoped_lock holds no mutex".
        void initialize() {
            mutex = NULL;
#if TBB_USE_ASSERT
            // Poison 'next' so debug builds can detect use before it is
            // properly linked into the queue by acquire/try_acquire.
            internal::poison_pointer(next);
#endif
        }

    public:
        //! Construct lock that has not acquired a mutex.
        scoped_lock() {initialize();}

        //! Construct and acquire lock on given mutex (may block).
        scoped_lock( queuing_mutex& m ) {
            initialize();
            acquire(m);
        }

        //! Release lock (if lock is held) on destruction.
        ~scoped_lock() {
            if( mutex ) release();
        }

        //! Acquire lock on given mutex; blocks until the lock is obtained.
        /** Implemented out-of-line in the TBB library. */
        void __TBB_EXPORTED_METHOD acquire( queuing_mutex& m );

        //! Try to acquire lock on given mutex; returns true iff acquired.
        /** Non-blocking. Implemented out-of-line in the TBB library. */
        bool __TBB_EXPORTED_METHOD try_acquire( queuing_mutex& m );

        //! Release the held lock.
        /** Precondition: this scoped_lock holds a mutex (mutex != NULL).
            Implemented out-of-line in the TBB library. */
        void __TBB_EXPORTED_METHOD release();

    private:
        //! Mutex currently held, or NULL if no mutex is held.
        /** Doubles as the "is held" flag tested by the destructor. */
        queuing_mutex* mutex;

        //! Next competitor queued behind this one for the mutex.
        /** Poisoned in debug builds until set by the out-of-line code. */
        scoped_lock *next;

        //! Per-waiter spin/progress word used by the out-of-line code.
        /** NOTE(review): written and read only by the library-side
            acquire/try_acquire/release — exact encoding not visible here. */
        uintptr_t going;
    };

    //! Register the mutex with threading/profiling tools; out-of-line.
    void __TBB_EXPORTED_METHOD internal_construct();

    // Mutex traits (see other tbb mutex headers for the same triple).
    static const bool is_rw_mutex = false;
    static const bool is_recursive_mutex = false;
    static const bool is_fair_mutex = true;

private:
    //! Tail of the waiter queue: last competitor requesting the lock.
    atomic<scoped_lock*> q_tail;

};
00126
// Expands to the profiling-tools name-registration support for this mutex
// type (macro declared in tbb_profiling.h, included above).
__TBB_DEFINE_PROFILING_SET_NAME(queuing_mutex)
00128
00129 }
00130
00131 #endif