#ifndef __TBB_queuing_mutex_H
#define __TBB_queuing_mutex_H

#include "tbb_config.h"

#if !TBB_USE_EXCEPTIONS && _MSC_VER
    // Suppress "C++ exception handler used, but unwind semantics are not enabled" warning in STL headers
    #pragma warning (push)
    #pragma warning (disable: 4530)
#endif

#include <cstring>

#if !TBB_USE_EXCEPTIONS && _MSC_VER
    #pragma warning (pop)
#endif

#include "atomic.h"
#include "tbb_profiling.h"

namespace tbb {

//! Queuing mutex with local-only spinning.
/** @ingroup synchronization */
class queuing_mutex {
public:
    //! Construct unacquired mutex.
    queuing_mutex() {
        q_tail = NULL;
#if TBB_USE_THREADING_TOOLS
        internal_construct();
#endif
    }

    //! The scoped locking pattern
    /** It helps to avoid the common problem of forgetting to release the lock.
        It also nicely provides the "node" for queuing locks. */
    class scoped_lock: internal::no_copy {
        //! Initialize fields to mean "no lock held".
        void initialize() {
            mutex = NULL;
#if TBB_USE_ASSERT
            internal::poison_pointer(next);
#endif /* TBB_USE_ASSERT */
        }
    public:
        //! Construct lock that has not acquired a mutex.
        /** Equivalent to zero-initialization of *this. */
        scoped_lock() {initialize();}

        //! Acquire lock on given mutex.
        scoped_lock( queuing_mutex& m ) {
            initialize();
            acquire(m);
        }

        //! Release lock (if lock is held).
        ~scoped_lock() {
            if( mutex ) release();
        }

        //! Acquire lock on given mutex.
        void __TBB_EXPORTED_METHOD acquire( queuing_mutex& m );

        //! Acquire lock on given mutex if free (i.e. non-blocking)
        bool __TBB_EXPORTED_METHOD try_acquire( queuing_mutex& m );

        //! Release lock.
        void __TBB_EXPORTED_METHOD release();

    private:
        //! The pointer to the mutex owned, or NULL if not holding a mutex.
        queuing_mutex* mutex;

        //! The pointer to the next competitor for the mutex
        scoped_lock *next;

        //! The local spin-wait variable
        /** Inverted (0 - blocked, 1 - granted the mutex) so that a
            zero-initialized lock starts out blocked. */
        uintptr_t going;
    };

    void __TBB_EXPORTED_METHOD internal_construct();

    // Mutex traits
    static const bool is_rw_mutex = false;
    static const bool is_recursive_mutex = false;
    static const bool is_fair_mutex = true;

private:
    //! The last competitor requesting the lock
    atomic<scoped_lock*> q_tail;
};
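
/** Usage sketch (illustrative only, not part of this header's interface).
    A scoped_lock is both the RAII guard and the FIFO queue node, so it must
    live on the owning thread's stack for the whole critical section. The
    names my_mutex, my_counter, safe_increment and increment_if_free are
    hypothetical, invented for this example.
    \code
    tbb::queuing_mutex my_mutex;      // shared between threads
    long my_counter = 0;              // data guarded by my_mutex

    void safe_increment() {
        // The constructor waits in FIFO order for the lock; the destructor
        // releases it even if the critical section exits via an exception.
        tbb::queuing_mutex::scoped_lock lock(my_mutex);
        ++my_counter;
    }

    void increment_if_free() {
        // Non-blocking variant: default-construct, then try_acquire().
        tbb::queuing_mutex::scoped_lock lock;
        if( lock.try_acquire(my_mutex) )
            ++my_counter;             // released when lock goes out of scope
    }
    \endcode */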

__TBB_DEFINE_PROFILING_SET_NAME(queuing_mutex)

} // namespace tbb

#endif /* __TBB_queuing_mutex_H */