#ifndef __TBB_concurrent_lru_cache_H
#define __TBB_concurrent_lru_cache_H

#if ! TBB_PREVIEW_CONCURRENT_LRU_CACHE
#error Set TBB_PREVIEW_CONCURRENT_LRU_CACHE to include concurrent_lru_cache.h
#endif

#include <map>
#include <list>
#include <algorithm>   // std::find
#include <utility>     // std::make_pair, std::swap

#include "tbb_stddef.h"
#include "atomic.h"
#include "internal/_aggregator_impl.h"

namespace tbb {
namespace interface6 {

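// A minimal usage sketch (illustrative only; `resource`, `load_resource`, and
// `use` are hypothetical placeholder names, not part of this header):
//
//     #define TBB_PREVIEW_CONCURRENT_LRU_CACHE 1
//     #include "tbb/concurrent_lru_cache.h"
//
//     resource load_resource(int id);                          // user-supplied factory
//     typedef tbb::concurrent_lru_cache<int, resource> cache_t;
//
//     cache_t cache(&load_resource, /*number_of_lru_history_items=*/16);
//     {
//         cache_t::handle h = cache[42];   // computes the value or reuses a cached one
//         use(h.value());
//     }   // destroying the handle releases the entry into the LRU history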
template <typename key_type, typename value_type, typename value_functor_type = value_type (*)(key_type) >
class concurrent_lru_cache : internal::no_assign{
private:
    typedef concurrent_lru_cache self_type;
    typedef value_functor_type value_function_type;
    typedef std::size_t ref_counter_type;
    struct map_value_type;
    typedef std::map<key_type, map_value_type> map_storage_type;
    typedef std::list<typename map_storage_type::iterator> lru_list_type;
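    // Each key maps to a map_value_type record (value, reference count, position
    // in the LRU history, readiness flag); the LRU list holds iterators to the
    // records that are currently unused, ordered from most recently released
    // (front) to oldest (back).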
    struct map_value_type {
        value_type my_value;
        ref_counter_type my_ref_counter;
        typename lru_list_type::iterator my_lru_list_iterator;
        bool my_is_ready;

        map_value_type(value_type const& a_value, ref_counter_type a_ref_counter, typename lru_list_type::iterator a_lru_list_iterator, bool a_is_ready)
            : my_value(a_value), my_ref_counter(a_ref_counter), my_lru_list_iterator(a_lru_list_iterator), my_is_ready(a_is_ready)
        {}
    };

    class handle_object;

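    // Concurrent access is serialized through an aggregator: callers post small
    // operation records and a single thread at a time drains the queue in
    // handle_operations(), so the map and the LRU list need no per-element locking.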
    struct aggregator_operation;
    typedef aggregator_operation aggregated_operation_type;
    typedef tbb::internal::aggregating_functor<self_type, aggregated_operation_type> aggregator_function_type;
    friend class tbb::internal::aggregating_functor<self_type, aggregated_operation_type>;
    typedef tbb::internal::aggregator<aggregator_function_type, aggregated_operation_type> aggregator_type;

private:
    value_function_type my_value_function;
    std::size_t const my_number_of_lru_history_items;
    map_storage_type my_map_storage;
    lru_list_type my_lru_list;
    aggregator_type my_aggregator;

public:
    typedef handle_object handle;

public:
    concurrent_lru_cache(value_function_type f, std::size_t number_of_lru_history_items)
        : my_value_function(f), my_number_of_lru_history_items(number_of_lru_history_items)
    {
        my_aggregator.initialize_handler(aggregator_function_type(this));
    }

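    // Looks up (or creates) the record for k through the aggregator. If this
    // thread inserted a fresh record, it computes the value outside the
    // aggregator and publishes it with a release store; otherwise it spins
    // until the record becomes ready. The returned handle keeps the reference
    // count raised until it is destroyed.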
    handle_object operator[](key_type k){
        retrieve_aggregator_operation op(k);
        my_aggregator.execute(&op);
        if (op.is_new_value_needed()){
            op.result().second.my_value = my_value_function(k);
            __TBB_store_with_release(op.result().second.my_is_ready, true);
        }else{
            tbb::internal::spin_wait_while_eq(op.result().second.my_is_ready, false);
        }
        return handle_object(*this, op.result());
    }
private:
    void signal_end_of_usage(typename map_storage_type::reference value_ref){
        signal_end_of_usage_aggregator_operation op(value_ref);
        my_aggregator.execute(&op);
    }

private:
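    // handle_move_t emulates move semantics for pre-C++11 compilers (in the
    // spirit of auto_ptr_ref): a handle_object converts to this helper so that
    // handles can be returned from operator[] without being copied.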
    struct handle_move_t: no_assign{
        concurrent_lru_cache& my_cache_ref;
        typename map_storage_type::reference my_map_record_ref;
        handle_move_t(concurrent_lru_cache& cache_ref, typename map_storage_type::reference value_ref): my_cache_ref(cache_ref), my_map_record_ref(value_ref) {}
    };
    class handle_object {
        concurrent_lru_cache* my_cache_pointer;
        typename map_storage_type::reference my_map_record_ref;
    public:
        handle_object(concurrent_lru_cache& cache_ref, typename map_storage_type::reference value_ref): my_cache_pointer(&cache_ref), my_map_record_ref(value_ref) {}
        handle_object(handle_move_t m): my_cache_pointer(&m.my_cache_ref), my_map_record_ref(m.my_map_record_ref) {}
        operator handle_move_t(){ return move(*this); }
        value_type& value(){
            __TBB_ASSERT(my_cache_pointer, "getting value from a moved-from object?");
            return my_map_record_ref.second.my_value;
        }
        ~handle_object(){
            if (my_cache_pointer){
                my_cache_pointer->signal_end_of_usage(my_map_record_ref);
            }
        }
    private:
        friend handle_move_t move(handle_object& h){
            return handle_object::move(h);
        }
        static handle_move_t move(handle_object& h){
            __TBB_ASSERT(h.my_cache_pointer, "moving from the same object twice?");
            concurrent_lru_cache* cache_pointer = NULL;
            std::swap(cache_pointer, h.my_cache_pointer);
            return handle_move_t(*cache_pointer, h.my_map_record_ref);
        }
    private:
        void operator=(handle_object&);
#if __SUNPRO_CC
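        // The copy constructor is declared but never defined, so copies remain
        // impossible; the SunPro compiler, however, appears to need the
        // declaration to be accessible, hence it is made public there.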
    public:
#endif
        handle_object(handle_object&);
    };
private:

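    // Operation records posted to the aggregator. Each record carries its kind
    // so handle_operations() can dispatch it via cast_and_handle().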
    struct aggregator_operation : tbb::internal::aggregated_operation<aggregator_operation>{
        enum e_op_type {op_retrieve, op_signal_end_of_usage};

        e_op_type my_operation_type;
        aggregator_operation(e_op_type operation_type): my_operation_type(operation_type) {}
        void cast_and_handle(self_type& container){
            if (my_operation_type == op_retrieve){
                static_cast<retrieve_aggregator_operation*>(this)->handle(container);
            }else{
                static_cast<signal_end_of_usage_aggregator_operation*>(this)->handle(container);
            }
        }
    };
    struct retrieve_aggregator_operation : aggregator_operation, private internal::no_assign {
        key_type my_key;
        typename map_storage_type::pointer my_result_map_record_pointer;
        bool my_is_new_value_needed;
        retrieve_aggregator_operation(key_type key): aggregator_operation(aggregator_operation::op_retrieve), my_key(key), my_is_new_value_needed(false) {}
        void handle(self_type& container){
            my_result_map_record_pointer = &container.retrieve_serial(my_key, my_is_new_value_needed);
        }
        typename map_storage_type::reference result(){ return *my_result_map_record_pointer; }
        bool is_new_value_needed(){ return my_is_new_value_needed; }
    };
    struct signal_end_of_usage_aggregator_operation : aggregator_operation, private internal::no_assign {
        typename map_storage_type::reference my_map_record_ref;
        signal_end_of_usage_aggregator_operation(typename map_storage_type::reference map_record_ref): aggregator_operation(aggregator_operation::op_signal_end_of_usage), my_map_record_ref(map_record_ref) {}
        void handle(self_type& container){
            container.signal_end_of_usage_serial(my_map_record_ref);
        }
    };

private:
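    // Invoked by the aggregator with a batch of pending operations: each one is
    // dispatched and then marked complete with a release store on its status
    // word so the thread that posted it can resume.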
    void handle_operations(aggregator_operation* op_list){
        while (op_list){
            op_list->cast_and_handle(*this);
            aggregator_operation* tmp = op_list;
            op_list = op_list->next;
            tbb::internal::itt_store_word_with_release(tmp->status, uintptr_t(1));
        }
    }

private:
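    // The *_serial methods below are only ever called from handle_operations(),
    // i.e. by one thread at a time, so they may modify my_map_storage and
    // my_lru_list without further synchronization.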
    typename map_storage_type::reference retrieve_serial(key_type k, bool& is_new_value_needed){
        typename map_storage_type::iterator it = my_map_storage.find(k);
        if (it == my_map_storage.end()){
            it = my_map_storage.insert(it, std::make_pair(k, map_value_type(value_type(), 0, my_lru_list.end(), false)));
            is_new_value_needed = true;
        }else{
            typename lru_list_type::iterator list_it = it->second.my_lru_list_iterator;
            if (list_it != my_lru_list.end()){
                __TBB_ASSERT(!it->second.my_ref_counter, "an unused item in the LRU history should not have live references");
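                // the record is being used again, so it is no longer a candidate
                // for eviction; detach it from the LRU history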
                my_lru_list.erase(list_it);
                it->second.my_lru_list_iterator = my_lru_list.end();
            }
        }
        ++(it->second.my_ref_counter);
        return *it;
    }

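    // Called (via the aggregator) when a handle is destroyed: drops the record's
    // reference count and, when it reaches zero, puts the record at the front of
    // the LRU history, evicting the oldest unused records if the history is full.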
    void signal_end_of_usage_serial(typename map_storage_type::reference map_record_ref){
        typename map_storage_type::iterator it = my_map_storage.find(map_record_ref.first);
        __TBB_ASSERT(it != my_map_storage.end(), "cache should not return past-the-end iterators to the outside world");
        __TBB_ASSERT(&(*it) == &map_record_ref, "dangling reference returned to the outside world? data race?");
        __TBB_ASSERT(my_lru_list.end() == std::find(my_lru_list.begin(), my_lru_list.end(), it),
            "an object in use should not be in the list of unused objects");
        if (! --(it->second.my_ref_counter)){
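            // this was the last live reference: the record joins the LRU history
            // of unused items; trim the history first if it is already at capacity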
            if (my_lru_list.size() >= my_number_of_lru_history_items){
                size_t number_of_elements_to_evict = 1 + my_lru_list.size() - my_number_of_lru_history_items;
                for (size_t i = 0; i < number_of_elements_to_evict; ++i){
                    typename map_storage_type::iterator it_to_evict = my_lru_list.back();
                    __TBB_ASSERT(!it_to_evict->second.my_ref_counter, "item to be evicted should not have live references");
                    my_lru_list.pop_back();
                    my_map_storage.erase(it_to_evict);
                }
            }
            my_lru_list.push_front(it);
            it->second.my_lru_list_iterator = my_lru_list.begin();
        }
    }
};
} // namespace interface6

using interface6::concurrent_lru_cache;

} // namespace tbb
#endif // __TBB_concurrent_lru_cache_H