00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029
00030
00031
00032
00033
00034
00035
00036
00037
00038
00039
00040
00041
00042
00043
00044
00045
00046
00047
00048 #ifndef _POOL_ALLOCATOR_H
00049 #define _POOL_ALLOCATOR_H 1
00050
00051 #include <bits/c++config.h>
00052 #include <cstdlib>
00053 #include <new>
00054 #include <bits/functexcept.h>
00055 #include <ext/atomicity.h>
00056 #include <ext/concurrence.h>
00057
00058 _GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
00059
00060 using std::size_t;
00061 using std::ptrdiff_t;
00062
00063
00064
00065
00066
00067
00068
00069
00070
00071
00072
00073
00074
00075
00076
00077
00078
00079
00080
  /**
   *  @brief  Base class for __pool_alloc.
   *
   *  Keeps a set of size-segregated free lists, shared by all
   *  instantiations of __pool_alloc, plus the raw slab of memory
   *  currently being carved into chunks.  All mutable static state is
   *  guarded by the single mutex returned by _M_get_mutex().
   */
  class __pool_alloc_base
  {
  protected:
    // Alignment (and size granularity) of pooled chunks, in bytes.
    enum { _S_align = 8 };

    // Requests larger than this bypass the pool entirely (see
    // __pool_alloc::allocate) and go straight to operator new.
    enum { _S_max_bytes = 128 };

    // One free list per multiple of _S_align up to _S_max_bytes,
    // i.e. lists for 8, 16, ..., 128 bytes.
    enum { _S_free_list_size = (size_t)_S_max_bytes / (size_t)_S_align };

    // A free-list node.  While a block sits on a free list its first
    // bytes hold the link pointer; once handed to a client the same
    // storage is reinterpreted as client data.
    union _Obj
    {
      union _Obj* _M_free_list_link;
      char        _M_client_data[1];    // The client sees this.
    };

    // Head of each size class's singly-linked free list.
    static _Obj* volatile _S_free_list[_S_free_list_size];

    // Current contiguous slab being carved up: [start, end) is the
    // unused remainder; _S_heap_size is the running total of bytes
    // ever obtained for the pool.
    static char*  _S_start_free;
    static char*  _S_end_free;
    static size_t _S_heap_size;

    // Round __bytes up to the next multiple of _S_align
    // (valid because _S_align is a power of two).
    size_t
    _M_round_up(size_t __bytes)
    { return ((__bytes + (size_t)_S_align - 1) & ~((size_t)_S_align - 1)); }

    // Map a request size to the address of its free-list head.
    // Defined out of line.
    _Obj* volatile*
    _M_get_free_list(size_t __bytes);

    // The single mutex protecting all pool state.  Defined out of line.
    __mutex&
    _M_get_mutex();

    // Return an object of size __n, refilling size __n's free list as a
    // side effect.  Called from allocate() with the pool mutex held.
    void*
    _M_refill(size_t __n);

    // Allocate raw memory for __nobjs chunks of size __n.
    // NOTE(review): presumably __nobjs is updated in place to the number
    // of chunks actually obtained — confirm against the out-of-line
    // definition in the .cc file.
    char*
    _M_allocate_chunk(size_t __n, int& __nobjs);
  };
00122
00123
00124
  /**
   *  @brief  Allocator using a memory pool with a single lock.
   *
   *  Satisfies the standard allocator interface.  Small requests
   *  (<= _S_max_bytes once scaled by sizeof(_Tp)) are served from the
   *  shared free lists in __pool_alloc_base; everything else falls
   *  through to operator new/delete.
   */
  template<typename _Tp>
    class __pool_alloc : private __pool_alloc_base
    {
    private:
      // Tri-state switch, set once from the environment on the first
      // call to allocate():
      //    0  not yet checked
      //    1  GLIBCXX_FORCE_NEW is set: always use operator new
      //   -1  GLIBCXX_FORCE_NEW unset: use the pool
      static _Atomic_word _S_force_new;

    public:
      typedef size_t     size_type;
      typedef ptrdiff_t  difference_type;
      typedef _Tp*       pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp&       reference;
      typedef const _Tp& const_reference;
      typedef _Tp        value_type;

      // Standard rebind member: allocator for another value type.
      template<typename _Tp1>
        struct rebind
        { typedef __pool_alloc<_Tp1> other; };

      __pool_alloc() throw() { }

      __pool_alloc(const __pool_alloc&) throw() { }

      // All instantiations share the pool, so converting between
      // value types is trivially allowed.
      template<typename _Tp1>
        __pool_alloc(const __pool_alloc<_Tp1>&) throw() { }

      ~__pool_alloc() throw() { }

      pointer
      address(reference __x) const { return &__x; }

      const_pointer
      address(const_reference __x) const { return &__x; }

      // Largest count of _Tp a single allocate() call may request.
      size_type
      max_size() const throw()
      { return size_t(-1) / sizeof(_Tp); }

      // Copy-construct a _Tp at __p via placement new.
      void
      construct(pointer __p, const _Tp& __val)
      { ::new(__p) _Tp(__val); }

      // Destroy the _Tp at __p without releasing its storage.
      void
      destroy(pointer __p) { __p->~_Tp(); }

      // Defined out of line below.
      pointer
      allocate(size_type __n, const void* = 0);

      void
      deallocate(pointer __p, size_type __n);
    };
00178
00179 template<typename _Tp>
00180 inline bool
00181 operator==(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
00182 { return true; }
00183
00184 template<typename _Tp>
00185 inline bool
00186 operator!=(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
00187 { return false; }
00188
  // Definition of the per-instantiation force-new flag.  Zero-initialized
  // (static storage), meaning "environment not yet consulted"; set to
  // 1 or -1 by the first allocate() call.
  template<typename _Tp>
    _Atomic_word
    __pool_alloc<_Tp>::_S_force_new;
00192
  /**
   *  Allocate storage for __n objects of _Tp.  Small requests are
   *  popped from the shared free lists under the pool mutex; large
   *  requests (or GLIBCXX_FORCE_NEW mode) use operator new directly.
   *  Throws std::bad_alloc on overflow or allocation failure; returns
   *  0 when __n == 0.  The hint parameter is ignored.
   */
  template<typename _Tp>
    _Tp*
    __pool_alloc<_Tp>::allocate(size_type __n, const void*)
    {
      pointer __ret = 0;
      if (__builtin_expect(__n != 0, true))
	{
	  // Reject counts whose byte size would overflow size_t.
	  if (__builtin_expect(__n > this->max_size(), false))
	    std::__throw_bad_alloc();

	  // First caller consults the environment and latches the result
	  // into _S_force_new (1 = bypass pool, -1 = use pool).  If two
	  // threads race through here, both read the same environment,
	  // so the flag resolves in the same direction either way.
	  if (_S_force_new == 0)
	    {
	      if (std::getenv("GLIBCXX_FORCE_NEW"))
		__atomic_add_dispatch(&_S_force_new, 1);
	      else
		__atomic_add_dispatch(&_S_force_new, -1);
	    }

	  const size_t __bytes = __n * sizeof(_Tp);
	  if (__bytes > size_t(_S_max_bytes) || _S_force_new == 1)
	    // Oversized request, or force-new mode: skip the pool.
	    __ret = static_cast<_Tp*>(::operator new(__bytes));
	  else
	    {
	      // Find this size class's free list, then take the pool
	      // mutex for the list manipulation (and possible refill).
	      _Obj* volatile* __free_list = _M_get_free_list(__bytes);

	      __scoped_lock sentry(_M_get_mutex());
	      _Obj* __restrict__ __result = *__free_list;
	      if (__builtin_expect(__result == 0, 0))
		// List empty: carve fresh chunks (mutex still held).
		__ret = static_cast<_Tp*>(_M_refill(_M_round_up(__bytes)));
	      else
		{
		  // Pop the head node off the free list.
		  *__free_list = __result->_M_free_list_link;
		  __ret = reinterpret_cast<_Tp*>(__result);
		}
	      if (__builtin_expect(__ret == 0, 0))
		std::__throw_bad_alloc();
	    }
	}
      return __ret;
    }
00236
00237 template<typename _Tp>
00238 void
00239 __pool_alloc<_Tp>::deallocate(pointer __p, size_type __n)
00240 {
00241 if (__builtin_expect(__n != 0 && __p != 0, true))
00242 {
00243 const size_t __bytes = __n * sizeof(_Tp);
00244 if (__bytes > static_cast<size_t>(_S_max_bytes) || _S_force_new == 1)
00245 ::operator delete(__p);
00246 else
00247 {
00248 _Obj* volatile* __free_list = _M_get_free_list(__bytes);
00249 _Obj* __q = reinterpret_cast<_Obj*>(__p);
00250
00251 __scoped_lock sentry(_M_get_mutex());
00252 __q ->_M_free_list_link = *__free_list;
00253 *__free_list = __q;
00254 }
00255 }
00256 }
00257
00258 _GLIBCXX_END_NAMESPACE
00259
00260 #endif