#ifndef _POOL_ALLOCATOR_H
#define _POOL_ALLOCATOR_H 1

#include <bits/c++config.h>
#include <new>
#include <bits/functexcept.h>
#include <bits/stl_threads.h>
#include <bits/atomicity.h>

namespace __gnu_cxx
{
  using std::__throw_bad_alloc;
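
  /**
   *  Base class for __pool_alloc: an SGI-style node allocator.  Small
   *  requests are carved out of large chunks and recycled through
   *  per-size free lists, so only a few calls reach ::operator new.
   *
   *  Implementation properties:
   *  0. If globally mandated (GLIBCXX_FORCE_NEW), objects are obtained
   *     directly from new.
   *  1. If a request exceeds _S_max_bytes, the object is obtained
   *     directly from new.
   *  2. Otherwise the object has size exactly _S_round_up(requested
   *     size), so it can later be returned to the proper free list
   *     without losing any part of it.
   *
   *  The template parameter states whether more than one thread may use
   *  this allocator; when true, a mutex guards the free lists.
   */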
  template<bool __threads>
    struct __pool_base
    {
      enum { _S_align = 8 };
      enum { _S_max_bytes = 128 };
      enum { _S_freelists = _S_max_bytes / _S_align };

      union _Obj
      {
        union _Obj* _M_free_list_link;
        char        _M_client_data[1];    // The client sees this.
      };

      static _Obj* volatile         _S_free_list[_S_freelists];

      // Chunk allocation state.
      static char*                  _S_start_free;
      static char*                  _S_end_free;
      static size_t                 _S_heap_size;

      static _STL_mutex_lock        _S_lock;
      static _Atomic_word           _S_force_new;

      // Round __bytes up to a multiple of _S_align.
      static size_t
      _S_round_up(size_t __bytes)
      { return ((__bytes + (size_t)_S_align - 1) & ~((size_t)_S_align - 1)); }

      // Map a size onto the index of the free list that serves it.
      static size_t
      _S_freelist_index(size_t __bytes)
      { return ((__bytes + (size_t)_S_align - 1) / (size_t)_S_align - 1); }

      // Returns an object of size __n, and optionally adds further
      // objects to size __n's free list.
      static void*
      _S_refill(size_t __n);

      // Allocates a chunk for __nobjs objects of size __n.  __nobjs may
      // be reduced if it is inconvenient to allocate the requested number.
      static char*
      _S_chunk_alloc(size_t __n, int& __nobjs);

      // Scoped lock: acquired in the constructor and released in the
      // destructor, but only when instantiated with __threads == true.
      struct _Lock
      {
        _Lock() { if (__threads) _S_lock._M_acquire_lock(); }
        ~_Lock() { if (__threads) _S_lock._M_release_lock(); }
      } __attribute__ ((__unused__));
      friend struct _Lock;
    };

  typedef __pool_base<true> __pool_alloc_base;
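
  /**
   *  @brief  An allocator that uses memory pools for small requests.
   *
   *  Meets the standard allocator requirements, so it can be supplied
   *  as the allocator argument of the containers, e.g.
   *  std::vector<int, __gnu_cxx::__pool_alloc<int> >.
   */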
  template<typename _Tp>
    class __pool_alloc : private __pool_alloc_base
    {
    public:
      typedef size_t     size_type;
      typedef ptrdiff_t  difference_type;
      typedef _Tp*       pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp&       reference;
      typedef const _Tp& const_reference;
      typedef _Tp        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef __pool_alloc<_Tp1> other; };

      __pool_alloc() throw() { }

      __pool_alloc(const __pool_alloc&) throw() { }

      template<typename _Tp1>
        __pool_alloc(const __pool_alloc<_Tp1>&) throw() { }

      ~__pool_alloc() throw() { }

      pointer
      address(reference __x) const { return &__x; }

      const_pointer
      address(const_reference __x) const { return &__x; }

      size_type
      max_size() const throw()
      { return size_t(-1) / sizeof(_Tp); }

      void
      construct(pointer __p, const _Tp& __val)
      { ::new(__p) _Tp(__val); }

      void
      destroy(pointer __p) { __p->~_Tp(); }

      pointer
      allocate(size_type __n, const void* = 0);

      void
      deallocate(pointer __p, size_type __n);
    };

  template<typename _Tp>
    inline bool
    operator==(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return true; }

  template<typename _Tp>
    inline bool
    operator!=(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return false; }
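
  // Allocate a contiguous chunk of memory big enough for __nobjs objects
  // of size __n.  __nobjs may be reduced if it is inconvenient to
  // allocate the requested number.  Assumes __n is properly aligned and
  // that the allocation lock is held.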
  template<bool __threads>
    char*
    __pool_base<__threads>::_S_chunk_alloc(size_t __n, int& __nobjs)
    {
      char* __result;
      size_t __total_bytes = __n * __nobjs;
      size_t __bytes_left = _S_end_free - _S_start_free;

      if (__bytes_left >= __total_bytes)
        {
          // The current chunk satisfies the whole request.
          __result = _S_start_free;
          _S_start_free += __total_bytes;
          return __result;
        }
      else if (__bytes_left >= __n)
        {
          // The current chunk satisfies at least one object: hand out as
          // many as fit and report the reduced count through __nobjs.
          __nobjs = (int)(__bytes_left / __n);
          __total_bytes = __n * __nobjs;
          __result = _S_start_free;
          _S_start_free += __total_bytes;
          return __result;
        }
      else
        {
          size_t __bytes_to_get = (2 * __total_bytes
                                   + _S_round_up(_S_heap_size >> 4));
          // Try to make use of the left-over piece.
          if (__bytes_left > 0)
            {
              _Obj* volatile* __free_list = (_S_free_list
                                             + _S_freelist_index(__bytes_left));

              ((_Obj*)(void*)_S_start_free)->_M_free_list_link = *__free_list;
              *__free_list = (_Obj*)(void*)_S_start_free;
            }

          _S_start_free = static_cast<char*>(::operator new(__bytes_to_get));
          if (_S_start_free == 0)
            {
              size_t __i;
              _Obj* volatile* __free_list;
              _Obj* __p;

              // Try to make do with what is already on the free lists
              // for sizes >= __n; smaller blocks are not tried.
              __i = __n;
              for (; __i <= (size_t) _S_max_bytes; __i += (size_t) _S_align)
                {
                  __free_list = _S_free_list + _S_freelist_index(__i);
                  __p = *__free_list;
                  if (__p != 0)
                    {
                      *__free_list = __p->_M_free_list_link;
                      _S_start_free = (char*)__p;
                      _S_end_free = _S_start_free + __i;
                      return _S_chunk_alloc(__n, __nobjs);
                      // Any leftover piece will eventually make it onto
                      // the right free list.
                    }
                }
              _S_end_free = 0;        // In case of exception.
              _S_start_free = static_cast<char*>(::operator new(__bytes_to_get));
              // This should either throw an exception or remedy the
              // situation; either way we assume it succeeded.
            }
          _S_heap_size += __bytes_to_get;
          _S_end_free = _S_start_free + __bytes_to_get;
          return _S_chunk_alloc(__n, __nobjs);
        }
    }
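
  // Return an object of size __n, and optionally add further objects of
  // size __n to that size's free list.  Assumes __n is properly aligned
  // and that the allocation lock is held.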
  template<bool __threads>
    void*
    __pool_base<__threads>::_S_refill(size_t __n)
    {
      int __nobjs = 20;
      char* __chunk = _S_chunk_alloc(__n, __nobjs);
      _Obj* volatile* __free_list;
      _Obj* __result;
      _Obj* __current_obj;
      _Obj* __next_obj;
      int __i;

      if (1 == __nobjs)
        return __chunk;
      __free_list = _S_free_list + _S_freelist_index(__n);

      // Build the free list in the chunk: the first object is returned
      // to the caller, the rest are threaded onto __free_list.
      __result = (_Obj*)(void*)__chunk;
      *__free_list = __next_obj = (_Obj*)(void*)(__chunk + __n);
      for (__i = 1; ; __i++)
        {
          __current_obj = __next_obj;
          __next_obj = (_Obj*)(void*)((char*)__next_obj + __n);
          if (__nobjs - 1 == __i)
            {
              __current_obj->_M_free_list_link = 0;
              break;
            }
          else
            __current_obj->_M_free_list_link = __next_obj;
        }
      return __result;
    }
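
  // Hand out storage for __n objects of type _Tp.  Requests larger than
  // _S_max_bytes, and all requests when GLIBCXX_FORCE_NEW is set, go
  // straight to ::operator new; otherwise the object comes from the
  // matching free list, which is refilled on demand.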
  template<typename _Tp>
    _Tp*
    __pool_alloc<_Tp>::allocate(size_type __n, const void*)
    {
      pointer __ret = 0;
      if (__n)
        {
          if (__n <= max_size())
            {
              const size_t __bytes = __n * sizeof(_Tp);

              // If there is a race through here, assume the answer from
              // getenv will resolve in the same direction.
              if (_S_force_new == 0)
                {
                  if (getenv("GLIBCXX_FORCE_NEW"))
                    __atomic_add(&_S_force_new, 1);
                  else
                    __atomic_add(&_S_force_new, -1);
                }

              if ((__bytes > (size_t) _S_max_bytes) || (_S_force_new > 0))
                __ret = static_cast<_Tp*>(::operator new(__bytes));
              else
                {
                  _Obj* volatile* __free_list = (_S_free_list
                                                 + _S_freelist_index(__bytes));
                  // Acquire the lock with a constructor call so that it
                  // is released on scope exit or during stack unwinding.
                  _Lock __lock_instance;
                  _Obj* __restrict__ __result = *__free_list;
                  if (__builtin_expect(__result == 0, 0))
                    __ret = static_cast<_Tp*>(_S_refill(_S_round_up(__bytes)));
                  else
                    {
                      *__free_list = __result->_M_free_list_link;
                      __ret = reinterpret_cast<_Tp*>(__result);
                    }
                  if (__builtin_expect(__ret == 0, 0))
                    __throw_bad_alloc();
                }
            }
          else
            __throw_bad_alloc();
        }
      return __ret;
    }
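
  // Return an object to the allocator.  Large objects (and everything
  // when GLIBCXX_FORCE_NEW is set) go back to ::operator delete; small
  // objects are pushed onto the matching free list and kept for reuse.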
  template<typename _Tp>
    void
    __pool_alloc<_Tp>::deallocate(pointer __p, size_type __n)
    {
      if (__n)
        {
          const size_t __bytes = __n * sizeof(_Tp);
          if ((__bytes > (size_t) _S_max_bytes) || (_S_force_new > 0))
            ::operator delete(__p);
          else
            {
              _Obj* volatile* __free_list = (_S_free_list
                                             + _S_freelist_index(__bytes));
              _Obj* __q = (_Obj*)__p;

              // Acquire the lock with a constructor call so that it is
              // released on scope exit or during stack unwinding.
              _Lock __lock_instance;
              __q->_M_free_list_link = *__free_list;
              *__free_list = __q;
            }
        }
    }
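
  // Definitions and initializations of the __pool_base static data members.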
  template<bool __threads>
    typename __pool_base<__threads>::_Obj* volatile
    __pool_base<__threads>::_S_free_list[_S_freelists];

  template<bool __threads>
    char* __pool_base<__threads>::_S_start_free = 0;

  template<bool __threads>
    char* __pool_base<__threads>::_S_end_free = 0;

  template<bool __threads>
    size_t __pool_base<__threads>::_S_heap_size = 0;

  template<bool __threads>
    _STL_mutex_lock
    __pool_base<__threads>::_S_lock __STL_MUTEX_INITIALIZER;

  template<bool __threads>
    _Atomic_word
    __pool_base<__threads>::_S_force_new = 0;
} // namespace __gnu_cxx

#endif