/******************************************************************************/
|
|
/* ALLOCATORS - INLINE CODE ***************************************************/
|
|
/******************************************************************************/
|
|
|
|
#ifndef _H_LW_LIB_INLINE_ALLOC
|
|
#define _H_LW_LIB_INLINE_ALLOC
|
|
#include <lw/lib/Alloc.hh>
|
|
namespace lw {
|
|
|
|
|
|
/*= T_PoolHelper::T_List =====================================================*/
|
|
|
|
template< typename T , size_t P >
|
|
inline T_PoolHelper::T_List< T , P >::T_List(
|
|
T_List< T , P >*& n ) noexcept
|
|
: next( n ) , free( P )
|
|
{
|
|
for ( size_t i = 0 ; i < P ; i ++ ) {
|
|
storage[ i ].list = this;
|
|
}
|
|
for ( size_t i = 0 ; i < BitmapSize ; i ++ ) {
|
|
bitmap[ i ] = 0;
|
|
}
|
|
n = this;
|
|
}
|
|
|
|
template< typename T , size_t P >
|
|
inline T* T_PoolHelper::T_List< T , P >::findUnused( ) noexcept
|
|
{
|
|
size_t i = 0;
|
|
while ( bitmap[ i ] == 255 ) {
|
|
assert( i < BitmapSize );
|
|
i ++;
|
|
}
|
|
|
|
size_t j = 0;
|
|
while ( ( bitmap[ i ] & ( 1 << j ) ) != 0 ) {
|
|
j ++;
|
|
}
|
|
assert( i * 8 + j < P );
|
|
|
|
bitmap[ i ] = bitmap[ i ] | ( 1 << j );
|
|
free --;
|
|
return &storage[ i * 8 + j ];
|
|
}
|
|
|
|
template< typename T , size_t P >
|
|
inline size_t T_PoolHelper::T_List< T , P >::indexOf(
|
|
T const* const item ) noexcept
|
|
{
|
|
assert( item >= &storage[ 0 ] );
|
|
const size_t offset( ( (char*) item ) - ( (char*) &storage[ 0 ] ) );
|
|
assert( offset % sizeof( T ) == 0 );
|
|
const size_t index( offset / sizeof( T ) );
|
|
assert( index < P );
|
|
return index;
|
|
}
|
|
|
|
template< typename T , size_t P >
|
|
inline void T_PoolHelper::T_List< T , P >::destroy(
|
|
T_List< T , P >*& head ) noexcept
|
|
{
|
|
while ( head != nullptr ) {
|
|
T_Self* const n( head->next );
|
|
delete head;
|
|
head = n;
|
|
}
|
|
}
|
|
|
|
|
|
/*= T_PoolHelper::T_Allocator ================================================*/
|
|
|
|
/* Construct the allocator with one pre-allocated, fully-free chunk. */
template< typename T , size_t P , size_t M >
inline T_PoolHelper::T_Allocator< T , P , M >::T_Allocator( ) noexcept
	: free_( nullptr ) , partial_( nullptr ) , full_( nullptr ) ,
		nFree_( 1 )
{
	// The T_List constructor links the new chunk onto free_ itself, hence
	// the apparently-discarded `new`; nFree_ is initialised to 1 to match.
	new T_List_( free_ );
}
|
|
|
|
/* Release every chunk on all three lists. Items still allocated from
 * these chunks become dangling — callers must free everything first.
 */
template< typename T , size_t P , size_t M >
inline T_PoolHelper::T_Allocator< T , P , M >::~T_Allocator( )
{
	T_List_::destroy( free_ );
	T_List_::destroy( partial_ );
	T_List_::destroy( full_ );
}
|
|
|
|
/*----------------------------------------------------------------------------*/
|
|
|
|
template< typename T , size_t P , size_t M >
|
|
inline T* T_PoolHelper::T_Allocator< T , P , M >::allocate( ) noexcept
|
|
{
|
|
if ( partial_ == nullptr ) {
|
|
if ( free_ == nullptr ) {
|
|
new T_List_( partial_ );
|
|
} else {
|
|
assert( nFree_ > 0 );
|
|
partial_ = free_;
|
|
free_ = free_->next;
|
|
partial_->next = nullptr;
|
|
nFree_ --;
|
|
}
|
|
}
|
|
|
|
T* const data( partial_->findUnused( ) );
|
|
if ( partial_->free == 0 ) {
|
|
T_List_* const nf( partial_ );
|
|
partial_ = nf->next;
|
|
nf->next = full_;
|
|
full_ = nf;
|
|
}
|
|
return data;
|
|
}
|
|
|
|
/* Return `item` to its owning chunk, then rebalance the chunk lists:
 * a previously-full chunk moves to the partial list, a chunk that became
 * entirely free moves to the free list (or is deleted when the free-list
 * cap M has been reached).
 */
template< typename T , size_t P , size_t M >
void T_PoolHelper::T_Allocator< T , P , M >::free(
		T* const item ) noexcept
{
	// Each pooled item carries a back-pointer to its owning chunk,
	// set in the T_List constructor.
	T_List_* const list( reinterpret_cast< T_List_* >( item->list ) );
	const size_t index( list->indexOf( item ) );

	// Clear the slot's occupancy bit.
	list->bitmap[ index / 8 ] = list->bitmap[ index / 8 ] & ~( 1 << ( index & 7 ) );
	list->free ++;

	if ( list->free == 1 ) {
		// Full list is now partial
		moveList( list , full_ , partial_ );
	} else if ( list->free == P ) {
		// Partial list is now free
		if ( nFree_ == M ) {
			// Free-list cap reached: unlink the chunk into a throwaway
			// head and release it back to the system.
			T_List_* p( nullptr );
			moveList( list , partial_ , p );
			delete list;
		} else {
			moveList( list , partial_ , free_ );
			nFree_ ++;
		}
	}
	// NOTE(review): if P == 1 both conditions hold and only the first
	// branch runs, leaving an entirely-free chunk on the partial list —
	// presumably P > 1 in practice; verify against Alloc.hh.
}
|
|
|
|
/*----------------------------------------------------------------------------*/
|
|
|
|
/* Number of chunks on the fully-free list; tracked in nFree_ rather than
 * recounted on each call.
 */
template< typename T , size_t P , size_t M >
size_t T_PoolHelper::T_Allocator< T , P , M >::countFreeLists( ) const noexcept
{
	return nFree_;
}
|
|
|
|
/* Number of chunks currently on the partially-used list (O(n) walk). */
template< typename T , size_t P , size_t M >
size_t T_PoolHelper::T_Allocator< T , P , M >::countPartialLists( ) const noexcept
{
	return countLists( partial_ );
}
|
|
|
|
/* Number of chunks currently on the full list (O(n) walk). */
template< typename T , size_t P , size_t M >
size_t T_PoolHelper::T_Allocator< T , P , M >::countFullLists( ) const noexcept
{
	return countLists( full_ );
}
|
|
|
|
template< typename T , size_t P , size_t M >
|
|
void T_PoolHelper::T_Allocator< T , P , M >::getUsage(
|
|
size_t& total ,
|
|
size_t& free ,
|
|
size_t& used ) const noexcept
|
|
{
|
|
free = countFreeLists( ) * P;
|
|
used = countFullLists( ) * P;
|
|
total = free + used;
|
|
|
|
T_List_ const* p( partial_ );
|
|
while ( p != nullptr ) {
|
|
free += p->free;
|
|
used += P - p->free;
|
|
total += P;
|
|
p = p->next;
|
|
}
|
|
}
|
|
|
|
/*----------------------------------------------------------------------------*/
|
|
|
|
template< typename T , size_t P , size_t M >
|
|
void T_PoolHelper::T_Allocator< T , P , M >::moveList(
|
|
T_List_* const list ,
|
|
T_List_*& from ,
|
|
T_List_*& to ) noexcept
|
|
{
|
|
T_List_** ptr( &from );
|
|
while ( *ptr != list ) {
|
|
ptr = &( (*ptr)->next );
|
|
assert( *ptr != nullptr );
|
|
}
|
|
*ptr = list->next;
|
|
list->next = to;
|
|
to = list;
|
|
}
|
|
|
|
template< typename T , size_t P , size_t M >
|
|
size_t T_PoolHelper::T_Allocator< T , P , M >::countLists(
|
|
T_List_ const* head ) noexcept
|
|
{
|
|
size_t nLists( 0 );
|
|
while ( head != nullptr ) {
|
|
nLists ++;
|
|
head = head->next;
|
|
}
|
|
return nLists;
|
|
}
|
|
|
|
|
|
/*= T_PoolAllocator ==========================================================*/
|
|
|
|
template< size_t OS , size_t OA , size_t P , size_t M >
|
|
inline void* T_PoolAllocator< OS , OA , P , M >::allocate(
|
|
const size_t requested ) noexcept
|
|
{
|
|
#ifdef LW_CFG_NO_ALLOCATORS
|
|
return ::operator new( requested );
|
|
#else
|
|
assert( requested <= OS );
|
|
return reinterpret_cast< char* >( alloc_.allocate( ) );
|
|
#endif
|
|
}
|
|
|
|
template< size_t OS , size_t OA , size_t P , size_t M >
|
|
void T_PoolAllocator< OS , OA , P , M >::free(
|
|
void* const item ) noexcept
|
|
{
|
|
#ifdef LW_CFG_NO_ALLOCATORS
|
|
::operator delete( item );
|
|
#else
|
|
alloc_.free( reinterpret_cast< T_Storage_* >( item ) );
|
|
#endif
|
|
}
|
|
|
|
/*----------------------------------------------------------------------------*/
|
|
|
|
/* Forward to the underlying pool allocator. */
template< size_t OS , size_t OA , size_t P , size_t M >
size_t T_PoolAllocator< OS , OA , P , M >::countFreeLists( ) const noexcept
{
	return alloc_.countFreeLists( );
}
|
|
|
|
/* Forward to the underlying pool allocator. */
template< size_t OS , size_t OA , size_t P , size_t M >
size_t T_PoolAllocator< OS , OA , P , M >::countPartialLists( ) const noexcept
{
	return alloc_.countPartialLists( );
}
|
|
|
|
/* Forward to the underlying pool allocator. */
template< size_t OS , size_t OA , size_t P , size_t M >
size_t T_PoolAllocator< OS , OA , P , M >::countFullLists( ) const noexcept
{
	return alloc_.countFullLists( );
}
|
|
|
|
/* Forward slot-usage reporting to the underlying pool allocator. */
template< size_t OS , size_t OA , size_t P , size_t M >
void T_PoolAllocator< OS , OA , P , M >::getUsage(
		size_t& total ,
		size_t& free ,
		size_t& used ) const noexcept
{
	return alloc_.getUsage( total , free , used );
}
|
|
|
|
|
|
/*= T_ThreadedPoolAllocator ==================================================*/
|
|
|
|
template< size_t OS , size_t OA , size_t P , size_t M >
|
|
inline void* T_ThreadedPoolAllocator< OS , OA , P , M >::allocate(
|
|
const size_t requested ) noexcept
|
|
{
|
|
#ifdef LW_CFG_NO_ALLOCATORS
|
|
return ::operator new( requested );
|
|
#else
|
|
assert( requested <= OS );
|
|
|
|
T_Storage_* const alloc( ([this](){
|
|
T_Storage_* const fs( takeFromStack( ) );
|
|
return fs ? fs : alloc_.allocate( );
|
|
})() );
|
|
alloc->extra.pool = this;
|
|
return reinterpret_cast< char* >( alloc );
|
|
#endif
|
|
}
|
|
|
|
template< size_t OS , size_t OA , size_t P , size_t M >
|
|
void T_ThreadedPoolAllocator< OS , OA , P , M >::free(
|
|
void* const item ) noexcept
|
|
{
|
|
#ifdef LW_CFG_NO_ALLOCATORS
|
|
::operator delete( item );
|
|
#else
|
|
T_Storage_* const storage( reinterpret_cast< T_Storage_* >( item ) );
|
|
if ( storage->extra.pool == this ) {
|
|
alloc_.free( storage );
|
|
} else {
|
|
storage->extra.pool->addToStack( storage );
|
|
}
|
|
|
|
T_Storage_* const fs( takeFromStack( ) );
|
|
if ( fs ) {
|
|
alloc_.free( fs );
|
|
}
|
|
#endif
|
|
}
|
|
|
|
/*----------------------------------------------------------------------------*/
|
|
|
|
/* Forward to the underlying pool allocator. */
template< size_t OS , size_t OA , size_t P , size_t M >
size_t T_ThreadedPoolAllocator< OS , OA , P , M >::countFreeLists( ) const noexcept
{
	return alloc_.countFreeLists( );
}
|
|
|
|
/* Forward to the underlying pool allocator. */
template< size_t OS , size_t OA , size_t P , size_t M >
size_t T_ThreadedPoolAllocator< OS , OA , P , M >::countPartialLists( ) const noexcept
{
	return alloc_.countPartialLists( );
}
|
|
|
|
/* Forward to the underlying pool allocator. */
template< size_t OS , size_t OA , size_t P , size_t M >
size_t T_ThreadedPoolAllocator< OS , OA , P , M >::countFullLists( ) const noexcept
{
	return alloc_.countFullLists( );
}
|
|
|
|
/* Forward slot-usage reporting to the underlying pool allocator. */
template< size_t OS , size_t OA , size_t P , size_t M >
void T_ThreadedPoolAllocator< OS , OA , P , M >::getUsage(
		size_t& total ,
		size_t& free ,
		size_t& used ) const noexcept
{
	return alloc_.getUsage( total , free , used );
}
|
|
|
|
/*----------------------------------------------------------------------------*/
|
|
|
|
template< size_t OS , size_t OA , size_t P , size_t M >
|
|
typename T_ThreadedPoolAllocator< OS , OA , P , M >::T_Storage_*
|
|
T_ThreadedPoolAllocator< OS , OA , P , M >::takeFromStack( ) noexcept
|
|
{
|
|
T_FreeHead_ o( freeHead_.load( std::memory_order_relaxed ) );
|
|
if ( o.head == nullptr ) {
|
|
return nullptr;
|
|
}
|
|
|
|
T_FreeHead_ next;
|
|
do {
|
|
next.aba = o.aba + 1;
|
|
next.head = o.head->extra.next;
|
|
} while ( !freeHead_.compare_exchange_weak( o , next ,
|
|
std::memory_order_acq_rel ,
|
|
std::memory_order_acquire ) );
|
|
return o.head;
|
|
}
|
|
|
|
/* Push `storage` onto this pool's lock-free deferred-free stack. Called
 * by other pools' free() when an item is released on a thread that does
 * not own it. `storage` must already belong to this pool (asserted).
 */
template< size_t OS , size_t OA , size_t P , size_t M >
void T_ThreadedPoolAllocator< OS , OA , P , M >::addToStack(
		T_Storage_* storage ) noexcept
{
	assert( storage->extra.pool == this );
	T_FreeHead_ o( freeHead_.load( std::memory_order_relaxed ) );
	T_FreeHead_ r;
	do {
		// Re-link on every attempt: a failed CAS reloads `o` with the
		// current head, which must become our item's next pointer.
		storage->extra.next = o.head;
		// Bump the ABA generation counter on every exchange.
		r.aba = o.aba + 1;
		r.head = storage;
	} while ( !freeHead_.compare_exchange_weak( o , r ,
			std::memory_order_acq_rel ,
			std::memory_order_acquire ) );
}
|
|
|
|
|
|
}
|
|
#endif //_H_LW_LIB_INLINE_ALLOC
|