/******************************************************************************/
/* HASH INDEX *****************************************************************/
/******************************************************************************/


#include <lw/lib/HashIndex.hh>
#include <lw/lib/Utilities.hh>
using namespace lw;


/*= T_HashIndex =============================================================*/

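// Implementation notes:
//  - hash_ holds one chain head per bucket; hashSize_ is a power of two so
//    a bucket can be selected with `key & hashMask_`.
//  - index_ links the records of a bucket into a singly-linked chain:
//    index_[ i ] is the next record in the same bucket, or INVALID_INDEX at
//    the end of the chain.
//  - indexReverse_[ i ] stores the bucket a record was added under, so that
//    remove( ) can locate the right chain without being given the key again.
//  - While nothing has been added, hash_ and index_ point at the static
//    invalidIndex_ placeholder instead of heap memory; the real buffers are
//    only allocated on first use (see allocateIfNecessary( )).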

constexpr uint32_t T_HashIndex::INVALID_INDEX;
uint32_t T_HashIndex::invalidIndex_ = T_HashIndex::INVALID_INDEX;

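// Constructors.  The hash table size must be a power of two so that buckets
// can be selected with `key & hashMask_`; the index array grows by
// indexGrowth_ entries at a time.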
T_HashIndex::T_HashIndex( ) noexcept
    : T_HashIndex( DEFAULT_SIZE , DEFAULT_SIZE )
{ }

T_HashIndex::T_HashIndex( uint32_t hashSize , uint32_t indexSize , uint32_t growth ) noexcept
    : hashSize_( hashSize ) , hash_( &invalidIndex_ ) ,
      indexSize_( indexSize ) , indexGrowth_( growth ) ,
      indexUsed_( 0 ) , index_( &invalidIndex_ ) ,
      hashMask_( hashSize - 1 ) , lookupMask_( 0 )
{
    assert( IsPowerOf2( hashSize ) && "size must be a power of 2" );
    assert( growth > 0 && "growth must be greater than 0" );
}

/*----------------------------------------------------------------------------*/

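// Copying an empty table performs no allocation; otherwise fresh buffers are
// allocated and the source's contents are copied verbatim.  The move
// constructor delegates to the default constructor and swaps.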
T_HashIndex::T_HashIndex( T_HashIndex const& other )
    : hashSize_( other.hashSize_ ) , indexSize_( other.indexSize_ ) ,
      indexGrowth_( other.indexGrowth_ ) , indexUsed_( other.indexUsed_ ) ,
      hashMask_( other.hashMask_ ) , lookupMask_( other.lookupMask_ )
{
    if ( indexUsed_ == 0 ) {
        hash_ = &invalidIndex_;
        index_ = &invalidIndex_;
    } else {
        hash_ = ( uint32_t* ) malloc( hashSize_ * sizeof( uint32_t ) );
        index_ = ( uint32_t* ) malloc( indexSize_ * sizeof( uint32_t ) );
        indexReverse_ = ( uint32_t* ) malloc( indexSize_ * sizeof( uint32_t ) );
        memcpy( hash_ , other.hash_ , hashSize_ * sizeof( uint32_t ) );
        memcpy( index_ , other.index_ , indexSize_ * sizeof( uint32_t ) );
        memcpy( indexReverse_ , other.indexReverse_ , indexSize_ * sizeof( uint32_t ) );
    }
}


T_HashIndex::T_HashIndex( T_HashIndex&& other ) noexcept
    : T_HashIndex( )
{
    swap( *this , other );
}

/*----------------------------------------------------------------------------*/

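// Copy assignment releases any existing buffers before duplicating the
// source; move assignment swaps states and then clears the moved-from table.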
T_HashIndex& T_HashIndex::operator =( T_HashIndex const& other )
{
    // Self-assignment would otherwise free the buffers before copying them.
    if ( &other == this ) {
        return *this;
    }
    if ( hash_ != &invalidIndex_ ) {
        ::free( hash_ );
        ::free( index_ );
        ::free( indexReverse_ );
    }
    hashSize_ = other.hashSize_;
    indexSize_ = other.indexSize_;
    indexGrowth_ = other.indexGrowth_;
    indexUsed_ = other.indexUsed_;
    hashMask_ = other.hashMask_;
    lookupMask_ = other.lookupMask_;

    if ( indexUsed_ == 0 ) {
        hash_ = &invalidIndex_;
        index_ = &invalidIndex_;
    } else {
        hash_ = ( uint32_t* ) malloc( hashSize_ * sizeof( uint32_t ) );
        index_ = ( uint32_t* ) malloc( indexSize_ * sizeof( uint32_t ) );
        indexReverse_ = ( uint32_t* ) malloc( indexSize_ * sizeof( uint32_t ) );
        memcpy( hash_ , other.hash_ , hashSize_ * sizeof( uint32_t ) );
        memcpy( index_ , other.index_ , indexSize_ * sizeof( uint32_t ) );
        memcpy( indexReverse_ , other.indexReverse_ , indexSize_ * sizeof( uint32_t ) );
    }
    return *this;
}

T_HashIndex& T_HashIndex::operator =( T_HashIndex&& other ) noexcept
{
    swap( *this , other );
    other.clear( );
    return *this;
}

/*----------------------------------------------------------------------------*/

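// Non-member swap: exchanges every field, so buffers simply change owner.
// The move operations above are built on top of it.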
void lw::swap( T_HashIndex& lhs , T_HashIndex& rhs ) noexcept
{
    using std::swap;

    swap( lhs.hashSize_ , rhs.hashSize_ );
    swap( lhs.hash_ , rhs.hash_ );

    swap( lhs.indexSize_ , rhs.indexSize_ );
    swap( lhs.indexGrowth_ , rhs.indexGrowth_ );
    swap( lhs.indexUsed_ , rhs.indexUsed_ );
    swap( lhs.index_ , rhs.index_ );
    swap( lhs.indexReverse_ , rhs.indexReverse_ );

    swap( lhs.hashMask_ , rhs.hashMask_ );
    swap( lhs.lookupMask_ , rhs.lookupMask_ );
}

/*----------------------------------------------------------------------------*/

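// free( ) releases the buffers and returns the table to its unallocated
// state; clear( ) keeps the buffers but fills them with 0xff bytes, i.e.
// INVALID_INDEX, leaving an empty table that is ready for reuse.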
void T_HashIndex::free( )
{
    if ( hash_ != &invalidIndex_ ) {
        ::free( hash_ );
        ::free( index_ );
        ::free( indexReverse_ );
        hash_ = &invalidIndex_;
        index_ = &invalidIndex_;
    }
    lookupMask_ = 0;
    indexUsed_ = 0;
}

void T_HashIndex::clear( )
{
    if ( hash_ != &invalidIndex_ ) {
        memset( hash_ , 0xff , hashSize_ * sizeof( uint32_t ) );
        memset( index_ , 0xff , indexSize_ * sizeof( uint32_t ) );
        indexUsed_ = 0;
    }
}

/*----------------------------------------------------------------------------*/

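// Storage management.  enlargeIndex( ) rounds the requested capacity up to a
// multiple of indexGrowth_ and, when buffers already exist, grows index_ and
// indexReverse_ in place, marking the new slots as INVALID_INDEX.
// allocateIfNecessary( ) performs the initial allocation the first time the
// table is actually used.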
void T_HashIndex::enlargeIndex( uint32_t needed )
{
    const uint32_t mod = needed % indexGrowth_;
    const uint32_t newSize = ( mod == 0 )
            ? needed
            : ( needed + indexGrowth_ - mod );
    if ( index_ != &invalidIndex_ ) {
        index_ = ( uint32_t* ) realloc( index_ ,
                newSize * sizeof( uint32_t ) );
        indexReverse_ = ( uint32_t* ) realloc( indexReverse_ ,
                newSize * sizeof( uint32_t ) );
        memset( index_ + indexSize_ , 0xff ,
                ( newSize - indexSize_ ) * sizeof( uint32_t ) );
        memset( indexReverse_ + indexSize_ , 0xff ,
                ( newSize - indexSize_ ) * sizeof( uint32_t ) );
    }
    indexSize_ = newSize;
}

void T_HashIndex::allocateIfNecessary( )
{
    if ( hash_ == &invalidIndex_ ) {
        hash_ = ( uint32_t* ) malloc( hashSize_ * sizeof( uint32_t ) );
        index_ = ( uint32_t* ) malloc( indexSize_ * sizeof( uint32_t ) );
        indexReverse_ = ( uint32_t* ) malloc( indexSize_ * sizeof( uint32_t ) );
        memset( hash_ , 0xff , hashSize_ * sizeof( uint32_t ) );
        memset( index_ , 0xff , indexSize_ * sizeof( uint32_t ) );
        memset( indexReverse_ , 0xff , indexSize_ * sizeof( uint32_t ) );
        lookupMask_ = INVALID_INDEX;
    }
}

/*----------------------------------------------------------------------------*/

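// add( ) registers a new record under `key`: the record takes index slot
// indexUsed_, is pushed onto the front of the chain of bucket
// `key & hashMask_`, and the bucket is remembered in indexReverse_ so that
// remove( ) can unlink it later.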
void T_HashIndex::add( uint32_t key )
{
    const uint32_t index = indexUsed_;
    if ( index >= indexSize_ ) {
        enlargeIndex( index + indexGrowth_ );
    }
    allocateIfNecessary( );

    assert( index_[ index ] == INVALID_INDEX );

    const uint32_t hti = key & hashMask_;
    const uint32_t oldIndex = hash_[ hti ];
    hash_[ hti ] = index;
    index_[ index ] = oldIndex;
    indexReverse_[ index ] = hti;
    indexUsed_ ++;
}

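// remove( ) unlinks entry `index` from its bucket chain, then keeps the index
// array dense by moving the last used entry into the freed slot and patching
// the chain that referenced it.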
void T_HashIndex::remove( uint32_t index )
{
    assert( hash_ != &invalidIndex_ );
    assert( index < indexUsed_ );

    // Follow the chain until we find the reference to the index.
    const auto key( indexReverse_[ index ] );
    uint32_t* ptr = &hash_[ key & hashMask_ ];
    assert( *ptr != INVALID_INDEX );
    while ( *ptr != index ) {
        ptr = &index_[ *ptr ];
        assert( *ptr != INVALID_INDEX );
    }

    // Update the reference so it points to the next item in the chain
    *ptr = index_[ index ];

    // If the index wasn't the last used index, swap it with the last used
    // value
    const uint32_t last = indexUsed_ - 1;
    if ( index != last ) {
        const uint32_t reverse = indexReverse_[ last ];
        assert( reverse != INVALID_INDEX );

        index_[ index ] = index_[ last ];
        indexReverse_[ index ] = reverse;

        // Change the reference to the item we moved in its chain
        uint32_t* ptr = &hash_[ reverse ];
        assert( *ptr != INVALID_INDEX );
        while ( *ptr != last ) {
            ptr = &index_[ *ptr ];
            assert( *ptr != INVALID_INDEX );
        }
        *ptr = index;
    }
    index_[ last ] = INVALID_INDEX;
    indexReverse_[ last ] = INVALID_INDEX;
    indexUsed_ = last;
}
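
/*
 * Usage sketch (illustrative only, not part of the library).  A T_HashIndex
 * is normally kept in sync with a dense array of records; `T_Record` and
 * `HashKey( )` below are placeholders for the caller's own record type and
 * hash function, and only add( ) / remove( ) from this file are assumed.
 *
 *     std::vector< T_Record > records;
 *     T_HashIndex index;
 *
 *     // Insertion: the new record occupies slot records.size( ) and the
 *     // hash index registers that slot under the record's key hash.
 *     records.push_back( record );
 *     index.add( HashKey( record ) );
 *
 *     // Removal of slot i: remove( ) moves the last index entry into slot
 *     // i, so the record array must mirror that move to stay dense.
 *     index.remove( i );
 *     if ( i != records.size( ) - 1 ) {
 *         records[ i ] = std::move( records.back( ) );
 *     }
 *     records.pop_back( );
 */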