Compare commits

...

4 Commits

7 changed files with 70 additions and 41 deletions

View File

@@ -17,7 +17,7 @@ NAMESPACE_MODULE_BEGIN(Redcraft)
NAMESPACE_MODULE_BEGIN(Utility)
/** Dynamic array. The elements are stored contiguously, which means that elements can be accessed not only through iterators, but also using offsets to regular pointers to elements. */
-template <CElementalObject T, CInstantiableAllocator Allocator = FHeapAllocator> requires (!CConst<T>)
+template <CAllocatableObject T, CAllocator<T> Allocator = FHeapAllocator>
class TArray final
{
private:
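A minimal sketch of what the new template head accepts, not part of this change set; it assumes CAllocatableObject and CAllocator behave as declared later in this comparison:

using FIntArray = TArray<int>; // the allocator parameter defaults to FHeapAllocator
// TArray<const int> is ill-formed: CAllocatableObject rejects const- and volatile-qualified element types,
// which replaces the old explicit requires (!CConst<T>) clause.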

View File

@@ -14,10 +14,10 @@ NAMESPACE_REDCRAFT_BEGIN
NAMESPACE_MODULE_BEGIN(Redcraft)
NAMESPACE_MODULE_BEGIN(Utility)
-template <CElementalObject T, size_t N>
+template <CObject T, size_t N>
struct TStaticArray;
-template <CElementalObject T, CInstantiableAllocator A> requires (!CConst<T>)
+template <CAllocatableObject T, CAllocator<T> A>
class TArray;
inline constexpr size_t DynamicExtent = INDEX_NONE;
@@ -27,7 +27,7 @@ inline constexpr size_t DynamicExtent = INDEX_NONE;
* the sequence at position zero. A TArrayView can either have a static extent, in which case the number of elements in the sequence
* is known at compile-time and encoded in the type, or a dynamic extent.
*/
-template <CElementalObject T, size_t InExtent = DynamicExtent>
+template <CObject T, size_t InExtent = DynamicExtent>
class TArrayView final
{
public:
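A minimal sketch of the two extent flavours described above, not part of this change set and based only on the declarations shown in this file:

using FFixedView   = TArrayView<int, 4>; // static extent: the element count is encoded in the type
using FDynamicView = TArrayView<int>;    // dynamic extent: InExtent defaults to DynamicExtent (INDEX_NONE)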

View File

@@ -24,7 +24,7 @@ using TDefaultBitsetAllocator = TInlineAllocator<(40 - 3 * sizeof(size_t)) / siz
NAMESPACE_PRIVATE_END
-template <CUnsignedIntegral InBlockType, CInstantiableAllocator Allocator = NAMESPACE_PRIVATE::TDefaultBitsetAllocator<InBlockType>> requires (!CSameAs<InBlockType, bool>)
+template <CUnsignedIntegral InBlockType, CAllocator<InBlockType> Allocator = NAMESPACE_PRIVATE::TDefaultBitsetAllocator<InBlockType>> requires (!CSameAs<InBlockType, bool>)
class TBitset final
{
private:
@@ -285,7 +285,9 @@ public:
{
if (&InValue == this) UNLIKELY return *this;
-if (Num() == 0 || InValue.Num() == 0) return *this;
+if (Num() == 0) return *this;
+if (InValue.Num() == 0) return Set(false);
if (Num() <= InValue.Num())
{
@@ -309,7 +311,7 @@ public:
for (size_t Index = LastBlock + 1; Index != NumBlocks(); ++Index)
{
-Impl.Pointer[Index] &= 0;
+Impl.Pointer[Index] = 0;
}
}
@@ -321,7 +323,9 @@ public:
{
if (&InValue == this) UNLIKELY return *this;
-if (Num() == 0 || InValue.Num() == 0) return *this;
+if (Num() == 0) return *this;
+if (InValue.Num() == 0) return *this;
if (Num() <= InValue.Num())
{
@@ -342,11 +346,6 @@ public:
const BlockType LastBlockBitmask = InValue.Num() % BlockWidth != 0 ? (1ull << InValue.Num() % BlockWidth) - 1 : -1;
Impl.Pointer[LastBlock] |= InValue.Impl.Pointer[LastBlock] & LastBlockBitmask;
-for (size_t Index = LastBlock + 1; Index != NumBlocks(); ++Index)
-{
-Impl.Pointer[Index] |= 0;
-}
}
return *this;
@@ -357,7 +356,9 @@ public:
{
if (&InValue == this) UNLIKELY return *this;
-if (Num() == 0 || InValue.Num() == 0) return *this;
+if (Num() == 0) return *this;
+if (InValue.Num() == 0) return *this;
if (Num() <= InValue.Num())
{
@@ -378,11 +379,6 @@ public:
const BlockType LastBlockBitmask = InValue.Num() % BlockWidth != 0 ? (1ull << InValue.Num() % BlockWidth) - 1 : -1;
Impl.Pointer[LastBlock] ^= InValue.Impl.Pointer[LastBlock] & LastBlockBitmask;
-for (size_t Index = LastBlock + 1; Index != NumBlocks(); ++Index)
-{
-Impl.Pointer[Index] ^= 0;
-}
}
return *this;
@@ -592,7 +588,18 @@ public:
/** Converts the contents of the bitset to an uint64 integer. */
NODISCARD uint64 ToIntegral()
{
-checkf(Num() <= 64, TEXT("The bitset can not be represented in uint64. Please check Num()."));
+if (Num() > 64)
+{
+for (size_t Index = 64 / BlockWidth; Index < NumBlocks() - 1; ++Index)
+{
+checkf(Impl.Pointer[Index] == 0, TEXT("The bitset can not be represented in uint64. Please check Num()."));
+}
+const BlockType LastBlockBitmask = Num() % BlockWidth != 0 ? (1ull << Num() % BlockWidth) - 1 : -1;
+const BlockType LastBlock = Impl.Pointer[NumBlocks() - 1] & LastBlockBitmask;
+checkf(LastBlock == 0, TEXT("The bitset can not be represented in uint64. Please check Num()."));
+}
uint64 Result = 0;
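A standalone sketch of the last-block masking used by the relaxed ToIntegral() above, not part of this change set; it assumes a 64-bit block width and does not use the real container API:

#include <cassert>
#include <cstddef>
#include <cstdint>

int main()
{
    constexpr std::size_t   BlockWidth = 64;
    constexpr std::size_t   Num        = 70;   // block 0 holds bits 0..63, block 1 holds bits 64..69
    constexpr std::uint64_t LastBlockBitmask =
        Num % BlockWidth != 0 ? (1ull << Num % BlockWidth) - 1 : ~0ull;
    static_assert(LastBlockBitmask == 0x3Full); // only the low 6 bits of the last block belong to the bitset

    std::uint64_t LastBlock = 0b100000;          // bit 69 of the bitset is set
    assert((LastBlock & LastBlockBitmask) != 0); // so the bitset cannot be represented in a uint64
    return 0;
}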

View File

@@ -14,9 +14,6 @@ NAMESPACE_REDCRAFT_BEGIN
NAMESPACE_MODULE_BEGIN(Redcraft)
NAMESPACE_MODULE_BEGIN(Utility)
-template <typename T>
-concept CElementalObject = CObject<T> && CDestructible<T>;
NAMESPACE_PRIVATE_BEGIN
template <typename T> using WithReference = T&;

View File

@@ -17,7 +17,7 @@ NAMESPACE_MODULE_BEGIN(Redcraft)
NAMESPACE_MODULE_BEGIN(Utility)
/** TStaticArray is a container that encapsulates fixed size arrays. */
-template <CElementalObject T, size_t N>
+template <CObject T, size_t N>
struct TStaticArray final
{
private:
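The relaxed constraint can be checked at the concept level without instantiating the container; a minimal sketch, not part of this change set:

static_assert(CObject<const int>);             // cv-qualified element types satisfy CObject...
static_assert(!CAllocatableObject<const int>); // ...but are still rejected by the dynamic containers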

View File

@@ -383,7 +383,18 @@ public:
/** Converts the contents of the bitset to an uint64 integer. */
NODISCARD constexpr uint64 ToIntegral()
{
-checkf(Num() <= 64, TEXT("The bitset can not be represented in uint64. Please check Num()."));
+if constexpr (N > 64)
+{
+for (size_t Index = 64 / BlockWidth; Index < NumBlocks() - 1; ++Index)
+{
+checkf(Impl.Pointer[Index] == 0, TEXT("The bitset can not be represented in uint64. Please check Num()."));
+}
+const BlockType LastBlockBitmask = Num() % BlockWidth != 0 ? (1ull << Num() % BlockWidth) - 1 : -1;
+const BlockType LastBlock = Impl.Pointer[NumBlocks() - 1] & LastBlockBitmask;
+checkf(LastBlock == 0, TEXT("The bitset can not be represented in uint64. Please check Num()."));
+}
uint64 Result = 0;
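Because the branch above is guarded by if constexpr (N > 64), the validation is discarded at compile time whenever the fixed size fits in a uint64. A minimal standalone illustration of that pattern, not the container's real code:

#include <cstddef>

// For N <= 64 the first branch is discarded entirely, so the range check costs nothing.
template <std::size_t N>
constexpr bool NeedsRangeCheck()
{
    if constexpr (N > 64) return true;  // bits 64..N-1 must be verified to be zero
    else                  return false; // the value always fits in a uint64
}

static_assert(!NeedsRangeCheck<32>());
static_assert(NeedsRangeCheck<100>());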

View File

@@ -12,8 +12,11 @@ NAMESPACE_MODULE_BEGIN(Utility)
struct FAllocatorInterface;
+template <typename T>
+concept CAllocatableObject = CObject<T> && !CConst<T> && !CVolatile<T>;
template <typename A, typename T = int>
-concept CInstantiableAllocator = !CSameAs<A, FAllocatorInterface>
+concept CAllocator = !CSameAs<A, FAllocatorInterface> && CAllocatableObject<T>
&& requires (typename A::template ForElementType<T>& Allocator, T* InPtr, size_t Num, size_t NumAllocated)
{
{ Allocator.Allocate(Num) } -> CSameAs<T*>;
@@ -24,6 +27,9 @@ concept CInstantiableAllocator = !CSameAs<A, FAllocatorInterface>
{ AsConst(Allocator).CalculateSlackReserve(Num) } -> CSameAs<size_t>;
};
+template <typename A, typename T = int>
+concept CMultipleAllocator = CAllocator<A, T> && A::bSupportsMultipleAllocation;
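How the three concepts are expected to relate, sketched with static_asserts that are not part of this change set; the allocator lines assume FHeapAllocator and TInlineAllocator model CAllocator as intended:

static_assert(CAllocatableObject<int>);                       // plain object types qualify
static_assert(!CAllocatableObject<const int>);                // const- and volatile-qualified types do not
static_assert(CAllocator<FHeapAllocator, int>);               // ForElementType<int> provides the required members
static_assert(CMultipleAllocator<FHeapAllocator, int>);       // bSupportsMultipleAllocation == true
static_assert(!CMultipleAllocator<TInlineAllocator<4>, int>); // the inline allocator reuses its inline storage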
/**
* This is the allocator interface, the allocator does not use virtual, this contains the default of
* the allocator interface functions. Unlike std::allocator, IAllocator should be bound to only an object,
@@ -32,7 +38,15 @@ concept CInstantiableAllocator = !CSameAs<A, FAllocatorInterface>
*/
struct FAllocatorInterface
{
-template <CObject T>
+/**
+* If this flag is false, Allocate() may return an address that has already been allocated.
+* Allocations should follow the sizes given by the CalculateSlackReserve() family rather than
+* requesting exactly the size already allocated; this is to support special allocators such as TInlineAllocator.
+*/
+static constexpr bool bSupportsMultipleAllocation = true;
+template <CAllocatableObject T>
class ForElementType /*: private FSingleton*/
{
public:
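A hypothetical sketch of how container code could branch on the flag, not part of this change set; the function and parameter names are invented, and only Allocate() and CalculateSlackReserve() from the concept above are used:

template <CAllocatableObject T, CAllocator<T> A>
void GrowSketch(typename A::template ForElementType<T>& Allocator, T*& Data, size_t NewNum)
{
    const size_t NewMax = AsConst(Allocator).CalculateSlackReserve(NewNum);
    if constexpr (CMultipleAllocator<A, T>)
    {
        T* NewData = Allocator.Allocate(NewMax); // guaranteed to be a distinct block
        // ... relocation of the elements from Data to NewData elided in this sketch ...
        Data = NewData;
    }
    else
    {
        // Allocate() may hand back the storage Data already points to (e.g. an inline buffer),
        // so any relocation (elided here) must tolerate that aliasing.
        Data = Allocator.Allocate(NewMax);
    }
}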
@@ -43,13 +57,7 @@ struct FAllocatorInterface
ForElementType& operator=(const ForElementType&) = delete;
ForElementType& operator=(ForElementType&&) = delete;
-/**
-* Allocates uninitialized storage.
-* Should be allocated according to the results given by the CalculateSlackReserve() family,
-* without needing to allocate memory of the same size as the allocated memory,
-* this is to support special allocators such as TInlineAllocator.
-* If 'InNum' is zero, return nullptr.
-*/
+/** Allocates uninitialized storage. If 'InNum' is zero, return nullptr. */
NODISCARD FORCEINLINE T* Allocate(size_t InNum) = delete;
/** Deallocates storage. */
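A hypothetical usage sketch of the contract in the shortened comment, not part of this change set; it assumes ForElementType<int> is default-constructible and that Deallocate() takes the previously returned pointer, neither of which is shown in this hunk:

FHeapAllocator::ForElementType<int> Alloc;
int* None = Alloc.Allocate(0);  // zero elements requested: returns nullptr
int* Data = Alloc.Allocate(16); // uninitialized storage for 16 ints
Alloc.Deallocate(Data);         // released through the same allocator instance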
@@ -107,8 +115,10 @@ struct FAllocatorInterface
/** This is a heap allocator that calls Memory::Malloc() directly for memory allocation. */
struct FHeapAllocator
{
-template <CObject T>
-class ForElementType
+static constexpr bool bSupportsMultipleAllocation = true;
+template <CAllocatableObject T>
+class ForElementType /*: private FSingleton*/
{
public:
@@ -181,11 +191,13 @@ struct FHeapAllocator
* The inline allocator allocates up to a specified number of elements in the same allocation as the container.
* Any allocation needed beyond that causes all data to be moved into an indirect allocation.
*/
-template <size_t NumInline, CInstantiableAllocator SecondaryAllocator = FHeapAllocator>
+template <size_t NumInline, CAllocator SecondaryAllocator = FHeapAllocator>
struct TInlineAllocator
{
-template <CObject T>
-class ForElementType
+static constexpr bool bSupportsMultipleAllocation = false;
+template <CAllocatableObject T>
+class ForElementType /*: private FSingleton*/
{
public:
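An illustrative pairing with TArray from this change set, not part of the diff; it only restates the behaviour described in the comment above:

using FSmallIntArray = TArray<int, TInlineAllocator<8>>; // elements stay inline until the array outgrows 8 of them
using FHeapIntArray  = TArray<int>;                      // FHeapAllocator: storage comes from Memory::Malloc()

This inline reuse is also why the allocator declares bSupportsMultipleAllocation = false above.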
@@ -264,8 +276,10 @@ struct TInlineAllocator
/** This is a null allocator for which all operations are illegal. */
struct FNullAllocator
{
-template <CObject T>
-class ForElementType
+static constexpr bool bSupportsMultipleAllocation = true;
+template <CAllocatableObject T>
+class ForElementType /*: private FSingleton*/
{
public: