Compare commits

...

4 Commits

6 changed files with 343 additions and 362 deletions

View File

@@ -124,7 +124,6 @@ NAMESPACE_UNNAMED_END
void TestArray()
{
TestArrayTemplate<FDefaultAllocator, 0>();
TestArrayTemplate<FHeapAllocator, 0>();
TestArrayTemplate<TInlineAllocator<8>, 8>();
TestArrayTemplate<TFixedAllocator<64>, 64>();

File diff suppressed because it is too large Load Diff

View File

@@ -14,7 +14,7 @@ NAMESPACE_REDCRAFT_BEGIN
NAMESPACE_MODULE_BEGIN(Redcraft)
NAMESPACE_MODULE_BEGIN(Utility)
template <CObject T, size_t InExtent>
template <CElementalObject T, size_t InExtent>
class TArrayView;
NAMESPACE_PRIVATE_BEGIN
@@ -55,8 +55,8 @@ public:
FORCEINLINE constexpr TArrayViewIterator& operator++() { ++Pointer; CheckThis(); return *this; }
FORCEINLINE constexpr TArrayViewIterator& operator--() { --Pointer; CheckThis(); return *this; }
FORCEINLINE constexpr TArrayViewIterator operator++(int) { TArrayViewIterator Temp = *this; ++Pointer; CheckThis(); return Temp; }
FORCEINLINE constexpr TArrayViewIterator operator--(int) { TArrayViewIterator Temp = *this; --Pointer; CheckThis(); return Temp; }
FORCEINLINE constexpr TArrayViewIterator operator++(int) { TArrayViewIterator Temp = *this; ++*this; return Temp; }
FORCEINLINE constexpr TArrayViewIterator operator--(int) { TArrayViewIterator Temp = *this; --*this; return Temp; }
FORCEINLINE constexpr TArrayViewIterator& operator+=(ptrdiff Offset) { Pointer += Offset; CheckThis(); return *this; }
FORCEINLINE constexpr TArrayViewIterator& operator-=(ptrdiff Offset) { Pointer -= Offset; CheckThis(); return *this; }
@@ -103,7 +103,7 @@ private:
template <typename U>
friend class TArrayViewIterator;
template <CObject U, size_t InExtent>
template <CElementalObject U, size_t InExtent>
friend class NAMESPACE_REDCRAFT::TArrayView;
};
@@ -116,10 +116,10 @@ struct TEnableArrayNum<false> { size_t ArrayNum; };
NAMESPACE_PRIVATE_END
template <CObject T, size_t N>
template <CElementalObject T, size_t N>
struct TStaticArray;
template <CObject T, typename A> requires (!CConst<T> && CDestructible<T> && CInstantiableAllocator<A>)
template <CElementalObject T, CInstantiableAllocator A> requires (!CConst<T>)
class TArray;
inline constexpr size_t DynamicExtent = INDEX_NONE;
@@ -129,7 +129,7 @@ inline constexpr size_t DynamicExtent = INDEX_NONE;
* the sequence at position zero. A TArrayView can either have a static extent, in which case the number of elements in the sequence
* is known at compile-time and encoded in the type, or a dynamic extent.
*/
template <CObject T, size_t InExtent = DynamicExtent>
template <CElementalObject T, size_t InExtent = DynamicExtent>
class TArrayView final : private NAMESPACE_PRIVATE::TEnableArrayNum<InExtent == DynamicExtent>
{
private:
@@ -140,6 +140,8 @@ public:
using ElementType = T;
using Reference = T&;
using Iterator = NAMESPACE_PRIVATE::TArrayViewIterator<ElementType>;
using ReverseIterator = TReverseIterator<Iterator>;

View File

@@ -14,6 +14,9 @@ NAMESPACE_REDCRAFT_BEGIN
NAMESPACE_MODULE_BEGIN(Redcraft)
NAMESPACE_MODULE_BEGIN(Utility)
template <typename T>
concept CElementalObject = CObject<T> && CDestructible<T>;
NAMESPACE_PRIVATE_BEGIN
template <typename T> using WithReference = T&;

View File

@@ -17,12 +17,15 @@ NAMESPACE_MODULE_BEGIN(Redcraft)
NAMESPACE_MODULE_BEGIN(Utility)
/** TStaticArray is a container that encapsulates fixed size arrays. */
template <CObject T, size_t N>
template <CElementalObject T, size_t N>
struct TStaticArray final
{
using ElementType = T;
using Reference = T&;
using ConstReference = const T&;
using Iterator = TArrayView< T, N>::Iterator;
using ConstIterator = TArrayView<const T, N>::Iterator;

View File

@@ -55,6 +55,40 @@ struct FAllocatorInterface
};
};
#define ALLOCATOR_WRAPPER_BEGIN(Allocator, Name) \
\
struct PREPROCESSOR_JOIN(F, Name) : private FSingleton
#define ALLOCATOR_WRAPPER_END(Allocator, Name) ; \
\
template <typename A, bool = CEmpty<A> && !CFinal<A>> \
struct PREPROCESSOR_JOIN(T, Name); \
\
template <typename A> \
struct PREPROCESSOR_JOIN(T, Name)<A, true> : public PREPROCESSOR_JOIN(F, Name), private A \
{ \
NODISCARD FORCEINLINE A& operator*() { return *this; } \
NODISCARD FORCEINLINE const A& operator*() const { return *this; } \
NODISCARD FORCEINLINE A* operator->() { return this; } \
NODISCARD FORCEINLINE const A* operator->() const { return this; } \
}; \
\
template <typename A> \
struct PREPROCESSOR_JOIN(T, Name)<A, false> : public PREPROCESSOR_JOIN(F, Name) \
{ \
NODISCARD FORCEINLINE A& operator*() { return AllocatorInstance; } \
NODISCARD FORCEINLINE const A& operator*() const { return AllocatorInstance; } \
NODISCARD FORCEINLINE A* operator->() { return &AllocatorInstance; } \
NODISCARD FORCEINLINE const A* operator->() const { return &AllocatorInstance; } \
\
private: \
\
A AllocatorInstance; \
\
}; \
\
PREPROCESSOR_JOIN(T, Name)<typename Allocator::template ForElementType<T>> Name;
/** This is heap allocator that calls Memory::Malloc() directly for memory allocation. */
struct FHeapAllocator : public FAllocatorInterface
{
@@ -120,13 +154,11 @@ struct FHeapAllocator : public FAllocatorInterface
};
};
using FDefaultAllocator = FHeapAllocator;
/**
* The inline allocator allocates up to a specified number of elements in the same allocation as the container.
* Any allocation needed beyond that causes all data to be moved into an indirect allocation.
*/
template <size_t NumInline, CInstantiableAllocator SecondaryAllocator = FDefaultAllocator>
template <size_t NumInline, CInstantiableAllocator SecondaryAllocator = FHeapAllocator>
struct TInlineAllocator : public FAllocatorInterface
{
template <CObject T>
@@ -140,23 +172,23 @@ struct TInlineAllocator : public FAllocatorInterface
check(InNum >= NumInline);
if (InNum == NumInline) return reinterpret_cast<T*>(&InlineStorage);
if (InNum == NumInline) return Impl.GetInline();
return Secondary.Allocate(InNum);
return Impl->Allocate(InNum);
}
FORCEINLINE void Deallocate(T* InPtr)
{
if (InPtr == reinterpret_cast<T*>(&InlineStorage)) return;
if (InPtr == Impl.GetInline()) return;
Secondary.Deallocate(InPtr);
Impl->Deallocate(InPtr);
}
NODISCARD FORCEINLINE bool IsTransferable(T* InPtr) const
{
if (InPtr == reinterpret_cast<const T*>(&InlineStorage)) return false;
if (InPtr == Impl.GetInline()) return false;
return Secondary.IsTransferable(InPtr);
return Impl->IsTransferable(InPtr);
}
NODISCARD FORCEINLINE size_t CalculateSlackGrow(size_t Num, size_t NumAllocated) const
@@ -166,7 +198,7 @@ struct TInlineAllocator : public FAllocatorInterface
if (Num <= NumInline) return NumInline;
return Secondary.CalculateSlackGrow(Num, NumAllocated <= NumInline ? 0 : NumAllocated);
return Impl->CalculateSlackGrow(Num, NumAllocated <= NumInline ? 0 : NumAllocated);
}
NODISCARD FORCEINLINE size_t CalculateSlackShrink(size_t Num, size_t NumAllocated) const
@@ -176,21 +208,26 @@ struct TInlineAllocator : public FAllocatorInterface
if (Num <= NumInline) return NumInline;
return Secondary.CalculateSlackShrink(Num, NumAllocated);
return Impl->CalculateSlackShrink(Num, NumAllocated);
}
NODISCARD FORCEINLINE size_t CalculateSlackReserve(size_t Num) const
{
if (Num <= NumInline) return NumInline;
return Secondary.CalculateSlackReserve(Num);
return Impl->CalculateSlackReserve(Num);
}
private:
TAlignedStorage<sizeof(T), alignof(T)> InlineStorage[NumInline];
ALLOCATOR_WRAPPER_BEGIN(SecondaryAllocator, Impl)
{
TAlignedStorage<sizeof(T), alignof(T)> InlineStorage[NumInline];
typename SecondaryAllocator::template ForElementType<T> Secondary;
NODISCARD FORCEINLINE T* GetInline() { return reinterpret_cast< T*>(&InlineStorage); }
NODISCARD FORCEINLINE const T* GetInline() const { return reinterpret_cast<const T*>(&InlineStorage); }
}
ALLOCATOR_WRAPPER_END(SecondaryAllocator, Impl)
};
};