32 #ifndef VSMC_UTILITY_ALIGNED_MEMORY 33 #define VSMC_UTILITY_ALIGNED_MEMORY 46 #elif defined(VSMC_MSVC) 50 #if VSMC_HAS_TBB_MALLOC 51 #include <tbb/scalable_allocator.h> 55 #include <mkl_service.h> 60 #ifndef VSMC_ALIGNED_MEMORY_TYPE 61 #if VSMC_HAS_TBB_MALLOC 62 #define VSMC_ALIGNED_MEMORY_TYPE ::vsmc::AlignedMemoryTBB 64 #define VSMC_ALIGNED_MEMORY_TYPE ::vsmc::AlignedMemoryMKL 65 #elif VSMC_HAS_POSIX || defined(VSMC_MSVC) 66 #define VSMC_ALIGNED_MEMORY_TYPE ::vsmc::AlignedMemorySYS 68 #define VSMC_ALIGNED_MEMORY_TYPE ::vsmc::AlignedMemorySTD 74 #ifndef VSMC_ALIGNMENT 75 #define VSMC_ALIGNMENT 32 78 #define VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY_POWER_OF_TWO(alignment) \ 79 VSMC_RUNTIME_ASSERT( \ 80 (alignment != 0 && (alignment & (alignment - 1)) == 0), \ 81 "**aligned_malloc** USED WITH ALIGNMENT NOT A POWER OF TWO") 83 #define VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY_SIZEOF_VOID(alignemnt) \ 84 VSMC_RUNTIME_ASSERT((alignment >= sizeof(void *)), \ 85 "**aligned_malloc** USED WITH ALIGNMENT LESS THAN sizeof(void *)") 87 #define VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY \ 88 VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY_POWER_OF_TWO(alignment); \ 89 VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY_SIZEOF_VOID(alignment); 110 void *orig_ptr = std::malloc(n + alignment +
sizeof(
void *));
111 if (orig_ptr ==
nullptr)
112 throw std::bad_alloc();
114 uintptr_t address =
reinterpret_cast<uintptr_t
>(orig_ptr);
115 uintptr_t offset = alignment - (address +
sizeof(
void *)) % alignment;
117 reinterpret_cast<void *
>(address + offset +
sizeof(
void *));
118 void **orig =
reinterpret_cast<void **
>(address + offset);
126 std::free(*reinterpret_cast<void **>(
127 reinterpret_cast<uintptr_t>(ptr) -
sizeof(
void *)));
150 if (posix_memalign(&ptr, alignment, n) != 0)
151 throw std::bad_alloc();
159 #elif defined(VSMC_MSVC) 171 void *ptr = _aligned_malloc(n, alignment);
173 throw std::bad_alloc();
178 static void aligned_free(
void *ptr) { _aligned_free(ptr); }
181 #endif // VSMC_HAS_POSIX 183 #if VSMC_HAS_TBB_MALLOC 198 void *ptr = scalable_aligned_malloc(n, alignment);
200 throw std::bad_alloc();
208 #endif // VSMC_HAS_TBB_MALLOC 224 void *ptr = mkl_malloc(n, static_cast<int>(alignment));
226 throw std::bad_alloc();
234 #endif // VSMC_HAS_MKL 255 static_assert(Alignment != 0 && (Alignment & (Alignment - 1)) == 0,
256 "**AlignedAllocator** USED WITH Alignment OTHER THAN A POWER OF TWO " 259 static_assert(Alignment >=
sizeof(
void *),
260 "**AlignedAllocator** USED WITH Alignment LESS THAN sizeof(void *)");
268 using reference =
typename std::add_lvalue_reference<T>::type;
272 template <
typename U>
283 template <
typename U>
285 :
std::allocator<T>(static_cast<
std::allocator<U>>(other))
295 Memory::aligned_malloc(
sizeof(T) * n, Alignment));
301 Memory::aligned_free(ptr);
305 template <std::
size_t Alignment,
typename Memory>
308 using value_type = void;
309 using pointer =
void *;
310 using const_pointer =
const void *;
318 template <std::
size_t Alignment,
typename Memory>
321 using value_type =
const void;
322 using pointer =
const void *;
323 using const_pointer =
const void *;
331 template <
typename T1,
typename T2, std::
size_t Alignment,
typename Memory>
338 template <
typename T1,
typename T2, std::
size_t Alignment,
typename Memory>
347 template <
typename T>
348 using Allocator =
typename std::conditional<std::is_scalar<T>::value,
353 template <
typename T>
358 template <
typename T>
359 using Vector =
typename std::conditional<std::is_scalar<T>::value,
364 #endif // VSMC_UTILITY_ALIGNED_MEMORY
static void * aligned_malloc(std::size_t n, std::size_t alignment)
std::vector< T, AlignedAllocator< T >> AlignedVector
Vector type using AlignedAllocator.
typename std::conditional< std::is_scalar< T >::value, AlignedVector< T >, std::vector< T >>::type Vector
AlignedVector for scalar type and std::vector for others.
Aligned memory using std::malloc and std::free
Aligned memory using Intel TBB scalable_aligned_malloc and scalable_aligned_free.
std::ptrdiff_t difference_type
static void aligned_free(void *ptr)
Aligned memory using Intel MKL mkl_malloc and mkl_free
#define VSMC_ALIGNMENT
Default alignment.
bool operator!=(const SingleParticle< T > &sp1, const SingleParticle< T > &sp2)
#define VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY
typename std::add_lvalue_reference< T >::type reference
static void * aligned_malloc(std::size_t n, std::size_t alignment)
static void aligned_free(void *ptr)
static void * aligned_malloc(std::size_t n, std::size_t alignment)
typename std::add_lvalue_reference< const T >::type const_reference
static pointer allocate(size_type n, const void *=nullptr)
static void aligned_free(void *ptr)
Aligned memory using native system aligned memory allocation.
AlignedAllocator(const AlignedAllocator< U, Alignment, Memory > &other)
bool operator==(const SingleParticle< T > &sp1, const SingleParticle< T > &sp2)
static void deallocate(pointer ptr, size_type)
typename std::conditional< std::is_scalar< T >::value, AlignedAllocator< T >, std::allocator< T >>::type Allocator
AlignedAllocator for scalar type and std::allocator for others.
static void * aligned_malloc(std::size_t n, std::size_t alignment)
static void aligned_free(void *ptr)
#define VSMC_ALIGNED_MEMORY_TYPE
Default AlignedMemory type.
std::true_type is_always_equal