vSMC: Scalable Monte Carlo
aligned_memory.hpp
//============================================================================
// vSMC/include/vsmc/utility/aligned_memory.hpp
//----------------------------------------------------------------------------
// vSMC: Scalable Monte Carlo
//----------------------------------------------------------------------------
// Copyright (c) 2013-2016, Yan Zhou
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//============================================================================

#ifndef VSMC_UTILITY_ALIGNED_MEMORY
#define VSMC_UTILITY_ALIGNED_MEMORY

#include <vsmc/internal/assert.hpp>
#include <vsmc/internal/config.h>

#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <limits>
#include <memory>
#include <new>
#include <type_traits>
#include <vector>

#if VSMC_HAS_POSIX
#include <stdlib.h>
#elif defined(VSMC_MSVC)
#include <malloc.h>
#endif

#if VSMC_HAS_TBB_MALLOC
#include <tbb/scalable_allocator.h>
#endif

#if VSMC_HAS_MKL
#include <mkl_service.h>
#endif

/// \brief Default AlignedMemory type
#ifndef VSMC_ALIGNED_MEMORY_TYPE
#if VSMC_HAS_TBB_MALLOC
#define VSMC_ALIGNED_MEMORY_TYPE ::vsmc::AlignedMemoryTBB
#elif VSMC_HAS_MKL
#define VSMC_ALIGNED_MEMORY_TYPE ::vsmc::AlignedMemoryMKL
#elif VSMC_HAS_POSIX || defined(VSMC_MSVC)
#define VSMC_ALIGNED_MEMORY_TYPE ::vsmc::AlignedMemorySYS
#else
#define VSMC_ALIGNED_MEMORY_TYPE ::vsmc::AlignedMemorySTD
#endif
#endif

/// \brief Default alignment
#ifndef VSMC_ALIGNMENT
#define VSMC_ALIGNMENT 32
#endif

#define VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY_POWER_OF_TWO(alignment)   \
    VSMC_RUNTIME_ASSERT(                                                      \
        (alignment != 0 && (alignment & (alignment - 1)) == 0),               \
        "**aligned_malloc** USED WITH ALIGNMENT NOT A POWER OF TWO")

#define VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY_SIZEOF_VOID(alignment)    \
    VSMC_RUNTIME_ASSERT((alignment >= sizeof(void *)),                        \
        "**aligned_malloc** USED WITH ALIGNMENT LESS THAN sizeof(void *)")

#define VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY                           \
    VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY_POWER_OF_TWO(alignment);      \
    VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY_SIZEOF_VOID(alignment);

namespace vsmc
{

/// \brief Aligned memory using std::malloc and std::free
class AlignedMemorySTD
{
    public:
    static void *aligned_malloc(std::size_t n, std::size_t alignment)
    {
        VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY;

        if (n == 0)
            return nullptr;

        // Over-allocate so that an aligned address and a copy of the
        // original pointer both fit inside the block
        void *orig_ptr = std::malloc(n + alignment + sizeof(void *));
        if (orig_ptr == nullptr)
            throw std::bad_alloc();

        std::uintptr_t address = reinterpret_cast<std::uintptr_t>(orig_ptr);
        std::uintptr_t offset =
            alignment - (address + sizeof(void *)) % alignment;
        void *ptr =
            reinterpret_cast<void *>(address + offset + sizeof(void *));
        // Store the pointer returned by std::malloc immediately before the
        // aligned address so that aligned_free can recover it
        void **orig = reinterpret_cast<void **>(address + offset);
        *orig = orig_ptr;

        return ptr;
    }

    static void aligned_free(void *ptr)
    {
        std::free(*reinterpret_cast<void **>(
            reinterpret_cast<std::uintptr_t>(ptr) - sizeof(void *)));
    }
}; // class AlignedMemorySTD
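
// Usage sketch: the fallback allocator can be called directly. The variable
// name, buffer size (1024), and alignment (32) below are illustrative only.
//
//     void *buf = vsmc::AlignedMemorySTD::aligned_malloc(1024, 32);
//     // buf is 32-byte aligned; the pointer returned by std::malloc is
//     // stored in the word immediately before buf for later release
//     vsmc::AlignedMemorySTD::aligned_free(buf);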

#if VSMC_HAS_POSIX

/// \brief Aligned memory using native system aligned memory allocation
class AlignedMemorySYS
{
    public:
    static void *aligned_malloc(std::size_t n, std::size_t alignment)
    {
        VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY;

        if (n == 0)
            return nullptr;

        void *ptr;
        if (posix_memalign(&ptr, alignment, n) != 0)
            throw std::bad_alloc();

        return ptr;
    }

    static void aligned_free(void *ptr) { free(ptr); }
}; // class AlignedMemorySYS

#elif defined(VSMC_MSVC)

class AlignedMemorySYS
{
    public:
    static void *aligned_malloc(std::size_t n, std::size_t alignment)
    {
        VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY;

        if (n == 0)
            return nullptr;

        void *ptr = _aligned_malloc(n, alignment);
        if (ptr == nullptr)
            throw std::bad_alloc();

        return ptr;
    }

    static void aligned_free(void *ptr) { _aligned_free(ptr); }
}; // class AlignedMemorySYS

#endif // VSMC_HAS_POSIX

#if VSMC_HAS_TBB_MALLOC

/// \brief Aligned memory using Intel TBB scalable_aligned_malloc and
/// scalable_aligned_free
class AlignedMemoryTBB
{
    public:
    static void *aligned_malloc(std::size_t n, std::size_t alignment)
    {
        VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY;

        if (n == 0)
            return nullptr;

        void *ptr = scalable_aligned_malloc(n, alignment);
        if (ptr == nullptr)
            throw std::bad_alloc();

        return ptr;
    }

    static void aligned_free(void *ptr) { scalable_aligned_free(ptr); }
}; // class AlignedMemoryTBB

#endif // VSMC_HAS_TBB_MALLOC

#if VSMC_HAS_MKL

/// \brief Aligned memory using Intel MKL mkl_malloc and mkl_free
class AlignedMemoryMKL
{
    public:
    static void *aligned_malloc(std::size_t n, std::size_t alignment)
    {
        VSMC_RUNTIME_ASSERT_UTILITY_ALIGNED_MEMORY;

        if (n == 0)
            return nullptr;

        void *ptr = mkl_malloc(n, static_cast<int>(alignment));
        if (ptr == nullptr)
            throw std::bad_alloc();

        return ptr;
    }

    static void aligned_free(void *ptr) { mkl_free(ptr); }
}; // class AlignedMemoryMKL

#endif // VSMC_HAS_MKL

/// \brief Default AlignedMemory type
using AlignedMemory = VSMC_ALIGNED_MEMORY_TYPE;
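
// The default AlignedMemory resolves, in order of preference, to
// AlignedMemoryTBB, AlignedMemoryMKL, AlignedMemorySYS, and finally
// AlignedMemorySTD, depending on VSMC_HAS_TBB_MALLOC, VSMC_HAS_MKL, and
// VSMC_HAS_POSIX / VSMC_MSVC (see VSMC_ALIGNED_MEMORY_TYPE above).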

/// \brief Aligned allocator
template <typename T, std::size_t Alignment = VSMC_ALIGNMENT,
    typename Memory = AlignedMemory>
class AlignedAllocator : public std::allocator<T>
{
    static_assert(Alignment != 0 && (Alignment & (Alignment - 1)) == 0,
        "**AlignedAllocator** USED WITH Alignment OTHER THAN A POWER OF TWO "
        "POSITIVE INTEGER");

    static_assert(Alignment >= sizeof(void *),
        "**AlignedAllocator** USED WITH Alignment LESS THAN sizeof(void *)");

    public:
    using value_type = T;
    using size_type = std::size_t;
    using difference_type = std::ptrdiff_t;
    using pointer = T *;
    using const_pointer = const T *;
    using reference = typename std::add_lvalue_reference<T>::type;
    using const_reference = typename std::add_lvalue_reference<const T>::type;
    using is_always_equal = std::true_type;

    template <typename U>
    class rebind
    {
        public:
        using other = AlignedAllocator<U, Alignment, Memory>;
    }; // class rebind

    AlignedAllocator() = default;

    AlignedAllocator(const AlignedAllocator<T, Alignment, Memory> &) = default;

    template <typename U>
    AlignedAllocator(const AlignedAllocator<U, Alignment, Memory> &other)
        : std::allocator<T>(static_cast<std::allocator<U>>(other))
    {
    }

    static pointer allocate(size_type n, const void * = nullptr)
    {
        if (n == 0)
            return nullptr;

        return static_cast<pointer>(
            Memory::aligned_malloc(sizeof(T) * n, Alignment));
    }

    static void deallocate(pointer ptr, size_type)
    {
        if (ptr != nullptr)
            Memory::aligned_free(ptr);
    }
}; // class AlignedAllocator
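
// Usage sketch: AlignedAllocator is a drop-in allocator for standard
// containers. The element type, size, and variable name below are
// illustrative.
//
//     std::vector<double, vsmc::AlignedAllocator<double>> v(1000);
//     // v.data() is aligned to VSMC_ALIGNMENT (32 bytes by default)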

template <std::size_t Alignment, typename Memory>
class AlignedAllocator<void, Alignment, Memory>
{
    public:
    using value_type = void;
    using pointer = void *;
    using const_pointer = const void *;

    template <class U>
    struct rebind {
        using other = AlignedAllocator<U, Alignment, Memory>;
    };
}; // class AlignedAllocator

template <std::size_t Alignment, typename Memory>
class AlignedAllocator<const void, Alignment, Memory>
{
    public:
    using value_type = const void;
    using pointer = const void *;
    using const_pointer = const void *;

    template <class U>
    struct rebind {
        using other = AlignedAllocator<U, Alignment, Memory>;
    };
}; // class AlignedAllocator

template <typename T1, typename T2, std::size_t Alignment, typename Memory>
inline bool operator==(const AlignedAllocator<T1, Alignment, Memory> &,
    const AlignedAllocator<T2, Alignment, Memory> &)
{
    return true;
}

template <typename T1, typename T2, std::size_t Alignment, typename Memory>
inline bool operator!=(const AlignedAllocator<T1, Alignment, Memory> &,
    const AlignedAllocator<T2, Alignment, Memory> &)
{
    return false;
}

/// \brief AlignedAllocator for scalar type and std::allocator for others
template <typename T>
using Allocator = typename std::conditional<std::is_scalar<T>::value,
    AlignedAllocator<T>, std::allocator<T>>::type;

/// \brief Vector type using AlignedAllocator
template <typename T>
using AlignedVector = std::vector<T, AlignedAllocator<T>>;

/// \brief AlignedVector for scalar type and std::vector for others
template <typename T>
using Vector = typename std::conditional<std::is_scalar<T>::value,
    AlignedVector<T>, std::vector<T>>::type;
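
// Usage sketch: Vector<T> selects AlignedVector<T> for scalar T and falls
// back to std::vector<T> otherwise. The element types below are
// illustrative.
//
//     vsmc::Vector<double> x(100);      // aligned storage for a scalar type
//     vsmc::Vector<std::string> s(100); // plain std::vector<std::string>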

} // namespace vsmc

#endif // VSMC_UTILITY_ALIGNED_MEMORY