rippled
SlabAllocator.h
//------------------------------------------------------------------------------
/*
    This file is part of rippled: https://github.com/ripple/rippled
    Copyright 2022, Nikolaos D. Bougalis <nikb@bougalis.net>

    Permission to use, copy, modify, and/or distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#ifndef RIPPLE_BASICS_SLABALLOCATOR_H_INCLUDED
#define RIPPLE_BASICS_SLABALLOCATOR_H_INCLUDED

#include <ripple/basics/ByteUtilities.h>
#include <ripple/beast/type_name.h>

#include <boost/align.hpp>
#include <boost/container/static_vector.hpp>
#include <boost/predef.h>

#include <algorithm>
#include <atomic>
#include <cassert>
#include <cstdint>
#include <cstring>
#include <mutex>
#include <stdexcept>
#include <vector>

#if BOOST_OS_LINUX
#include <sys/mman.h>
#endif

namespace ripple {

template <typename Type>
class SlabAllocator
{
    static_assert(
        sizeof(Type) >= sizeof(std::uint8_t*),
        "SlabAllocator: the requested object must be at least as large as a "
        "pointer.");

    static_assert(alignof(Type) == 8 || alignof(Type) == 4);

    /** A block of memory that is owned by a slab allocator */
    struct SlabBlock
    {
        // A mutex to protect the freelist for this block:
        std::mutex m_;

        // A linked list of appropriately sized free buffers:
        std::uint8_t* l_ = nullptr;

        // The next memory block:
        SlabBlock* next_;

        // The underlying memory block:
        std::uint8_t const* const p_ = nullptr;

        // The extent of the underlying memory block:
        std::size_t const size_;

        SlabBlock(
            SlabBlock* next,
            std::uint8_t* data,
            std::size_t size,
            std::size_t item)
            : next_(next), p_(data), size_(size)
        {
            // We don't need to grab the mutex here, since we're the only
            // ones with access at this moment.

            while (data + item <= p_ + size_)
            {
                // Use memcpy to avoid unaligned UB
                // (will optimize to equivalent code)
                std::memcpy(data, &l_, sizeof(std::uint8_t*));
                l_ = data;
                data += item;
            }
        }
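
        // Editor's note (not part of the original header): a sketch of the
        // freelist the loop above builds. Assuming item == 16 and room for
        // three buffers starting at address A, the iterations produce:
        //
        //     A+0  holds nullptr   (copied from l_, then l_ = A+0)
        //     A+16 holds A+0       (copied from l_, then l_ = A+16)
        //     A+32 holds A+16      (copied from l_, then l_ = A+32)
        //
        // Each free buffer's first sizeof(std::uint8_t*) bytes serve as the
        // "next" link of a LIFO freelist, so no bookkeeping table is needed.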

        ~SlabBlock()
        {
            // Calling this destructor will release the allocated memory but
            // will not properly destroy any objects that are constructed in
            // the block itself.
        }

        SlabBlock(SlabBlock const& other) = delete;
        SlabBlock&
        operator=(SlabBlock const& other) = delete;

        SlabBlock(SlabBlock&& other) = delete;
        SlabBlock&
        operator=(SlabBlock&& other) = delete;

        /** Determines whether the given pointer belongs to this allocator */
        bool
        own(std::uint8_t const* p) const noexcept
        {
            return (p >= p_) && (p < p_ + size_);
        }

        std::uint8_t*
        allocate() noexcept
        {
            std::uint8_t* ret;

            {
                std::lock_guard l(m_);

                ret = l_;

                if (ret)
                {
                    // Use memcpy to avoid unaligned UB
                    // (will optimize to equivalent code)
                    std::memcpy(&l_, ret, sizeof(std::uint8_t*));
                }
            }

            return ret;
        }

        /** Return an item to this allocator's freelist.

            @param ptr The pointer to the chunk of memory being deallocated.

            Note that this is a dangerous, private interface; the item
            being returned should belong to this allocator. Debug builds
            will check and assert if this is not the case. Release builds
            will not.
         */
        void
        deallocate(std::uint8_t* ptr) noexcept
        {
            assert(own(ptr));

            std::lock_guard l(m_);

            // Use memcpy to avoid unaligned UB
            // (will optimize to equivalent code)
            std::memcpy(ptr, &l_, sizeof(std::uint8_t*));
            l_ = ptr;
        }
    };
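
    // Editor's sketch (not in the original header): the freelist is LIFO,
    // since allocate() pops and deallocate() pushes at the head. Given a
    // hypothetical SlabBlock instance 'block' with a populated freelist:
    //
    //     auto a = block.allocate();       // pops the current head
    //     auto b = block.allocate();       // pops the next buffer
    //     block.deallocate(a);             // pushes 'a' back as the new head
    //     assert(block.allocate() == a);   // the most recently freed buffer
    //                                      // is handed out first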

private:
    // A linked list of slabs
    std::atomic<SlabBlock*> slabs_ = nullptr;

    // The alignment requirements of the item we're allocating:
    std::size_t const itemAlignment_;

    // The size of an item, including the extra bytes requested and
    // any padding needed for alignment purposes:
    std::size_t const itemSize_;

    // The size of each individual slab:
    std::size_t const slabSize_;

public:
    /** Constructs a slab allocator able to allocate objects of a fixed size

        @param extra The number of extra bytes to allocate per item, above
                     and beyond the size of the object itself.
        @param alloc The size, in bytes, of each individual slab.
        @param align The alignment of returned pointers; if 0, alignof(Type)
                     is used.
     */
    constexpr explicit SlabAllocator(
        std::size_t extra,
        std::size_t alloc = 0,
        std::size_t align = 0)
        : itemAlignment_(align ? align : alignof(Type))
        , itemSize_(
              boost::alignment::align_up(sizeof(Type) + extra, itemAlignment_))
        , slabSize_(alloc)
    {
        assert((itemAlignment_ & (itemAlignment_ - 1)) == 0);
    }
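
    // Editor's note (not part of the original header): a worked example of
    // the initializers above. For a hypothetical Type with sizeof(Type) == 40
    // and alignof(Type) == 8, constructed with extra == 12 and align == 0:
    //
    //     itemAlignment_ = alignof(Type) = 8
    //     itemSize_      = align_up(40 + 12, 8) = align_up(52, 8) = 56
    //
    // Every item in the slab therefore occupies 56 bytes, and every pointer
    // handed out is 8-byte aligned.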

    SlabAllocator(SlabAllocator const& other) = delete;
    SlabAllocator&
    operator=(SlabAllocator const& other) = delete;

    SlabAllocator(SlabAllocator&& other) = delete;
    SlabAllocator&
    operator=(SlabAllocator&& other) = delete;

    ~SlabAllocator()
    {
        // FIXME: We can't destroy the memory blocks we've allocated, because
        // we can't be sure that they are not being used. Cleaning the
        // shutdown process up could make this possible.
    }

    /** Returns the size of the memory block this allocator returns. */
    constexpr std::size_t
    size() const noexcept
    {
        return itemSize_;
    }

    /** Returns a suitably aligned pointer, if one is available.

        @return a pointer to a block of memory from the allocator, or
                nullptr if the allocator can't satisfy this request.
     */
    std::uint8_t*
    allocate() noexcept
    {
        auto slab = slabs_.load();

        while (slab != nullptr)
        {
            if (auto ret = slab->allocate())
                return ret;

            slab = slab->next_;
        }

        // No slab can satisfy our request, so we attempt to allocate a new
        // one here:
        std::size_t const size = slabSize_;

        // We want to allocate the memory at a 2 MiB boundary, to make it
        // possible to use hugepage mappings on Linux:
        auto buf =
            boost::alignment::aligned_alloc(megabytes(std::size_t(2)), size);

        // clang-format off
        if (!buf) [[unlikely]]
            return nullptr;
        // clang-format on

#if BOOST_OS_LINUX
        // When allocating large blocks, attempt to leverage Linux's
        // transparent hugepage support. It is unclear and difficult
        // to accurately determine if doing this impacts performance
        // enough to justify using platform-specific tricks.
        if (size >= megabytes(std::size_t(4)))
            madvise(buf, size, MADV_HUGEPAGE);
#endif

        // We need to carve out a bit of memory for the slab header
        // and then align the rest appropriately:
        auto slabData = reinterpret_cast<void*>(
            reinterpret_cast<std::uint8_t*>(buf) + sizeof(SlabBlock));
        auto slabSize = size - sizeof(SlabBlock);

        // This operation is essentially guaranteed not to fail, but
        // let's be careful anyway.
        if (!boost::alignment::align(
                itemAlignment_, itemSize_, slabData, slabSize))
        {
            boost::alignment::aligned_free(buf);
            return nullptr;
        }

        slab = new (buf) SlabBlock(
            slabs_.load(),
            reinterpret_cast<std::uint8_t*>(slabData),
            slabSize,
            itemSize_);

        // Link the new slab
        while (!slabs_.compare_exchange_weak(
            slab->next_,
            slab,
            std::memory_order_release,
            std::memory_order_relaxed))
        {
            ;  // Nothing to do
        }

        return slab->allocate();
    }
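
    // Editor's sketch (not in the original header): the linking loop above
    // is the standard lock-free push onto an atomic singly-linked list. In
    // isolation, with a minimal Node type, the same idiom reads:
    //
    //     struct Node { Node* next; };
    //     std::atomic<Node*> head{nullptr};
    //
    //     void push(Node* n)
    //     {
    //         n->next = head.load();
    //         while (!head.compare_exchange_weak(n->next, n))
    //             ;  // on failure, n->next is reloaded with the current head
    //     }
    //
    // compare_exchange_weak writes the observed value back into its first
    // argument when it fails, so each retry re-links against the fresh head.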

    /** Returns the memory block to the allocator.

        @param ptr A pointer to a memory block.

        @return true if this memory block belonged to the allocator and has
                     been released; false otherwise.
     */
    bool
    deallocate(std::uint8_t* ptr) noexcept
    {
        assert(ptr);

        for (auto slab = slabs_.load(); slab != nullptr; slab = slab->next_)
        {
            if (slab->own(ptr))
            {
                slab->deallocate(ptr);
                return true;
            }
        }

        return false;
    }
};
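
// Editor's sketch (not in the original header): typical use of SlabAllocator.
// The allocator returns raw, aligned storage; constructing and destroying the
// object is the caller's job. 'Thing' and the sizes are hypothetical.
//
//     SlabAllocator<Thing> alloc(0, megabytes(std::size_t(1)));
//
//     if (std::uint8_t* p = alloc.allocate())
//     {
//         auto* t = new (p) Thing();  // placement-construct in the slab
//         t->~Thing();                // destroy before releasing
//         alloc.deallocate(p);
//     }
//     else
//     {
//         // The slab is exhausted (or sized 0): fall back to another
//         // source, e.g. ::operator new.
//     }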

/** A collection of slab allocators of various sizes for a given type */
template <typename Type>
class SlabAllocatorSet
{
private:
    // The list of allocators that belong to this set
    boost::container::static_vector<SlabAllocator<Type>, 64> allocators_;

    std::size_t maxSize_ = 0;

public:
    class SlabConfig
    {
        friend class SlabAllocatorSet;

    private:
        std::size_t extra;
        std::size_t alloc;
        std::size_t align;

    public:
        constexpr SlabConfig(
            std::size_t extra_,
            std::size_t alloc_ = 0,
            std::size_t align_ = alignof(Type))
            : extra(extra_), alloc(alloc_), align(align_)
        {
        }
    };

    constexpr SlabAllocatorSet(std::vector<SlabConfig> cfg)
    {
        // Ensure that the specified allocators are sorted from smallest to
        // largest by size:
        std::sort(
            std::begin(cfg),
            std::end(cfg),
            [](SlabConfig const& a, SlabConfig const& b) {
                return a.extra < b.extra;
            });

        // We should never have two slabs of the same size
        if (std::adjacent_find(
                std::begin(cfg),
                std::end(cfg),
                [](SlabConfig const& a, SlabConfig const& b) {
                    return a.extra == b.extra;
                }) != cfg.end())
        {
            throw std::runtime_error(
                "SlabAllocatorSet<" + beast::type_name<Type>() +
                ">: duplicate slab size");
        }

        for (auto const& c : cfg)
        {
            auto& a = allocators_.emplace_back(c.extra, c.alloc, c.align);

            if (a.size() > maxSize_)
                maxSize_ = a.size();
        }
    }
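
    // Editor's sketch (not in the original header): constructing a set. Each
    // SlabConfig is (extra, alloc, align); the constructor above sorts the
    // configs by 'extra' and throws on duplicates. Values are hypothetical.
    //
    //     SlabAllocatorSet<Thing> set({
    //         {0,  megabytes(std::size_t(1))},   // items of sizeof(Thing)
    //         {64, megabytes(std::size_t(2))},   // items 64 bytes larger
    //     });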

    SlabAllocatorSet(SlabAllocatorSet const& other) = delete;
    SlabAllocatorSet&
    operator=(SlabAllocatorSet const& other) = delete;

    SlabAllocatorSet(SlabAllocatorSet&& other) = delete;
    SlabAllocatorSet&
    operator=(SlabAllocatorSet&& other) = delete;

    ~SlabAllocatorSet()
    {
    }

    /** Returns a suitably aligned pointer, if one is available.

        @param extra The number of extra bytes, above and beyond the size of
                     the object, that should be returned by the allocator.

        @return a pointer to a block of memory from the allocator, or
                nullptr if no allocator can satisfy this request.
     */
    std::uint8_t*
    allocate(std::size_t extra) noexcept
    {
        if (auto const size = sizeof(Type) + extra; size <= maxSize_)
        {
            for (auto& a : allocators_)
            {
                if (a.size() >= size)
                    return a.allocate();
            }
        }

        return nullptr;
    }

    /** Returns the memory block to the allocator.

        @param ptr A pointer to a memory block.

        @return true if this memory block belonged to one of the allocators
                     in this set and has been released; false otherwise.
     */
    bool
    deallocate(std::uint8_t* ptr) noexcept
    {
        for (auto& a : allocators_)
        {
            if (a.deallocate(ptr))
                return true;
        }

        return false;
    }
};
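
// Editor's sketch (not in the original header): allocating through the set.
// allocate(extra) picks the smallest allocator whose item size can hold
// sizeof(Type) + extra; every successful allocation must be returned to the
// same set. 'set' and 'Thing' are the hypothetical names from above.
//
//     if (std::uint8_t* p = set.allocate(16))
//     {
//         auto* t = new (p) Thing();
//         t->~Thing();
//         set.deallocate(p);
//     }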

}  // namespace ripple

#endif  // RIPPLE_BASICS_SLABALLOCATOR_H_INCLUDED