/*******************************************************************************
 * thrill/mem/pool.cpp
 *
 * Part of Project Thrill - http://project-thrill.org
 *
 * Copyright (C) 2016-2017 Timo Bingmann <[email protected]>
 *
 * All rights reserved. Published under the BSD-2 license in the LICENSE file.
 ******************************************************************************/

#include <thrill/mem/pool.hpp>

#include <tlx/die.hpp>
#include <tlx/math/ffs.hpp>
#include <tlx/math/integer_log2.hpp>

#include <cassert>
#include <cstdio>
#include <cstdlib>
#include <iostream>
#include <limits>
#include <mutex>
#include <new>
#include <sstream>

namespace thrill {
namespace mem {

/******************************************************************************/

Pool& GPool() {
    static Pool* pool = new Pool();
    return *pool;
}

#if defined(__clang__) || defined(__GNUC__)
static __attribute__ ((destructor)) void s_gpool_destroy() { // NOLINT
    // deallocate memory arenas but do not destroy the pool
    GPool().DeallocateAll();
}
#endif
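
/* Editor's sketch (not in the original source): client code uses the singleton
 * pool through the public interface declared in pool.hpp. The byte count
 * passed to deallocate() must match the one passed to allocate().
 *
 *   void* p = thrill::mem::GPool().allocate(100);   // 100-byte block
 *   // ... use p ...
 *   thrill::mem::GPool().deallocate(p, 100);        // same byte count
 */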

/******************************************************************************/
// Pool::Arena

struct Pool::Slot {
    uint32_t size;
    uint32_t next;
};

struct Pool::Arena {
    //! magic word
    size_t magic;
    //! total size of this Arena
    size_t total_size;
    //! next and prev pointers for free list.
    Arena * next_arena, * prev_arena;
    //! oversize arena
    bool oversize;
    //! first sentinel Slot which is never used for payload data, instead size =
    //! remaining free size, and next = index of the first free slot.
    union {
        uint32_t free_size;
        Slot head_slot;
    };
    // following here are actual data slots
    // Slot slots[num_slots()];

    //! the number of available payload slots (excluding head_slot)
    uint32_t num_slots() const {
        return static_cast<uint32_t>(
            (total_size - sizeof(Arena)) / sizeof(Slot));
    }

    Slot * begin() { return &head_slot + 1; }
    Slot * end() { return &head_slot + 1 + num_slots(); }
    Slot * slot(size_t i) { return &head_slot + 1 + i; }

    void * find_free(size_t size);
};
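
/* Editor's sketch (not in the original source) of how the intrusive free list
 * is encoded: head_slot.next holds the index of the first free Slot relative
 * to begin(), each free Slot stores its run length in 'size' and the index of
 * the next free run in 'next', and an index equal to num_slots() terminates
 * the chain. For a freshly allocated arena with N payload slots:
 *
 *   head_slot.next == 0;     // first free run starts at slot 0
 *   slot(0)->size  == N;     // one run covering the whole arena
 *   slot(0)->next  == N;     // == num_slots(), i.e. end of chain
 */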

/******************************************************************************/
// Pool::ObjectPool for small items

struct Pool::ObjectArena {
    //! magic word
    size_t magic;
    //! next and prev pointers for free list.
    ObjectArena* next_arena, * prev_arena;
    //! number of slots free
    size_t free_slots;
    //! array of flag words: each bit in the flag words indicates whether a
    //! slot is used (=0) or free (=1)
    size_t flags[1];

    char * begin(size_t num_flags) {
        return reinterpret_cast<char*>(flags + num_flags);
    }
};
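
/* Editor's sketch (not in the original source): mapping between a slot index
 * and its bit in the flag words, as used by allocate()/deallocate() below.
 * Assumes a 64-bit size_t (8 * sizeof(size_t) == 64 bits per flag word).
 *
 *   size_t slot = 70;
 *   size_t fa = slot / 64;            // flag word index -> 1
 *   size_t fb = slot % 64;            // bit inside word -> 6
 *   size_t mask = size_t(1) << fb;    // bit mask for slot 70
 *   // (flags[fa] & mask) != 0 -> slot is free, == 0 -> slot is used
 */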

class Pool::ObjectPool
{
public:
    ObjectPool(size_t size);
    ~ObjectPool();

    //! allocate a new free arena
    void AllocateObjectArena();

    //! allocate a slot in the object pool
    void * allocate();

    //! free a slot
    void deallocate(void* ptr);

    //! verify arena chains
    void self_verify();

private:
    //! object size in this pool
    size_t size_;
    //! arena chain with free slots
    ObjectArena* free_ = nullptr;
    //! arenas completely full
    ObjectArena* full_ = nullptr;

    static const size_t default_arena_size = 16384;

    //! number of slots in an ObjectArena
    size_t num_slots_;
    //! number of flag words in an ObjectArena
    size_t num_flags_;

    //! total number of object slots allocated
    size_t total_slots_ = 0;
    //! total number of free object slots
    size_t total_free_ = 0;
};

Pool::ObjectPool::ObjectPool(size_t size)
    : size_(size) {
    // calculate number of slots for given object size
    num_slots_ =
        8 * (default_arena_size - sizeof(ObjectArena) + sizeof(size_t))
        / (8 * size_ + 1);

    // calculate number of bit flag words
    num_flags_ = (num_slots_ + (8 * sizeof(size_t) - 1)) / (8 * sizeof(size_t));

    if (debug) {
        printf("ObjectPool() size_=%zu num_slots_=%zu num_flags_=%zu\n",
               size_, num_slots_, num_flags_);
    }

    die_unless(
        default_arena_size >=
        // header
        sizeof(ObjectArena) - sizeof(size_t)
        // flags
        + num_flags_ * sizeof(size_t)
        // data slots
        + num_slots_ * size_);
}
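
/* Editor's worked example (not in the original source), assuming a typical
 * 64-bit build with sizeof(size_t) == 8 and sizeof(ObjectArena) == 40:
 *
 *   ObjectPool(32):
 *     num_slots_ = 8 * (16384 - 40 + 8) / (8 * 32 + 1) = 130816 / 257 = 509
 *     num_flags_ = (509 + 63) / 64 = 8 flag words
 *
 *   The die_unless() above then checks
 *     32 (header) + 8 * 8 (flags) + 509 * 32 (slots) = 16384 <= 16384.
 */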

Pool::ObjectPool::~ObjectPool() {
    if (total_slots_ != total_free_) {
        printf("~ObjectPool() size_=%zu total_used_=%zu\n",
               size_, total_slots_ - total_free_);
    }
}

void Pool::ObjectPool::AllocateObjectArena() {
    // Allocate space for the new block
    ObjectArena* new_arena =
        reinterpret_cast<ObjectArena*>(
            bypass_aligned_alloc(default_arena_size, default_arena_size));
    if (!new_arena) {
        // if (!die_on_failure) return nullptr;
        fprintf(stderr, "out-of-memory - mem::ObjectPool cannot allocate a new ObjectArena."
                " size_=%zu\n", size_);
        abort();
    }

    die_unequal(
        new_arena,
        reinterpret_cast<ObjectArena*>(
            reinterpret_cast<uintptr_t>(new_arena) & ~(default_arena_size - 1)));

    new_arena->magic = 0xAEEA1111AEEA2222LLU + size_;
    new_arena->prev_arena = nullptr;
    new_arena->next_arena = free_;
    if (free_)
        free_->prev_arena = new_arena;
    free_ = new_arena;

    new_arena->free_slots = num_slots_;
    for (size_t i = 0; i < num_flags_; ++i)
        free_->flags[i] = ~size_t(0);

    total_slots_ += num_slots_;
    total_free_ += num_slots_;
}

void* Pool::ObjectPool::allocate() {
    if (debug) {
        printf("ObjectPool::allocate() size_=%zu\n", size_);
    }

    // allocate arenas, keep at least one extra free arena
    while (free_ == nullptr || total_free_ <= num_slots_)
        AllocateObjectArena();

    size_t slot = size_t(-1);
    for (size_t i = 0; i < num_flags_; ++i) {
        unsigned s = tlx::ffs(free_->flags[i]);
        if (s != 0) {
            slot = i * 8 * sizeof(size_t) + (s - 1);
            free_->flags[i] &= ~(size_t(1) << (s - 1));
            break;
        }
    }

    void* ptr = free_->begin(num_flags_) + slot * size_;

    if (--free_->free_slots == 0)
    {
        ObjectArena* next_free = free_->next_arena;

        // put now full ObjectArena into full_ list
        free_->next_arena = full_;
        if (full_)
            full_->prev_arena = free_;
        full_ = free_;

        free_ = next_free;
        if (next_free)
            next_free->prev_arena = nullptr;
    }

    --total_free_;

    return ptr;
}

void Pool::ObjectPool::deallocate(void* ptr) {
    if (debug) {
        printf("ObjectPool::deallocate() size_=%zu\n", size_);
    }

    // find arena containing ptr
    ObjectArena* const arena =
        reinterpret_cast<ObjectArena*>(
            reinterpret_cast<uintptr_t>(ptr) & ~(default_arena_size - 1));
    die_unless(arena->magic == 0xAEEA1111AEEA2222LLU + size_);

    if (!(ptr >= arena->begin(num_flags_) &&
          ptr < arena->begin(num_flags_) + num_slots_ * size_)) {
        assert(!"deallocate() of memory not in any arena.");
        abort();
    }

    // calculate the slot directly
    size_t slot =
        (reinterpret_cast<char*>(ptr) - arena->begin(num_flags_)) / size_;

    size_t fa = slot / (8 * sizeof(size_t));
    size_t fb = slot % (8 * sizeof(size_t));
    size_t mask = (size_t(1) << fb);
    die_unless((arena->flags[fa] & mask) == 0);

    // set free bit
    arena->flags[fa] |= mask;

    if (arena->free_slots == 0)
    {
        if (debug)
            printf("ObjectPool::deallocate() splice free arena from full_ list\n");

        // splice arena from doubly linked list (full_)
        if (arena->prev_arena)
            arena->prev_arena->next_arena = arena->next_arena;
        else {
            die_unless(full_ == arena);
            full_ = arena->next_arena;
        }
        if (arena->next_arena)
            arena->next_arena->prev_arena = arena->prev_arena;

        // put ObjectArena with newly freed slot into free list
        if (free_)
            free_->prev_arena = arena;
        arena->next_arena = free_;
        arena->prev_arena = nullptr;
        free_ = arena;
    }

    ++arena->free_slots;
    ++total_free_;

    if (arena->free_slots == num_slots_ && total_free_ > 16 * num_slots_)
    {
        if (debug)
            printf("ObjectPool::deallocate() splice empty arena from free_ list\n");

        // splice arena from doubly linked list (free_)
        if (arena->prev_arena)
            arena->prev_arena->next_arena = arena->next_arena;
        else {
            die_unless(free_ == arena);
            free_ = arena->next_arena;
        }
        if (arena->next_arena)
            arena->next_arena->prev_arena = arena->prev_arena;

        bypass_aligned_free(arena, default_arena_size);
        total_free_ -= num_slots_;
        total_slots_ -= num_slots_;
    }
}
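
/* Editor's sketch (not in the original source): because every ObjectArena is
 * allocated with bypass_aligned_alloc(default_arena_size, default_arena_size),
 * its base address is a multiple of 16384, and the owning arena of any payload
 * pointer can be recovered by masking, e.g. with default_arena_size == 0x4000:
 *
 *   ptr   = 0x7f1234565a30                     // some payload pointer
 *   arena = ptr & ~uintptr_t(0x4000 - 1)       // -> 0x7f1234564000
 *
 * The same trick is used by Pool::deallocate() below for regular Arenas.
 */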

void Pool::ObjectPool::self_verify() {
    if (debug) {
        printf("ObjectPool::self_verify() size_=%zu\n", size_);
    }

    size_t total_slots = 0, total_free = 0, total_used = 0;

    for (ObjectArena* arena = free_; arena != nullptr;
         arena = arena->next_arena)
    {
        size_t arena_free = 0;

        for (size_t i = 0; i < num_slots_; ++i) {
            size_t fa = i / (8 * sizeof(size_t));
            size_t fb = i % (8 * sizeof(size_t));

            if ((arena->flags[fa] & (size_t(1) << fb)) == 0) {
                // slot is used
                total_used++;
            }
            else {
                // slot is free
                arena_free++;
                total_free++;
            }
        }

        die_unless(arena_free != 0);
        total_slots += num_slots_;

        if (arena->next_arena)
            die_unless(arena->next_arena->prev_arena == arena);
        if (arena->prev_arena)
            die_unless(arena->prev_arena->next_arena == arena);
    }

    for (ObjectArena* arena = full_; arena != nullptr;
         arena = arena->next_arena)
    {
        size_t arena_free = 0;

        for (size_t i = 0; i < num_slots_; ++i) {
            size_t fa = i / (8 * sizeof(size_t));
            size_t fb = i % (8 * sizeof(size_t));

            if ((arena->flags[fa] & (size_t(1) << fb)) == 0) {
                // slot is used
                total_used++;
            }
            else {
                // slot is free
                arena_free++;
                total_free++;
            }
        }

        die_unequal(arena_free, 0u);
        total_slots += num_slots_;

        if (arena->next_arena)
            die_unless(arena->next_arena->prev_arena == arena);
        if (arena->prev_arena)
            die_unless(arena->prev_arena->next_arena == arena);
    }

    die_unequal(total_slots, total_slots_);
    die_unequal(total_free, total_free_);
    die_unequal(total_used, total_slots_ - total_free_);
}

/******************************************************************************/
// internal methods

//! determine bin for size.
static inline size_t calc_bin_for_size(size_t size) {
    if (size == 0)
        return 0;
    else
        return 1 + tlx::integer_log2_floor_template(size);
}

//! lowest size still in bin
static inline size_t bin_lower_bound(size_t bin) {
    if (bin == 0)
        return 0;
    else
        return (size_t(1) << (bin - 1));
}
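
/* Editor's worked example (not in the original source) of the bin mapping:
 *
 *   size (in slots):    0   1   2-3   4-7   8-15   16-31 ...
 *   calc_bin_for_size:  0   1    2     3      4      5   ...
 *   bin_lower_bound:    0   1    2     4      8     16   ...
 *
 * i.e. bin b (b >= 1) holds arenas whose free_size lies in [2^(b-1), 2^b - 1].
 */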

/******************************************************************************/
// Pool

Pool::Pool(size_t default_arena_size) noexcept
    : default_arena_size_(default_arena_size) {
    std::unique_lock<std::mutex> lock(mutex_);

    for (size_t i = 0; i < num_bins + 1; ++i)
        arena_bin_[i] = nullptr;

    allocs_.resize(check_limit);

    // pre-allocate Arenas until the minimum free size is reached
    while (free_ < min_free_)
        AllocateFreeArena(default_arena_size_);

    object_32_ = new ObjectPool(32);
    object_64_ = new ObjectPool(64);
    object_128_ = new ObjectPool(128);
    object_256_ = new ObjectPool(256);
}
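
/* Editor's sketch (not in the original source): a Pool can also be used as a
 * standalone instance instead of the GPool() singleton. The arena size must
 * be a power of two so that the address masking in deallocate() works.
 *
 *   thrill::mem::Pool my_pool(16384);     // default_arena_size
 *   void* p = my_pool.allocate(1000);
 *   my_pool.deallocate(p, 1000);
 */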

Pool::~Pool() noexcept {
    std::unique_lock<std::mutex> lock(mutex_);
    if (size_ != 0) {
        std::cout << "~Pool() pool still contains "
                  << sizeof(Slot) * size_ << " bytes" << std::endl;

        for (size_t i = 0; i < allocs_.size(); ++i) {
            if (allocs_[i].first == nullptr) continue;
            std::cout << "~Pool() has ptr=" << allocs_[i].first
                      << " size=" << allocs_[i].second << std::endl;
        }
    }
    assert(size_ == 0);

    delete object_32_;
    delete object_64_;
    delete object_128_;
    delete object_256_;

    IntDeallocateAll();
}

void* Pool::ArenaFindFree(Arena* arena, size_t bin, size_t n, size_t bytes) {
    // iterate over free areas to find a possible fit
    Slot* prev_slot = &arena->head_slot;
    Slot* curr_slot = arena->begin() + prev_slot->next;

    while (curr_slot != arena->end() && curr_slot->size < n) {
        prev_slot = curr_slot;
        curr_slot = arena->begin() + curr_slot->next;
    }

    // if curr_slot == end, then no suitable contiguous area was found.
    if (TLX_UNLIKELY(curr_slot == arena->end()))
        return nullptr;

    arena->free_size -= n;

    prev_slot->next += n;
    size_ += n;
    free_ -= n;

    if (curr_slot->size > n) {
        // split the free area, since it is larger than needed
        Slot* next_slot = arena->begin() + prev_slot->next;
        assert(next_slot != arena->end());

        next_slot->size = curr_slot->size - n;
        next_slot->next = curr_slot->next;
    }
    else {
        // join used areas
        prev_slot->next = curr_slot->next;
    }

    if (arena->free_size < bin_lower_bound(bin) && !arena->oversize) {
        // recategorize bin into smaller chain.

        size_t new_bin = calc_bin_for_size(arena->free_size);

        if (debug) {
            std::cout << "Recategorize arena, previous free "
                      << arena->free_size + n
                      << " now free " << arena->free_size
                      << " from bin " << bin
                      << " to bin " << new_bin
                      << std::endl;
        }
        assert(bin != new_bin);

        // splice out arena from current bin
        if (arena->prev_arena)
            arena->prev_arena->next_arena = arena->next_arena;
        else
            arena_bin_[bin] = arena->next_arena;

        if (arena->next_arena)
            arena->next_arena->prev_arena = arena->prev_arena;

        // insert at top of new bin
        arena->prev_arena = nullptr;
        arena->next_arena = arena_bin_[new_bin];
        if (arena_bin_[new_bin])
            arena_bin_[new_bin]->prev_arena = arena;
        arena_bin_[new_bin] = arena;
    }

    // allocate more spare memory
    while (free_ < min_free_) {
        if (!AllocateFreeArena(default_arena_size_, false)) break;
    }

    if (debug_check_pairing) {
        size_t i;
        for (i = 0; i < allocs_.size(); ++i) {
            if (allocs_[i].first == nullptr) {
                allocs_[i].first = curr_slot;
                allocs_[i].second = bytes;
                break;
            }
        }
        if (i == allocs_.size()) {
            assert(!"Increase allocs array in Pool().");
            abort();
        }
    }

    return reinterpret_cast<void*>(curr_slot);
}

Pool::Arena* Pool::AllocateFreeArena(size_t arena_size, bool die_on_failure) {

    if (debug) {
        std::cout << "AllocateFreeArena()"
                  << " arena_size=" << arena_size
                  << " die_on_failure=" << die_on_failure << std::endl;
    }

    // Allocate space for the new block
    Arena* new_arena =
        reinterpret_cast<Arena*>(
            bypass_aligned_alloc(default_arena_size_, arena_size));
    if (!new_arena) {
        if (!die_on_failure) return nullptr;
        fprintf(stderr, "out-of-memory - mem::Pool cannot allocate a new Arena."
                " size_=%zu\n", size_);
        abort();
    }

    die_unequal(
        new_arena,
        reinterpret_cast<Arena*>(
            reinterpret_cast<uintptr_t>(new_arena) & ~(default_arena_size_ - 1)));

    new_arena->magic = 0xAEEAAEEAAEEAAEEALLU;
    new_arena->total_size = arena_size;

    // put new area into right chain at the front
    Arena** root;
    if (arena_size <= default_arena_size_) {
        size_t bin = calc_bin_for_size(new_arena->num_slots());
        die_unless(bin < num_bins);
        root = &arena_bin_[bin];
        new_arena->oversize = false;
    }
    else {
        root = &arena_bin_[num_bins];
        new_arena->oversize = true;
    }

    new_arena->prev_arena = nullptr;
    new_arena->next_arena = *root;
    if (*root)
        (*root)->prev_arena = new_arena;
    *root = new_arena;

    new_arena->free_size = new_arena->num_slots();
    new_arena->head_slot.next = 0;

    new_arena->slot(0)->size = new_arena->num_slots();
    new_arena->slot(0)->next = new_arena->num_slots();

    free_ += new_arena->num_slots();

    Arena* check_arena =
        reinterpret_cast<Arena*>(
            reinterpret_cast<uintptr_t>(new_arena) & ~(default_arena_size_ - 1));
    die_unless(check_arena->magic == 0xAEEAAEEAAEEAAEEALLU);

    return new_arena;
}

void Pool::DeallocateAll() {
    std::unique_lock<std::mutex> lock(mutex_);
    IntDeallocateAll();
}

void Pool::IntDeallocateAll() {
    for (size_t i = 0; i <= num_bins; ++i) {
        Arena* curr_arena = arena_bin_[i];
        while (curr_arena != nullptr) {
            Arena* next_arena = curr_arena->next_arena;
            bypass_aligned_free(curr_arena, curr_arena->total_size);
            curr_arena = next_arena;
        }
    }
    min_free_ = 0;
}

size_t Pool::max_size() const noexcept {
    return sizeof(Slot) * std::numeric_limits<uint32_t>::max();
}

size_t Pool::bytes_per_arena(size_t arena_size) {
    return arena_size - sizeof(Arena);
}
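
/* Editor's note (not in the original source): on a typical 64-bit build with
 * sizeof(Arena) == 48 and sizeof(Slot) == 8, bytes_per_arena(16384) == 16336,
 * so a single request larger than that goes into an oversize Arena, and
 * max_size() allows up to 8 * (2^32 - 1) bytes per allocation.
 */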

void* Pool::allocate(size_t bytes) {
    // return malloc(bytes);

    std::unique_lock<std::mutex> lock(mutex_);

    if (debug) {
        std::cout << "Pool::allocate() bytes " << bytes
                  << std::endl;
    }

    if (bytes <= 32)
        return object_32_->allocate();
    if (bytes <= 64)
        return object_64_->allocate();
    if (bytes <= 128)
        return object_128_->allocate();
    if (bytes <= 256)
        return object_256_->allocate();

    // round up to whole slot size, and divide by slot size
    uint32_t n =
        static_cast<uint32_t>((bytes + sizeof(Slot) - 1) / sizeof(Slot));

    // check whether n is too large for allocation in our default Arenas;
    // if so, allocate a special larger one.
    if (n * sizeof(Slot) > bytes_per_arena(default_arena_size_)) {
        if (debug) {
            std::cout << "Allocate overflow arena of size "
                      << n * sizeof(Slot) << std::endl;
        }
        Arena* sp_arena = AllocateFreeArena(sizeof(Arena) + n * sizeof(Slot));

        void* ptr = ArenaFindFree(sp_arena, num_bins, n, bytes);
        if (ptr != nullptr)
            return ptr;
    }

    // find bin for n slots
    size_t bin = calc_bin_for_size(n);
    while (bin < num_bins)
    {
        if (debug)
            std::cout << "Searching in bin " << bin << std::endl;

        Arena* curr_arena = arena_bin_[bin];

        while (curr_arena != nullptr)
        {
            // find an arena with at least n free slots
            if (curr_arena->free_size >= n)
            {
                void* ptr = ArenaFindFree(curr_arena, bin, n, bytes);
                if (ptr != nullptr)
                    return ptr;
            }

            // advance to next arena in free list order
            curr_arena = curr_arena->next_arena;
        }

        // look into larger bin
        ++bin;
    }

    // allocate new arena with default size
    Arena* curr_arena = AllocateFreeArena(default_arena_size_);
    bin = calc_bin_for_size(curr_arena->num_slots());

    // look into new arena
    void* ptr = ArenaFindFree(curr_arena, bin, n, bytes);
    if (ptr != nullptr)
        return ptr;

    die("Pool::allocate() failed, no memory available.");
}
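
/* Editor's worked example (not in the original source): a request of 1000
 * bytes skips the ObjectPools (> 256) and is rounded up to whole 8-byte Slots,
 *
 *   n = (1000 + 8 - 1) / 8 = 125 slots   (125 * 8 = 1000 bytes reserved),
 *
 * and is then served from the first Arena, searching bins
 * calc_bin_for_size(125) == 7, 8, ... upward, in which ArenaFindFree()
 * locates a contiguous run of at least 125 free slots.
 */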

void Pool::deallocate(void* ptr, size_t bytes) {
    // return free(ptr);

    if (ptr == nullptr) return;

    std::unique_lock<std::mutex> lock(mutex_);
    if (debug) {
        std::cout << "Pool::deallocate() ptr " << ptr
                  << " bytes " << bytes << std::endl;
    }

    if (debug_check_pairing) {
        size_t i;
        for (i = 0; i < allocs_.size(); ++i) {
            if (allocs_[i].first != ptr) continue;
            if (bytes != allocs_[i].second) {
                assert(!"Mismatching deallocate() size in Pool().");
                abort();
            }
            allocs_[i].first = nullptr;
            allocs_[i].second = 0;
            break;
        }
        if (i == allocs_.size()) {
            assert(!"Unknown deallocate() in Pool().");
            abort();
        }
    }

    if (bytes <= 32)
        return object_32_->deallocate(ptr);
    if (bytes <= 64)
        return object_64_->deallocate(ptr);
    if (bytes <= 128)
        return object_128_->deallocate(ptr);
    if (bytes <= 256)
        return object_256_->deallocate(ptr);

    // round up to whole slot size, and divide by slot size
    uint32_t n
        = static_cast<uint32_t>((bytes + sizeof(Slot) - 1) / sizeof(Slot));
    assert(n <= size_);

    // find arena containing ptr
    Arena* arena =
        reinterpret_cast<Arena*>(
            reinterpret_cast<uintptr_t>(ptr) & ~(default_arena_size_ - 1));
    die_unless(arena->magic == 0xAEEAAEEAAEEAAEEALLU);

    if (!(ptr >= arena->begin() && ptr < arena->end())) {
        assert(!"deallocate() of memory not in any arena.");
        abort();
    }

    Slot* prev_slot = &arena->head_slot;
    Slot* ptr_slot = reinterpret_cast<Slot*>(ptr);

    // advance prev_slot until the next jumps over ptr.
    while (arena->begin() + prev_slot->next < ptr_slot) {
        prev_slot = arena->begin() + prev_slot->next;
    }

    // fill deallocated slot with free information
    ptr_slot->next = prev_slot->next;
    ptr_slot->size = n;

    prev_slot->next = static_cast<uint32_t>(ptr_slot - arena->begin());

    // defragment free slots, but exempt the head_slot
    if (prev_slot == &arena->head_slot)
        prev_slot = arena->begin() + arena->head_slot.next;

    while (prev_slot->next != arena->num_slots() &&
           prev_slot->next == prev_slot - arena->begin() + prev_slot->size)
    {
        // join free slots
        Slot* next_slot = arena->begin() + prev_slot->next;
        prev_slot->size += next_slot->size;
        prev_slot->next = next_slot->next;
    }

    arena->free_size += n;
    size_ -= n;
    free_ += n;

    // always deallocate oversize arenas
    if (arena->oversize)
    {
        if (debug)
            std::cout << "destroy special arena" << std::endl;

        // splice out arena from current bin
        if (arena->prev_arena)
            arena->prev_arena->next_arena = arena->next_arena;
        else
            arena_bin_[num_bins] = arena->next_arena;

        if (arena->next_arena)
            arena->next_arena->prev_arena = arena->prev_arena;

        free_ -= arena->num_slots();
        bypass_aligned_free(arena, arena->total_size);
        return;
    }

    // if this arena is empty and the free_ space is beyond our min_free_
    // limit, simply deallocate it.
    if (arena->free_size == arena->num_slots() &&
        free_ >= min_free_ + arena->num_slots())
    {
        if (debug)
            std::cout << "destroy empty arena" << std::endl;

        size_t bin = calc_bin_for_size(arena->free_size - n);

        // splice out arena from current bin
        if (arena->prev_arena)
            arena->prev_arena->next_arena = arena->next_arena;
        else
            arena_bin_[bin] = arena->next_arena;

        if (arena->next_arena)
            arena->next_arena->prev_arena = arena->prev_arena;

        // free arena
        free_ -= arena->num_slots();
        bypass_aligned_free(arena, arena->total_size);
        return;
    }

    if (calc_bin_for_size(arena->free_size - n) !=
        calc_bin_for_size(arena->free_size))
    {
        // recategorize arena into larger chain.
        if (debug)
            std::cout << "recategorize arena into larger chain." << std::endl;

        size_t bin = calc_bin_for_size(arena->free_size - n);
        size_t new_bin = calc_bin_for_size(arena->free_size);

        if (debug) {
            std::cout << "Recategorize arena, previous free "
                      << arena->free_size - n
                      << " now free " << arena->free_size
                      << " from bin " << bin
                      << " to bin " << new_bin
                      << std::endl;
        }

        // splice out arena from current bin
        if (arena->prev_arena)
            arena->prev_arena->next_arena = arena->next_arena;
        else
            arena_bin_[bin] = arena->next_arena;

        if (arena->next_arena)
            arena->next_arena->prev_arena = arena->prev_arena;

        // insert at top of new bin
        arena->prev_arena = nullptr;
        arena->next_arena = arena_bin_[new_bin];
        if (arena_bin_[new_bin])
            arena_bin_[new_bin]->prev_arena = arena;
        arena_bin_[new_bin] = arena;
    }
}
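
/* Editor's note (not in the original source): deallocate() must be called
 * with the same byte count that was passed to allocate(), because the size
 * decides both the ObjectPool routing (<= 256 bytes) and the number of Slots
 * returned to the Arena free list; debug_check_pairing enforces this.
 *
 *   void* p = GPool().allocate(300);
 *   GPool().deallocate(p, 300);    // ok
 *   // GPool().deallocate(p, 200); // would corrupt the pool / trip asserts
 */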

void Pool::print(bool debug) {
    if (debug) {
        std::cout << "Pool::print()"
                  << " size_=" << size_ << " free_=" << free_ << std::endl;
    }

    size_t total_free = 0, total_size = 0;

    for (size_t bin = 0; bin < num_bins; ++bin)
    {
        for (Arena* curr_arena = arena_bin_[bin]; curr_arena != nullptr;
             curr_arena = curr_arena->next_arena)
        {
            std::ostringstream oss;

            size_t arena_bin = calc_bin_for_size(curr_arena->free_size);
            die_unequal(arena_bin, bin);

            size_t slot = curr_arena->head_slot.next;
            size_t size = 0, free = 0;

            // used area at beginning
            size += slot;

            while (slot != curr_arena->num_slots()) {
                if (debug)
                    oss << " slot[" << slot
                        << ",size=" << curr_arena->slot(slot)->size
                        << ",next=" << curr_arena->slot(slot)->next << ']';

                if (curr_arena->slot(slot)->next <= slot) {
                    std::cout << "invalid chain:" << oss.str() << std::endl;
                    abort();
                }

                free += curr_arena->slot(slot)->size;
                size += curr_arena->slot(slot)->next - slot - curr_arena->slot(slot)->size;
                slot = curr_arena->slot(slot)->next;
            }

            if (debug) {
                std::cout << "arena[" << bin << ':' << curr_arena << "]"
                          << " free_size=" << curr_arena->free_size
                          << " head_slot.next=" << curr_arena->head_slot.next
                          << oss.str()
                          << std::endl;
            }

            die_unequal(curr_arena->head_slot.size, free);

            total_free += free;
            total_size += size;

            if (curr_arena->next_arena)
                die_unless(curr_arena->next_arena->prev_arena == curr_arena);
            if (curr_arena->prev_arena)
                die_unless(curr_arena->prev_arena->next_arena == curr_arena);
        }
    }

    die_unequal(total_size, size_);
    die_unequal(total_free, free_);
}

void Pool::self_verify() {
    print(false);
}

} // namespace mem
} // namespace thrill

/******************************************************************************/