libstdc++
bitmap_allocator.h
1 // Bitmap Allocator. -*- C++ -*-
2 
3 // Copyright (C) 2004-2021 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15 
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19 
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24 
25 /** @file ext/bitmap_allocator.h
26  * This file is a GNU extension to the Standard C++ Library.
27  */
28 
29 #ifndef _BITMAP_ALLOCATOR_H
30 #define _BITMAP_ALLOCATOR_H 1
31 
32 #include <utility> // For std::pair.
33 #include <bits/functexcept.h> // For __throw_bad_alloc().
34 #include <functional> // For greater_equal, and less_equal.
35 #include <new> // For operator new.
36 #include <debug/debug.h> // _GLIBCXX_DEBUG_ASSERT
37 #include <ext/concurrence.h>
38 #include <bits/move.h>
39 
40 /** @brief The constant in the expression below is the alignment
41  * required in bytes.
42  */
43 #define _BALLOC_ALIGN_BYTES 8
44 
45 namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
46 {
47 _GLIBCXX_BEGIN_NAMESPACE_VERSION
48 
49  namespace __detail
50  {
51  /** @class __mini_vector bitmap_allocator.h bitmap_allocator.h
52  *
53  * @brief __mini_vector<> is a stripped down version of the
54  * full-fledged std::vector<>.
55  *
56  * It is to be used only for built-in types or PODs. Notable
57  * differences are:
58  *
59  * 1. Not all accessor functions are present.
60  * 2. Used ONLY for PODs.
61  * 3. No Allocator template argument. Uses ::operator new() to get
62  * memory, and ::operator delete() to free it.
63  * Caveat: The dtor does NOT free the memory allocated, so this is
64  * a memory-leaking vector!
65  */
66  template<typename _Tp>
67  class __mini_vector
68  {
69  __mini_vector(const __mini_vector&);
70  __mini_vector& operator=(const __mini_vector&);
71 
72  public:
73  typedef _Tp value_type;
74  typedef _Tp* pointer;
75  typedef _Tp& reference;
76  typedef const _Tp& const_reference;
77  typedef std::size_t size_type;
78  typedef std::ptrdiff_t difference_type;
79  typedef pointer iterator;
80 
81  private:
82  pointer _M_start;
83  pointer _M_finish;
84  pointer _M_end_of_storage;
85 
86  size_type
87  _M_space_left() const throw()
88  { return _M_end_of_storage - _M_finish; }
89 
90  _GLIBCXX_NODISCARD pointer
91  allocate(size_type __n)
92  { return static_cast<pointer>(::operator new(__n * sizeof(_Tp))); }
93 
94  void
95  deallocate(pointer __p, size_type)
96  { ::operator delete(__p); }
97 
98  public:
99  // Members used: size(), push_back(), pop_back(),
100  // insert(iterator, const_reference), erase(iterator),
101  // begin(), end(), back(), operator[].
102 
103  __mini_vector()
104  : _M_start(0), _M_finish(0), _M_end_of_storage(0) { }
105 
106  size_type
107  size() const throw()
108  { return _M_finish - _M_start; }
109 
110  iterator
111  begin() const throw()
112  { return this->_M_start; }
113 
114  iterator
115  end() const throw()
116  { return this->_M_finish; }
117 
118  reference
119  back() const throw()
120  { return *(this->end() - 1); }
121 
122  reference
123  operator[](const size_type __pos) const throw()
124  { return this->_M_start[__pos]; }
125 
126  void
127  insert(iterator __pos, const_reference __x);
128 
129  void
130  push_back(const_reference __x)
131  {
132  if (this->_M_space_left())
133  {
134  *this->end() = __x;
135  ++this->_M_finish;
136  }
137  else
138  this->insert(this->end(), __x);
139  }
140 
141  void
142  pop_back() throw()
143  { --this->_M_finish; }
144 
145  void
146  erase(iterator __pos) throw();
147 
148  void
149  clear() throw()
150  { this->_M_finish = this->_M_start; }
151  };
152 
153  // Out of line function definitions.
154  template<typename _Tp>
155  void __mini_vector<_Tp>::
156  insert(iterator __pos, const_reference __x)
157  {
158  if (this->_M_space_left())
159  {
160  size_type __to_move = this->_M_finish - __pos;
161  iterator __dest = this->end();
162  iterator __src = this->end() - 1;
163 
164  ++this->_M_finish;
165  while (__to_move)
166  {
167  *__dest = *__src;
168  --__dest; --__src; --__to_move;
169  }
170  *__pos = __x;
171  }
172  else
173  {
174  size_type __new_size = this->size() ? this->size() * 2 : 1;
175  iterator __new_start = this->allocate(__new_size);
176  iterator __first = this->begin();
177  iterator __start = __new_start;
178  while (__first != __pos)
179  {
180  *__start = *__first;
181  ++__start; ++__first;
182  }
183  *__start = __x;
184  ++__start;
185  while (__first != this->end())
186  {
187  *__start = *__first;
188  ++__start; ++__first;
189  }
190  if (this->_M_start)
191  this->deallocate(this->_M_start, this->size());
192 
193  this->_M_start = __new_start;
194  this->_M_finish = __start;
195  this->_M_end_of_storage = this->_M_start + __new_size;
196  }
197  }
198 
199  template<typename _Tp>
200  void __mini_vector<_Tp>::
201  erase(iterator __pos) throw()
202  {
203  while (__pos + 1 != this->end())
204  {
205  *__pos = __pos[1];
206  ++__pos;
207  }
208  --this->_M_finish;
209  }
210 
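  // A minimal sketch of how this helper behaves (illustrative only;
  // the function name __mini_vector_demo and the int instantiation
  // are hypothetical, not part of the library). As noted above, the
  // destructor never frees the storage obtained from ::operator new.
  inline std::size_t
  __mini_vector_demo()
  {
    __mini_vector<int> __v;         // starts empty
    __v.push_back(1);               // doubles its capacity when full
    __v.push_back(2);
    __v.insert(__v.begin(), 0);     // __v now holds 0, 1, 2
    __v.erase(__v.begin() + 1);     // __v now holds 0, 2
    return __v.size();              // yields 2
  }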
211 
212  template<typename _Tp>
213  struct __mv_iter_traits
214  {
215  typedef typename _Tp::value_type value_type;
216  typedef typename _Tp::difference_type difference_type;
217  };
218 
219  template<typename _Tp>
220  struct __mv_iter_traits<_Tp*>
221  {
222  typedef _Tp value_type;
223  typedef std::ptrdiff_t difference_type;
224  };
225 
226  enum
227  {
228  bits_per_byte = 8,
229  bits_per_block = sizeof(std::size_t) * std::size_t(bits_per_byte)
230  };
231 
232  template<typename _ForwardIterator, typename _Tp, typename _Compare>
233  _ForwardIterator
234  __lower_bound(_ForwardIterator __first, _ForwardIterator __last,
235  const _Tp& __val, _Compare __comp)
236  {
237  typedef typename __mv_iter_traits<_ForwardIterator>::difference_type
238  _DistanceType;
239 
240  _DistanceType __len = __last - __first;
241  _DistanceType __half;
242  _ForwardIterator __middle;
243 
244  while (__len > 0)
245  {
246  __half = __len >> 1;
247  __middle = __first;
248  __middle += __half;
249  if (__comp(*__middle, __val))
250  {
251  __first = __middle;
252  ++__first;
253  __len = __len - __half - 1;
254  }
255  else
256  __len = __half;
257  }
258  return __first;
259  }
260 
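  // A small sketch of __lower_bound on a sorted range (illustrative
  // only; the array __a, the function name __lower_bound_demo and the
  // use of std::less are hypothetical). Like std::lower_bound, it
  // returns the first position whose element does not compare less
  // than the given value.
  inline bool
  __lower_bound_demo()
  {
    std::size_t __a[] = { 2, 4, 4, 8, 16 };
    std::size_t* __pos = __lower_bound(__a, __a + 5, std::size_t(4),
                                       std::less<std::size_t>());
    return __pos == __a + 1;        // points at the first 4
  }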
261  /** @brief The number of Blocks pointed to by the address pair
262  * passed to the function.
263  */
264  template<typename _AddrPair>
265  inline std::size_t
266  __num_blocks(_AddrPair __ap)
267  { return (__ap.second - __ap.first) + 1; }
268 
269  /** @brief The number of Bit-maps pointed to by the address pair
270  * passed to the function.
271  */
272  template<typename _AddrPair>
273  inline std::size_t
274  __num_bitmaps(_AddrPair __ap)
275  { return __num_blocks(__ap) / std::size_t(bits_per_block); }
276 
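  // A worked example (the function name __num_bitmaps_demo and the
  // parameter __base are hypothetical): an address pair covering 128
  // blocks needs 128 / bits_per_block bitmap words, i.e. two words
  // when std::size_t is 64 bits wide.
  inline bool
  __num_bitmaps_demo(char* __base)
  {
    // __base is assumed to point to at least 128 contiguous chars.
    std::pair<char*, char*> __ap(__base, __base + 127);
    return __num_blocks(__ap) == 128
           && __num_bitmaps(__ap) == 128 / std::size_t(bits_per_block);
  }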
277  // _Tp should be a pointer type.
278  template<typename _Tp>
279  class _Inclusive_between
280  : public std::unary_function<typename std::pair<_Tp, _Tp>, bool>
281  {
282  typedef _Tp pointer;
283  pointer _M_ptr_value;
284  typedef typename std::pair<_Tp, _Tp> _Block_pair;
285 
286  public:
287  _Inclusive_between(pointer __ptr) : _M_ptr_value(__ptr)
288  { }
289 
290  bool
291  operator()(_Block_pair __bp) const throw()
292  {
293  if (std::less_equal<pointer>()(_M_ptr_value, __bp.second)
294  && std::greater_equal<pointer>()(_M_ptr_value, __bp.first))
295  return true;
296  else
297  return false;
298  }
299  };
300 
301  // Used to pass a Functor to functions by reference.
302  template<typename _Functor>
303  class _Functor_Ref
304  : public std::unary_function<typename _Functor::argument_type,
305  typename _Functor::result_type>
306  {
307  _Functor& _M_fref;
308 
309  public:
310  typedef typename _Functor::argument_type argument_type;
311  typedef typename _Functor::result_type result_type;
312 
313  _Functor_Ref(_Functor& __fref) : _M_fref(__fref)
314  { }
315 
316  result_type
317  operator()(argument_type __arg)
318  { return _M_fref(__arg); }
319  };
320 
321  /** @class _Ffit_finder bitmap_allocator.h bitmap_allocator.h
322  *
323  * @brief The class which acts as a predicate for applying the
324  * first-fit memory allocation policy for the bitmap allocator.
325  */
326  // _Tp should be a pointer type. The predicate is applied to the
327  // vector of block pairs maintained by the allocator.
328  template<typename _Tp>
329  class _Ffit_finder
330  : public std::unary_function<typename std::pair<_Tp, _Tp>, bool>
331  {
332  typedef typename std::pair<_Tp, _Tp> _Block_pair;
333  typedef typename __detail::__mini_vector<_Block_pair> _BPVector;
334  typedef typename _BPVector::difference_type _Counter_type;
335 
336  std::size_t* _M_pbitmap;
337  _Counter_type _M_data_offset;
338 
339  public:
340  _Ffit_finder() : _M_pbitmap(0), _M_data_offset(0)
341  { }
342 
343  bool
344  operator()(_Block_pair __bp) throw()
345  {
346  using std::size_t;
347  // Set __rover to the last physical location bitmap,
348  // which is the bitmap which belongs to the first free
349  // block. Thus, the bitmaps are in exact reverse order of
350  // the actual memory layout. So, we count down the bitmaps,
351  // which is the same as moving up the memory.
352 
353  // If the used count stored at the start of the Bit Map headers
354  // is equal to the number of Objects that the current Block can
355  // store, then there is definitely no space for another single
356  // object, so just return false.
357  _Counter_type __diff = __detail::__num_bitmaps(__bp);
358 
359  if (*(reinterpret_cast<size_t*>
360  (__bp.first) - (__diff + 1)) == __detail::__num_blocks(__bp))
361  return false;
362 
363  size_t* __rover = reinterpret_cast<size_t*>(__bp.first) - 1;
364 
365  for (_Counter_type __i = 0; __i < __diff; ++__i)
366  {
367  _M_data_offset = __i;
368  if (*__rover)
369  {
370  _M_pbitmap = __rover;
371  return true;
372  }
373  --__rover;
374  }
375  return false;
376  }
377 
378  std::size_t*
379  _M_get() const throw()
380  { return _M_pbitmap; }
381 
382  _Counter_type
383  _M_offset() const throw()
384  { return _M_data_offset * std::size_t(bits_per_block); }
385  };
386 
387  /** @class _Bitmap_counter bitmap_allocator.h bitmap_allocator.h
388  *
389  * @brief The bitmap counter which acts as the bitmap
390  * manipulator, and manages the bit-manipulation functions and
391  * the searching and identification functions on the bit-map.
392  */
393  // _Tp should be a pointer type.
394  template<typename _Tp>
395  class _Bitmap_counter
396  {
397  typedef typename
398  __detail::__mini_vector<typename std::pair<_Tp, _Tp> > _BPVector;
399  typedef typename _BPVector::size_type _Index_type;
400  typedef _Tp pointer;
401 
402  _BPVector& _M_vbp;
403  std::size_t* _M_curr_bmap;
404  std::size_t* _M_last_bmap_in_block;
405  _Index_type _M_curr_index;
406 
407  public:
408  // Use the 2nd parameter with care. Make sure that such an
409  // entry exists in the vector before passing that particular
410  // index to this ctor.
411  _Bitmap_counter(_BPVector& Rvbp, long __index = -1) : _M_vbp(Rvbp)
412  { this->_M_reset(__index); }
413 
414  void
415  _M_reset(long __index = -1) throw()
416  {
417  if (__index == -1)
418  {
419  _M_curr_bmap = 0;
420  _M_curr_index = static_cast<_Index_type>(-1);
421  return;
422  }
423 
424  _M_curr_index = __index;
425  _M_curr_bmap = reinterpret_cast<std::size_t*>
426  (_M_vbp[_M_curr_index].first) - 1;
427 
428  _GLIBCXX_DEBUG_ASSERT(__index <= (long)_M_vbp.size() - 1);
429 
430  _M_last_bmap_in_block = _M_curr_bmap
431  - ((_M_vbp[_M_curr_index].second
432  - _M_vbp[_M_curr_index].first + 1)
433  / std::size_t(bits_per_block) - 1);
434  }
435 
436  // Dangerous function! Use with extreme care. Pass to this
437  // function ONLY those values that are known to be correct,
438  // otherwise the counter's internal state will be corrupted.
439  void
440  _M_set_internal_bitmap(std::size_t* __new_internal_marker) throw()
441  { _M_curr_bmap = __new_internal_marker; }
442 
443  bool
444  _M_finished() const throw()
445  { return(_M_curr_bmap == 0); }
446 
447  _Bitmap_counter&
448  operator++() throw()
449  {
450  if (_M_curr_bmap == _M_last_bmap_in_block)
451  {
452  if (++_M_curr_index == _M_vbp.size())
453  _M_curr_bmap = 0;
454  else
455  this->_M_reset(_M_curr_index);
456  }
457  else
458  --_M_curr_bmap;
459  return *this;
460  }
461 
462  std::size_t*
463  _M_get() const throw()
464  { return _M_curr_bmap; }
465 
466  pointer
467  _M_base() const throw()
468  { return _M_vbp[_M_curr_index].first; }
469 
470  _Index_type
471  _M_offset() const throw()
472  {
473  return std::size_t(bits_per_block)
474  * ((reinterpret_cast<std::size_t*>(this->_M_base())
475  - _M_curr_bmap) - 1);
476  }
477 
478  _Index_type
479  _M_where() const throw()
480  { return _M_curr_index; }
481  };
482 
483  /** @brief Mark a memory address as allocated by re-setting the
484  * corresponding bit in the bit-map.
485  */
486  inline void
487  __bit_allocate(std::size_t* __pbmap, std::size_t __pos) throw()
488  {
489  std::size_t __mask = 1 << __pos;
490  __mask = ~__mask;
491  *__pbmap &= __mask;
492  }
493 
494  /** @brief Mark a memory address as free by setting the
495  * corresponding bit in the bit-map.
496  */
497  inline void
498  __bit_free(std::size_t* __pbmap, std::size_t __pos) throw()
499  {
500  std::size_t __mask = 1 << __pos;
501  *__pbmap |= __mask;
502  }
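  // A short sketch of the two helpers above (the function name
  // __bit_demo is hypothetical). A set bit means "free", a cleared
  // bit means "allocated".
  inline bool
  __bit_demo()
  {
    std::size_t __bmap = ~static_cast<std::size_t>(0); // all free
    __bit_allocate(&__bmap, 3);   // clear bit 3: block 3 is now in use
    bool __taken = !(__bmap & (std::size_t(1) << 3));
    __bit_free(&__bmap, 3);       // set bit 3 again: block 3 is free
    return __taken && __bmap == ~static_cast<std::size_t>(0);
  }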
503  } // namespace __detail
504 
505  /** @brief Generic Version of the bsf instruction.
506  */
507  inline std::size_t
508  _Bit_scan_forward(std::size_t __num)
509  { return static_cast<std::size_t>(__builtin_ctzl(__num)); }
510 
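  // _Bit_scan_forward yields the index of the least significant set
  // bit, which the allocator uses to locate the first free block in a
  // bitmap word. A small sketch (the name __bsf_demo is hypothetical):
  inline bool
  __bsf_demo()
  {
    // 0xF0 is binary 11110000, so the lowest set bit is bit 4.
    return _Bit_scan_forward(0xF0) == 4;
  }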
511  /** @class free_list bitmap_allocator.h bitmap_allocator.h
512  *
513  * @brief The free list class for managing chunks of memory to be
514  * given to and returned by the bitmap_allocator.
515  */
516  class free_list
517  {
518  public:
519  typedef std::size_t* value_type;
520  typedef __detail::__mini_vector<value_type> vector_type;
521  typedef vector_type::iterator iterator;
522  typedef __mutex __mutex_type;
523 
524  private:
525  struct _LT_pointer_compare
526  {
527  bool
528  operator()(const std::size_t* __pui,
529  const std::size_t __cui) const throw()
530  { return *__pui < __cui; }
531  };
532 
533 #if defined __GTHREADS
534  __mutex_type&
535  _M_get_mutex()
536  {
537  static __mutex_type _S_mutex;
538  return _S_mutex;
539  }
540 #endif
541 
542  vector_type&
543  _M_get_free_list()
544  {
545  static vector_type _S_free_list;
546  return _S_free_list;
547  }
548 
549  /** @brief Performs validation of a memory block based on its size.
550  *
551  * @param __addr The pointer to the memory block to be
552  * validated.
553  *
554  * Validates the memory block passed to this function and
555  * manages the free list of blocks accordingly, either adding
556  * this block to the free list, or freeing this block or a
557  * larger block already present in the free list.
558  */
559  void
560  _M_validate(std::size_t* __addr) throw()
561  {
562  vector_type& __free_list = _M_get_free_list();
563  const vector_type::size_type __max_size = 64;
564  if (__free_list.size() >= __max_size)
565  {
566  // Ok, the threshold value has been reached. We determine
567  // which block to remove from the list of free blocks.
568  if (*__addr >= *__free_list.back())
569  {
570  // Ok, the new block is greater than or equal to the
571  // last block in the list of free blocks. We just free
572  // the new block.
573  ::operator delete(static_cast<void*>(__addr));
574  return;
575  }
576  else
577  {
578  // Deallocate the last block in the list of free lists,
579  // and insert the new one in its correct position.
580  ::operator delete(static_cast<void*>(__free_list.back()));
581  __free_list.pop_back();
582  }
583  }
584 
585  // Just add the block to the list of free lists unconditionally.
586  iterator __temp = __detail::__lower_bound
587  (__free_list.begin(), __free_list.end(),
588  *__addr, _LT_pointer_compare());
589 
590  // We may insert the new free block before __temp.
591  __free_list.insert(__temp, __addr);
592  }
593 
594  /** @brief Decides whether the wastage of memory is acceptable for
595  * the current memory request and returns accordingly.
596  *
597  * @param __block_size The size of the block available in the free
598  * list.
599  *
600  * @param __required_size The required size of the memory block.
601  *
602  * @return true if the wastage incurred is acceptable, else returns
603  * false.
604  */
605  bool
606  _M_should_i_give(std::size_t __block_size,
607  std::size_t __required_size) throw()
608  {
609  const std::size_t __max_wastage_percentage = 36;
610  if (__block_size >= __required_size &&
611  (((__block_size - __required_size) * 100 / __block_size)
612  < __max_wastage_percentage))
613  return true;
614  else
615  return false;
616  }
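  // Worked example of the 36% threshold above: for a free block of
  // 100 bytes, a request for 70 bytes wastes (100 - 70) * 100 / 100
  // = 30%, which is acceptable, so the block is handed out; a
  // request for 60 bytes would waste 40%, so this block is rejected
  // for that request.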
617 
618  public:
619  /** @brief This function returns the block of memory to the
620  * internal free list.
621  *
622  * @param __addr The pointer to the memory block that was given
623  * by a call to the _M_get function.
624  */
625  inline void
626  _M_insert(std::size_t* __addr) throw()
627  {
628 #if defined __GTHREADS
629  __scoped_lock __bfl_lock(_M_get_mutex());
630 #endif
631  // Call _M_validate to decide what should be done with
632  // this particular free list.
633  this->_M_validate(reinterpret_cast<std::size_t*>(__addr) - 1);
634  // See discussion as to why this is 1!
635  }
636 
637  /** @brief This function gets a block of memory of the specified
638  * size from the free list.
639  *
640  * @param __sz The size in bytes of the memory required.
641  *
642  * @return A pointer to the new memory block of size at least
643  * equal to that requested.
644  */
645  std::size_t*
646  _M_get(std::size_t __sz) _GLIBCXX_THROW(std::bad_alloc);
647 
648  /** @brief This function just clears the internal Free List, and
649  * gives back all the memory to the OS.
650  */
651  void
652  _M_clear();
653  };
654 
655 
656  // Forward declare the class.
657  template<typename _Tp>
658  class bitmap_allocator;
659 
660  // Specialize for void:
661  template<>
662  class bitmap_allocator<void>
663  {
664  public:
665  typedef void* pointer;
666  typedef const void* const_pointer;
667 
668  // Reference-to-void members are impossible.
669  typedef void value_type;
670  template<typename _Tp1>
671  struct rebind
672  {
673  typedef bitmap_allocator<_Tp1> other;
674  };
675  };
676 
677  /**
678  * @brief Bitmap Allocator, primary template.
679  * @ingroup allocators
680  */
681  template<typename _Tp>
682  class bitmap_allocator : private free_list
683  {
684  public:
685  typedef std::size_t size_type;
686  typedef std::ptrdiff_t difference_type;
687  typedef _Tp* pointer;
688  typedef const _Tp* const_pointer;
689  typedef _Tp& reference;
690  typedef const _Tp& const_reference;
691  typedef _Tp value_type;
692  typedef free_list::__mutex_type __mutex_type;
693 
694  template<typename _Tp1>
695  struct rebind
696  {
697  typedef bitmap_allocator<_Tp1> other;
698  };
699 
700 #if __cplusplus >= 201103L
701  // _GLIBCXX_RESOLVE_LIB_DEFECTS
702  // 2103. propagate_on_container_move_assignment
703  typedef std::true_type propagate_on_container_move_assignment;
704 #endif
705 
706  private:
707  template<std::size_t _BSize, std::size_t _AlignSize>
708  struct aligned_size
709  {
710  enum
711  {
712  modulus = _BSize % _AlignSize,
713  value = _BSize + (modulus ? _AlignSize - (modulus) : 0)
714  };
715  };
716 
717  struct _Alloc_block
718  {
719  char __M_unused[aligned_size<sizeof(value_type),
720  _BALLOC_ALIGN_BYTES>::value];
721  };
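  // Worked example of aligned_size with _BALLOC_ALIGN_BYTES == 8:
  // for sizeof(value_type) == 1, modulus == 1 and value == 8; for
  // sizeof(value_type) == 12, modulus == 4 and value == 16; for
  // sizeof(value_type) == 24, modulus == 0 and value == 24. Every
  // _Alloc_block is therefore padded to a multiple of 8 bytes.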
722 
723 
724  typedef typename std::pair<_Alloc_block*, _Alloc_block*> _Block_pair;
725 
726  typedef typename __detail::__mini_vector<_Block_pair> _BPVector;
727  typedef typename _BPVector::iterator _BPiter;
728 
729  template<typename _Predicate>
730  static _BPiter
731  _S_find(_Predicate __p)
732  {
733  _BPiter __first = _S_mem_blocks.begin();
734  while (__first != _S_mem_blocks.end() && !__p(*__first))
735  ++__first;
736  return __first;
737  }
738 
739 #if defined _GLIBCXX_DEBUG
740  // Complexity: O(lg(N)), where N is the number of blocks of size
741  // sizeof(value_type).
742  void
743  _S_check_for_free_blocks() throw()
744  {
745  typedef typename __detail::_Ffit_finder<_Alloc_block*> _FFF;
746  _BPiter __bpi = _S_find(_FFF());
747 
748  _GLIBCXX_DEBUG_ASSERT(__bpi == _S_mem_blocks.end());
749  }
750 #endif
751 
752  /** @brief Responsible for exponentially growing the internal
753  * memory pool.
754  *
755  * @throw std::bad_alloc. If memory cannot be allocated.
756  *
757  * Complexity: O(1), but internally depends upon the
758  * complexity of the function free_list::_M_get. The part where
759  * the bitmap headers are written has complexity O(X), where X
760  * is the number of blocks of size sizeof(value_type) within
761  * the newly acquired block, which is a tight bound.
762  */
763  void
764  _S_refill_pool() _GLIBCXX_THROW(std::bad_alloc)
765  {
766  using std::size_t;
767 #if defined _GLIBCXX_DEBUG
768  _S_check_for_free_blocks();
769 #endif
770 
771  const size_t __num_bitmaps = (_S_block_size
772  / size_t(__detail::bits_per_block));
773  const size_t __size_to_allocate = sizeof(size_t)
774  + _S_block_size * sizeof(_Alloc_block)
775  + __num_bitmaps * sizeof(size_t);
776 
777  size_t* __temp =
778  reinterpret_cast<size_t*>(this->_M_get(__size_to_allocate));
779  *__temp = 0;
780  ++__temp;
781 
782  // The Header information goes at the Beginning of the Block.
783  _Block_pair __bp =
784  std::make_pair(reinterpret_cast<_Alloc_block*>
785  (__temp + __num_bitmaps),
786  reinterpret_cast<_Alloc_block*>
787  (__temp + __num_bitmaps)
788  + _S_block_size - 1);
789 
790  // Fill the Vector with this information.
791  _S_mem_blocks.push_back(__bp);
792 
793  for (size_t __i = 0; __i < __num_bitmaps; ++__i)
794  __temp[__i] = ~static_cast<size_t>(0); // 1 Indicates all Free.
795 
796  _S_block_size *= 2;
797  }
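  // Layout of one chunk produced by _S_refill_pool, assuming a
  // 64-bit std::size_t and the initial _S_block_size of 128: the
  // chunk starts with one size_t use count (initially 0), followed
  // by 128 / 64 == 2 bitmap words (initially all ones, i.e. all
  // free), followed by the 128 _Alloc_blocks themselves. Since
  // __bp.first points at the first _Alloc_block, the bitmaps live at
  // __bp.first - 1 and __bp.first - 2 (viewed as size_t*), and the
  // use count at __bp.first - 3, which is how _Ffit_finder and
  // _M_deallocate_single_object reach them.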
798 
799  static _BPVector _S_mem_blocks;
800  static std::size_t _S_block_size;
801  static __detail::_Bitmap_counter<_Alloc_block*> _S_last_request;
802  static typename _BPVector::size_type _S_last_dealloc_index;
803 #if defined __GTHREADS
804  static __mutex_type _S_mut;
805 #endif
806 
807  public:
808 
809  /** @brief Allocates memory for a single object of size
810  * sizeof(_Tp).
811  *
812  * @throw std::bad_alloc. If memory cannot be allocated.
813  *
814  * Complexity: The worst case complexity is O(N), but that
815  * case is hardly ever hit. When it is, the next few
816  * allocations are guaranteed to have a worst case
817  * complexity of O(1), which is why this function performs
818  * very well on average. You can consider this function to
819  * have a complexity commonly referred to as amortized
820  * constant time.
821  */
822  pointer
823  _M_allocate_single_object() _GLIBCXX_THROW(std::bad_alloc)
824  {
825  using std::size_t;
826 #if defined __GTHREADS
827  __scoped_lock __bit_lock(_S_mut);
828 #endif
829 
830  // The algorithm: _S_last_request points to the bitmap that
831  // served the last allocation. Starting from that bitmap, we
832  // try to find a free block in the current bitmap and in the
833  // succeeding bitmaps, until the last bitmap is reached. If
834  // no free block turns up, we fall back to the First Fit
835  // method.
836 
837  // WARNING: Do not re-order the condition in the while
838  // statement below, because it relies on C++'s short-circuit
839  // evaluation. The return from _S_last_request->_M_get() will
840  // NOT be dereferenceable if _S_last_request._M_finished()
841  // returns true. This would inevitably lead to a NULL pointer
842  // dereference if tinkered with.
843  while (_S_last_request._M_finished() == false
844  && (*(_S_last_request._M_get()) == 0))
845  _S_last_request.operator++();
846 
847  if (__builtin_expect(_S_last_request._M_finished() == true, false))
848  {
849  // Fall Back to First Fit algorithm.
850  typedef typename __detail::_Ffit_finder<_Alloc_block*> _FFF;
851  _FFF __fff;
852  _BPiter __bpi = _S_find(__detail::_Functor_Ref<_FFF>(__fff));
853 
854  if (__bpi != _S_mem_blocks.end())
855  {
856  // Search was successful. Ok, now mark the first bit from
857  // the right as 0, meaning Allocated. This bit is obtained
858  // by calling _M_get() on __fff.
859  size_t __nz_bit = _Bit_scan_forward(*__fff._M_get());
860  __detail::__bit_allocate(__fff._M_get(), __nz_bit);
861 
862  _S_last_request._M_reset(__bpi - _S_mem_blocks.begin());
863 
864  // Now, get the address of the bit we marked as allocated.
865  pointer __ret = reinterpret_cast<pointer>
866  (__bpi->first + __fff._M_offset() + __nz_bit);
867  size_t* __puse_count =
868  reinterpret_cast<size_t*>
869  (__bpi->first) - (__detail::__num_bitmaps(*__bpi) + 1);
870 
871  ++(*__puse_count);
872  return __ret;
873  }
874  else
875  {
876  // Search was unsuccessful. We Add more memory to the
877  // pool by calling _S_refill_pool().
878  _S_refill_pool();
879 
880  // _M_Reset the _S_last_request structure to the first
881  // free block's bit map.
882  _S_last_request._M_reset(_S_mem_blocks.size() - 1);
883 
884  // Now, mark that bit as allocated.
885  }
886  }
887 
888  // _S_last_request holds a pointer to a valid bit map, that
889  // points to a free block in memory.
890  size_t __nz_bit = _Bit_scan_forward(*_S_last_request._M_get());
891  __detail::__bit_allocate(_S_last_request._M_get(), __nz_bit);
892 
893  pointer __ret = reinterpret_cast<pointer>
894  (_S_last_request._M_base() + _S_last_request._M_offset() + __nz_bit);
895 
896  size_t* __puse_count = reinterpret_cast<size_t*>
897  (_S_mem_blocks[_S_last_request._M_where()].first)
898  - (__detail::
899  __num_bitmaps(_S_mem_blocks[_S_last_request._M_where()]) + 1);
900 
901  ++(*__puse_count);
902  return __ret;
903  }
904 
905  /** @brief Deallocates memory that belongs to a single object of
906  * size sizeof(_Tp).
907  *
908  * Complexity: O(lg(N)), but the worst case is not hit
909  * often! This is because containers usually deallocate memory
910  * close to each other and this case is handled in O(1) time by
911  * the deallocate function.
912  */
913  void
914  _M_deallocate_single_object(pointer __p) throw()
915  {
916  using std::size_t;
917 #if defined __GTHREADS
918  __scoped_lock __bit_lock(_S_mut);
919 #endif
920  _Alloc_block* __real_p = reinterpret_cast<_Alloc_block*>(__p);
921 
922  typedef typename _BPVector::iterator _Iterator;
923  typedef typename _BPVector::difference_type _Difference_type;
924 
925  _Difference_type __diff;
926  long __displacement;
927 
928  _GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index >= 0);
929 
930  __detail::_Inclusive_between<_Alloc_block*> __ibt(__real_p);
931  if (__ibt(_S_mem_blocks[_S_last_dealloc_index]))
932  {
933  _GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index
934  <= _S_mem_blocks.size() - 1);
935 
936  // Initial Assumption was correct!
937  __diff = _S_last_dealloc_index;
938  __displacement = __real_p - _S_mem_blocks[__diff].first;
939  }
940  else
941  {
942  _Iterator _iter = _S_find(__ibt);
943 
944  _GLIBCXX_DEBUG_ASSERT(_iter != _S_mem_blocks.end());
945 
946  __diff = _iter - _S_mem_blocks.begin();
947  __displacement = __real_p - _S_mem_blocks[__diff].first;
948  _S_last_dealloc_index = __diff;
949  }
950 
951  // Get the position of the iterator that has been found.
952  const size_t __rotate = (__displacement
953  % size_t(__detail::bits_per_block));
954  size_t* __bitmapC =
955  reinterpret_cast<size_t*>
956  (_S_mem_blocks[__diff].first) - 1;
957  __bitmapC -= (__displacement / size_t(__detail::bits_per_block));
958 
959  __detail::__bit_free(__bitmapC, __rotate);
960  size_t* __puse_count = reinterpret_cast<size_t*>
961  (_S_mem_blocks[__diff].first)
962  - (__detail::__num_bitmaps(_S_mem_blocks[__diff]) + 1);
963 
964  _GLIBCXX_DEBUG_ASSERT(*__puse_count != 0);
965 
966  --(*__puse_count);
967 
968  if (__builtin_expect(*__puse_count == 0, false))
969  {
970  _S_block_size /= 2;
971 
972  // We can safely remove this block.
973  // _Block_pair __bp = _S_mem_blocks[__diff];
974  this->_M_insert(__puse_count);
975  _S_mem_blocks.erase(_S_mem_blocks.begin() + __diff);
976 
977  // Reset the _S_last_request variable to reflect the
978  // erased block. We do this to protect future requests
979  // after the last block has been removed from a particular
980  // memory Chunk, which in turn has been returned to the
981  // free list, and hence had been erased from the vector,
982  // so the size of the vector gets reduced by 1.
983  if ((_Difference_type)_S_last_request._M_where() >= __diff--)
984  _S_last_request._M_reset(__diff);
985 
986  // The index into the vector of the region of memory that
987  // might hold the next address to be passed to deallocate
988  // may have been invalidated by the erase call above on
989  // the vector, so we try to restore this invariant here
990  // as well.
991  if (_S_last_dealloc_index >= _S_mem_blocks.size())
992  {
993  _S_last_dealloc_index =(__diff != -1 ? __diff : 0);
994  _GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index >= 0);
995  }
996  }
997  }
998 
999  public:
1000  bitmap_allocator() _GLIBCXX_USE_NOEXCEPT
1001  { }
1002 
1003  bitmap_allocator(const bitmap_allocator&) _GLIBCXX_USE_NOEXCEPT
1004  { }
1005 
1006  template<typename _Tp1>
1007  bitmap_allocator(const bitmap_allocator<_Tp1>&) _GLIBCXX_USE_NOEXCEPT
1008  { }
1009 
1010  ~bitmap_allocator() _GLIBCXX_USE_NOEXCEPT
1011  { }
1012 
1013  _GLIBCXX_NODISCARD pointer
1014  allocate(size_type __n)
1015  {
1016  if (__n > this->max_size())
1017  std::__throw_bad_alloc();
1018 
1019 #if __cpp_aligned_new
1020  if (alignof(value_type) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
1021  {
1022  const size_type __b = __n * sizeof(value_type);
1023  std::align_val_t __al = std::align_val_t(alignof(value_type));
1024  return static_cast<pointer>(::operator new(__b, __al));
1025  }
1026 #endif
1027 
1028  if (__builtin_expect(__n == 1, true))
1029  return this->_M_allocate_single_object();
1030  else
1031  {
1032  const size_type __b = __n * sizeof(value_type);
1033  return reinterpret_cast<pointer>(::operator new(__b));
1034  }
1035  }
1036 
1037  _GLIBCXX_NODISCARD pointer
1038  allocate(size_type __n, typename bitmap_allocator<void>::const_pointer)
1039  { return allocate(__n); }
1040 
1041  void
1042  deallocate(pointer __p, size_type __n) throw()
1043  {
1044  if (__builtin_expect(__p != 0, true))
1045  {
1046 #if __cpp_aligned_new
1047  // Types with extended alignment are handled by operator delete.
1048  if (alignof(value_type) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
1049  {
1050  ::operator delete(__p, std::align_val_t(alignof(value_type)));
1051  return;
1052  }
1053 #endif
1054 
1055  if (__builtin_expect(__n == 1, true))
1056  this->_M_deallocate_single_object(__p);
1057  else
1058  ::operator delete(__p);
1059  }
1060  }
1061 
1062  pointer
1063  address(reference __r) const _GLIBCXX_NOEXCEPT
1064  { return std::__addressof(__r); }
1065 
1066  const_pointer
1067  address(const_reference __r) const _GLIBCXX_NOEXCEPT
1068  { return std::__addressof(__r); }
1069 
1070  size_type
1071  max_size() const _GLIBCXX_USE_NOEXCEPT
1072  { return size_type(-1) / sizeof(value_type); }
1073 
1074 #if __cplusplus >= 201103L
1075  template<typename _Up, typename... _Args>
1076  void
1077  construct(_Up* __p, _Args&&... __args)
1078  { ::new((void *)__p) _Up(std::forward<_Args>(__args)...); }
1079 
1080  template<typename _Up>
1081  void
1082  destroy(_Up* __p)
1083  { __p->~_Up(); }
1084 #else
1085  void
1086  construct(pointer __p, const_reference __data)
1087  { ::new((void *)__p) value_type(__data); }
1088 
1089  void
1090  destroy(pointer __p)
1091  { __p->~value_type(); }
1092 #endif
1093  };
1094 
1095  template<typename _Tp1, typename _Tp2>
1096  bool
1097  operator==(const bitmap_allocator<_Tp1>&,
1098  const bitmap_allocator<_Tp2>&) throw()
1099  { return true; }
1100 
1101 #if __cpp_impl_three_way_comparison < 201907L
1102  template<typename _Tp1, typename _Tp2>
1103  bool
1104  operator!=(const bitmap_allocator<_Tp1>&,
1105  const bitmap_allocator<_Tp2>&) throw()
1106  { return false; }
1107 #endif
1108 
1109  // Static member definitions.
1110  template<typename _Tp>
1111  typename bitmap_allocator<_Tp>::_BPVector
1112  bitmap_allocator<_Tp>::_S_mem_blocks;
1113 
1114  template<typename _Tp>
1115  std::size_t bitmap_allocator<_Tp>::_S_block_size
1116  = 2 * std::size_t(__detail::bits_per_block);
1117 
1118  template<typename _Tp>
1119  typename bitmap_allocator<_Tp>::_BPVector::size_type
1120  bitmap_allocator<_Tp>::_S_last_dealloc_index = 0;
1121 
1122  template<typename _Tp>
1123  __detail::_Bitmap_counter
1124  <typename bitmap_allocator<_Tp>::_Alloc_block*>
1125  bitmap_allocator<_Tp>::_S_last_request(_S_mem_blocks);
1126 
1127 #if defined __GTHREADS
1128  template<typename _Tp>
1129  typename bitmap_allocator<_Tp>::__mutex_type
1130  bitmap_allocator<_Tp>::_S_mut;
1131 #endif
1132 
1133 _GLIBCXX_END_NAMESPACE_VERSION
1134 } // namespace __gnu_cxx
1135 
1136 #endif
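A minimal usage sketch (the container choice and the names below are arbitrary, not mandated by the header): bitmap_allocator satisfies the standard allocator requirements, and single-object allocations take the fast bitmap path, so it suits node-based containers that allocate many objects of one fixed size.

  #include <list>
  #include <ext/bitmap_allocator.h>

  int main()
  {
    // A std::list whose nodes are obtained from the bitmap allocator.
    std::list<int, __gnu_cxx::bitmap_allocator<int> > l;
    for (int i = 0; i < 1000; ++i)
      l.push_back(i);

    // The allocator can also be used directly.
    __gnu_cxx::bitmap_allocator<int> alloc;
    int* p = alloc.allocate(1);      // single-object fast path
    alloc.construct(p, 42);
    alloc.destroy(p);
    alloc.deallocate(p, 1);
  }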