//////////////////////////////////////////////////////////////////////////////
//
// (C) Copyright Ion Gaztanaga 2005-2013. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
// See http://www.boost.org/libs/container for documentation.
//
//////////////////////////////////////////////////////////////////////////////

#ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP
#define BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP

#ifndef BOOST_CONFIG_HPP
#  include <boost/config.hpp>
#endif

#if defined(BOOST_HAS_PRAGMA_ONCE)
#  pragma once
#endif

#include <boost/container/detail/config_begin.hpp>
#include <boost/container/detail/workaround.hpp>

// container
#include <boost/container/container_fwd.hpp>
#include <boost/container/throw_exception.hpp>
// container/detail
#include <boost/container/detail/pool_common.hpp>
#include <boost/container/detail/iterator.hpp>
#include <boost/move/detail/iterator_to_raw_pointer.hpp>
#include <boost/container/detail/math_functions.hpp>
#include <boost/container/detail/placement_new.hpp>
#include <boost/container/detail/mpl.hpp>
#include <boost/move/detail/to_raw_pointer.hpp>
#include <boost/move/detail/force_ptr.hpp>
#include <boost/container/detail/type_traits.hpp>
// intrusive
#include <boost/intrusive/pointer_traits.hpp>
#include <boost/intrusive/set.hpp>
#include <boost/intrusive/list.hpp>
#include <boost/intrusive/slist.hpp>
// other
#include <boost/assert.hpp>
#include <cstddef>
namespace boost {
namespace container {

namespace adaptive_pool_flag {

static const unsigned int none            = 0u;
static const unsigned int align_only      = 1u << 0u;
static const unsigned int size_ordered    = 1u << 1u;
static const unsigned int address_ordered = 1u << 2u;

}  //namespace adaptive_pool_flag{

namespace dtl {

template<class size_type>
struct hdr_offset_holder_t
{
   hdr_offset_holder_t(size_type offset = 0)
      : hdr_offset(offset)
   {}
   size_type hdr_offset;
};
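//hdr_offset records the distance, in bytes, from the start of a subblock to the
//block_info_t header stored in the block's last subblock (non align-only mode),
//so any node address can be mapped back to its owning block; a zero offset marks
//the header subblock itself (see priv_block_from_node below).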
template<class SizeType, unsigned int Flags>
struct less_func;

template<class SizeType>
struct less_func<SizeType, adaptive_pool_flag::none>
{
   static bool less(SizeType, SizeType, const void *, const void *)
   {  return true;  }
};

template<class SizeType>
struct less_func<SizeType, adaptive_pool_flag::size_ordered>
{
   static bool less(SizeType ls, SizeType rs, const void *, const void *)
   {  return ls < rs;  }
};

template<class SizeType>
struct less_func<SizeType, adaptive_pool_flag::address_ordered>
{
   static bool less(SizeType, SizeType, const void *la, const void *ra)
   {  return la < ra;  }
};

template<class SizeType>
struct less_func<SizeType, adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered>
{
   static bool less(SizeType ls, SizeType rs, const void *la, const void *ra)
   {  return (ls < rs) || ((ls == rs) && (la < ra));  }
};
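//These specializations implement the ordering policy for the intrusive block
//container: blocks can be kept unordered (none), ordered by free-node count
//(size_ordered), by block address (address_ordered), or by count and then
//address when both flags are set.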
template<class VoidPointer, class SizeType, unsigned OrderFlags>
struct block_container_traits
{
   typedef typename bi::make_set_base_hook
      < bi::void_pointer<VoidPointer>
      , bi::optimize_size<true>
      , bi::link_mode<bi::normal_link> >::type hook_t;

   template<class T>
   struct container
   {
      typedef typename bi::make_multiset
         <T, bi::base_hook<hook_t>, bi::size_type<SizeType> >::type type;
   };

   template<class Container>
   static void reinsert_was_used(Container &container, typename Container::reference v, bool)
   {
      typedef typename Container::const_iterator const_block_iterator;
      typedef typename Container::iterator block_iterator;
      typedef typename Container::value_compare value_compare;

      const block_iterator this_block(Container::s_iterator_to(v));
      const const_block_iterator cendit(container.cend());
      block_iterator next_block(this_block);

      if(++next_block != cendit && value_compare()(*next_block, v)){
         const_block_iterator next2_block(next_block);
         //Test if v should be swapped with next (optimization)
         if(++next2_block == cendit || !value_compare()(*next2_block, v)){
            v.swap_nodes(*next_block);
            BOOST_ASSERT(++next_block == this_block);
         }
         else{
            container.erase(this_block);
            container.insert(v);
         }
      }
   }

   template<class Container>
   static void insert_was_empty(Container &container, typename Container::value_type &v, bool)
   {
      container.insert(v);
   }

   template<class Container>
   static void erase_first(Container &container)
   {
      container.erase(container.cbegin());
   }

   template<class Container>
   static void erase_last(Container &container)
   {
      container.erase(--container.cend());
   }
};
template<class VoidPointer, class SizeType>
struct block_container_traits<VoidPointer, SizeType, 0u>
{
   typedef typename bi::make_list_base_hook
      < bi::void_pointer<VoidPointer>
      , bi::link_mode<bi::normal_link> >::type hook_t;

   template<class T>
   struct container
   {
      typedef typename bi::make_list
         <T, bi::base_hook<hook_t>, bi::size_type<SizeType>, bi::constant_time_size<false> >::type type;
   };

   template<class Container>
   static void reinsert_was_used(Container &container, typename Container::value_type &v, bool is_full)
   {
      if(is_full){
         container.erase(Container::s_iterator_to(v));
         container.push_back(v);
      }
   }

   template<class Container>
   static void insert_was_empty(Container &container, typename Container::value_type &v, bool is_full)
   {
      if(is_full){
         container.push_back(v);
      }
      else{
         container.push_front(v);
      }
   }

   template<class Container>
   static void erase_first(Container &container)
   {
      container.pop_front();
   }

   template<class Container>
   static void erase_last(Container &container)
   {
      container.pop_back();
   }
};
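//Summary of the two strategies selected above: with any ordering flag the blocks
//live in an intrusive multiset sorted by the less_func policy; without ordering
//(the 0u specialization) they live in an intrusive list where completely free
//blocks are kept towards the back, which keeps them easy to find when trimming
//free blocks.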
/////////////////////////////
//
// adaptive_pool_types
//
/////////////////////////////
template<class MultiallocationChain, class VoidPointer, class SizeType, unsigned int Flags>
struct adaptive_pool_types
{
   typedef VoidPointer void_pointer;
   static const unsigned ordered = (Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered));
   typedef block_container_traits<VoidPointer, SizeType, ordered> block_container_traits_t;
   typedef typename block_container_traits_t::hook_t hook_t;
   typedef hdr_offset_holder_t<SizeType> hdr_offset_holder;
   static const unsigned int order_flags = Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered);
   typedef MultiallocationChain free_nodes_t;

   struct block_info_t
      : public hdr_offset_holder,
        public hook_t
   {
      //An intrusive list of the free nodes from this block
      free_nodes_t free_nodes;

      friend bool operator <(const block_info_t &l, const block_info_t &r)
      {
         return less_func<SizeType, order_flags>::
            less(l.free_nodes.size(), r.free_nodes.size(), &l , &r);
      }

      friend bool operator ==(const block_info_t &l, const block_info_t &r)
      {  return &l == &r;  }
   };
   typedef typename block_container_traits_t:: template container<block_info_t>::type block_container_t;
};
/////////////////////////////////////////////
//
// candidate_power_of_2_ct
//
/////////////////////////////////////////////
template< std::size_t alignment
        , std::size_t real_node_size
        , std::size_t payload_per_allocation
        , std::size_t min_elements_per_block
        , std::size_t hdr_size
        , std::size_t hdr_offset_size
        , std::size_t overhead_percent>
struct candidate_power_of_2_ct_helper
{
   static const std::size_t hdr_subblock_elements_alone = (alignment - hdr_size - payload_per_allocation)/real_node_size;
   static const std::size_t hdr_subblock_elements_first = (alignment - hdr_size - payload_per_allocation)/real_node_size;
   static const std::size_t elements_per_b_subblock_mid = (alignment - hdr_offset_size)/real_node_size;
   static const std::size_t elements_per_b_subblock_end = (alignment - hdr_offset_size - payload_per_allocation)/real_node_size;

   static const std::size_t num_b_subblock =
      hdr_subblock_elements_alone >= min_elements_per_block
         ? 0
         : ( ((hdr_subblock_elements_first + elements_per_b_subblock_end) >= min_elements_per_block)
             ? 1
             : 2 + (min_elements_per_block - hdr_subblock_elements_first - elements_per_b_subblock_end - 1)/elements_per_b_subblock_mid
           )
      ;

   static const std::size_t num_b_subblock_mid = (num_b_subblock > 1) ? (num_b_subblock - 1) : 0;

   static const std::size_t total_nodes = (num_b_subblock == 0)
         ? hdr_subblock_elements_alone
         : ( (num_b_subblock == 1)
             ? (hdr_subblock_elements_first + elements_per_b_subblock_end)
             : (hdr_subblock_elements_first + num_b_subblock_mid*elements_per_b_subblock_mid + elements_per_b_subblock_end)
           )
      ;
   static const std::size_t total_data = total_nodes*real_node_size;
   static const std::size_t total_size = alignment*(num_b_subblock+1);
   static const bool overhead_satisfied = (total_size - total_data)*100/total_size < overhead_percent;
};
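//Illustration only (hypothetical figures, not tied to any real configuration):
//with alignment = 4096, real_node_size = 64, payload_per_allocation = 16,
//hdr_size = 64, hdr_offset_size = 16, min_elements_per_block = 256 and
//overhead_percent = 5 the helper yields 62 nodes in the header subblock and
//63 nodes in each remaining subblock, so num_b_subblock = 4, total_nodes = 314,
//total_data = 20096 and total_size = 20480; the overhead is
//(20480 - 20096)*100/20480 = 1 (integer division), which is below 5, so this
//candidate alignment is accepted.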
template< std::size_t initial_alignment
        , std::size_t real_node_size
        , std::size_t payload_per_allocation
        , std::size_t min_elements_per_block
        , std::size_t hdr_size
        , std::size_t hdr_offset_size
        , std::size_t overhead_percent
        , bool Loop = true>
struct candidate_power_of_2_ct
{
   typedef candidate_power_of_2_ct_helper
      < initial_alignment
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent> helper_t;

   static const std::size_t candidate_power_of_2 = initial_alignment << std::size_t(!helper_t::overhead_satisfied);

   typedef typename candidate_power_of_2_ct
      < candidate_power_of_2
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent
      , !helper_t::overhead_satisfied
      >::type type;

   static const std::size_t alignment = type::alignment;
   static const std::size_t num_subblocks = type::num_subblocks;
   static const std::size_t real_num_node = type::real_num_node;
};

template< std::size_t initial_alignment
        , std::size_t real_node_size
        , std::size_t payload_per_allocation
        , std::size_t min_elements_per_block
        , std::size_t hdr_size
        , std::size_t hdr_offset_size
        , std::size_t overhead_percent
        >
struct candidate_power_of_2_ct
   < initial_alignment
   , real_node_size
   , payload_per_allocation
   , min_elements_per_block
   , hdr_size
   , hdr_offset_size
   , overhead_percent
   , false>
{
   typedef candidate_power_of_2_ct
      < initial_alignment
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent
      , false> type;

   typedef candidate_power_of_2_ct_helper
      < initial_alignment
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent> helper_t;

   static const std::size_t alignment = initial_alignment;
   static const std::size_t num_subblocks = helper_t::num_b_subblock+1;
   static const std::size_t real_num_node = helper_t::total_nodes;
};
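//The primary template evaluates the helper for the current candidate alignment.
//If the overhead constraint is not satisfied it recurses with the alignment
//doubled (initial_alignment << 1); once it is satisfied the next instantiation
//uses Loop == false, so the partial specialization above terminates the
//recursion and publishes the final alignment, num_subblocks and real_num_node.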
/////////////////////////////////////////////
//
// candidate_power_of_2_rt
//
/////////////////////////////////////////////
inline void candidate_power_of_2_rt ( std::size_t initial_alignment
                                    , std::size_t real_node_size
                                    , std::size_t payload_per_allocation
                                    , std::size_t min_elements_per_block
                                    , std::size_t hdr_size
                                    , std::size_t hdr_offset_size
                                    , std::size_t overhead_percent
                                    , std::size_t &alignment
                                    , std::size_t &num_subblocks
                                    , std::size_t &real_num_node)
{
   bool overhead_satisfied = false;
   std::size_t num_b_subblock = 0;
   std::size_t total_nodes = 0;

   while(!overhead_satisfied)
   {
      std::size_t hdr_subblock_elements_alone = (initial_alignment - hdr_size - payload_per_allocation)/real_node_size;
      std::size_t hdr_subblock_elements_first = (initial_alignment - hdr_size - payload_per_allocation)/real_node_size;
      std::size_t elements_per_b_subblock_mid = (initial_alignment - hdr_offset_size)/real_node_size;
      std::size_t elements_per_b_subblock_end = (initial_alignment - hdr_offset_size - payload_per_allocation)/real_node_size;

      num_b_subblock =
         hdr_subblock_elements_alone >= min_elements_per_block
            ? 0
            : ( ((hdr_subblock_elements_first + elements_per_b_subblock_end) >= min_elements_per_block)
                ? 1
                : 2 + (min_elements_per_block - hdr_subblock_elements_first - elements_per_b_subblock_end - 1)/elements_per_b_subblock_mid
              )
         ;

      std::size_t num_b_subblock_mid = (num_b_subblock > 1) ? (num_b_subblock - 1) : 0;

      total_nodes = (num_b_subblock == 0)
            ? hdr_subblock_elements_alone
            : ( (num_b_subblock == 1)
                ? (hdr_subblock_elements_first + elements_per_b_subblock_end)
                : (hdr_subblock_elements_first + num_b_subblock_mid*elements_per_b_subblock_mid + elements_per_b_subblock_end)
              )
         ;
      std::size_t total_data = total_nodes*real_node_size;
      std::size_t total_size = initial_alignment*(num_b_subblock+1);
      overhead_satisfied = (total_size - total_data)*100/total_size < overhead_percent;
      initial_alignment = initial_alignment << std::size_t(!overhead_satisfied);
   }
   alignment = initial_alignment;
   num_subblocks = num_b_subblock+1;
   real_num_node = total_nodes;
}
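//Runtime counterpart of candidate_power_of_2_ct: starting from initial_alignment
//it keeps doubling the candidate power of two until the overhead check passes,
//then returns the chosen alignment, the number of subblocks and the nodes per
//block through the output reference parameters.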
/////////////////////////////////////////////
//
// private_adaptive_node_pool_impl_common
//
/////////////////////////////////////////////
template< class SegmentManagerBase, unsigned int Flags>
class private_adaptive_node_pool_impl_common
{
   public:
   //!Segment manager typedef
   typedef SegmentManagerBase segment_manager_base_type;
   typedef typename SegmentManagerBase::multiallocation_chain multiallocation_chain;
   typedef typename SegmentManagerBase::size_type size_type;
   //Flags
   //align_only
   static const bool AlignOnly = (Flags & adaptive_pool_flag::align_only) != 0;
   typedef bool_<AlignOnly> IsAlignOnly;
   typedef true_  AlignOnlyTrue;
   typedef false_ AlignOnlyFalse;

   typedef typename SegmentManagerBase::void_pointer void_pointer;
   static const typename SegmentManagerBase::
      size_type PayloadPerAllocation = SegmentManagerBase::PayloadPerAllocation;

   typedef typename boost::intrusive::pointer_traits
      <void_pointer>::template rebind_pointer<segment_manager_base_type>::type segment_mngr_base_ptr_t;

   protected:
   typedef adaptive_pool_types
      <multiallocation_chain, void_pointer, size_type, Flags> adaptive_pool_types_t;
   typedef typename adaptive_pool_types_t::free_nodes_t free_nodes_t;
   typedef typename adaptive_pool_types_t::block_info_t block_info_t;
   typedef typename adaptive_pool_types_t::block_container_t block_container_t;
   typedef typename adaptive_pool_types_t::block_container_traits_t block_container_traits_t;
   typedef typename block_container_t::iterator block_iterator;
   typedef typename block_container_t::const_iterator const_block_iterator;
   typedef typename adaptive_pool_types_t::hdr_offset_holder hdr_offset_holder;
   typedef private_adaptive_node_pool_impl_common this_type;

   static const size_type MaxAlign = alignment_of<void_pointer>::value;
   static const size_type HdrSize = ((sizeof(block_info_t)-1)/MaxAlign+1)*MaxAlign;
   static const size_type HdrOffsetSize = ((sizeof(hdr_offset_holder)-1)/MaxAlign+1)*MaxAlign;

   segment_mngr_base_ptr_t mp_segment_mngr_base;   //Segment manager
   block_container_t       m_block_container;      //Intrusive block list
   size_type               m_totally_free_blocks;  //Free blocks

   class block_destroyer;
   friend class block_destroyer;

   class block_destroyer
   {
      public:
      block_destroyer(const this_type *impl, multiallocation_chain &chain, const size_type num_subblocks, const size_type real_block_alignment, const size_type real_num_node)
         : mp_impl(impl), m_chain(chain), m_num_subblocks(num_subblocks), m_real_block_alignment(real_block_alignment), m_real_num_node(real_num_node)
      {}

      void operator()(typename block_container_t::pointer to_deallocate)
      {  return this->do_destroy(to_deallocate, IsAlignOnly());  }

      private:
      void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyTrue)
      {
         BOOST_ASSERT(to_deallocate->free_nodes.size() == m_real_num_node);
         m_chain.push_back(to_deallocate);
      }

      void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyFalse)
      {
         BOOST_ASSERT(to_deallocate->free_nodes.size() == m_real_num_node);
         BOOST_ASSERT(0 == to_deallocate->hdr_offset);
         hdr_offset_holder *hdr_off_holder =
            mp_impl->priv_first_subblock_from_block(boost::movelib::to_raw_pointer(to_deallocate), m_num_subblocks, m_real_block_alignment);
         m_chain.push_back(hdr_off_holder);
      }

      const this_type *mp_impl;
      multiallocation_chain &m_chain;
      const size_type m_num_subblocks;
      const size_type m_real_block_alignment;
      const size_type m_real_num_node;
   };
   //This macro will activate invariant checking. Slow, but helpful for debugging the code.
   //#define BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
   void priv_invariants(const size_type real_num_node, const size_type num_subblocks, const size_type real_block_alignment) const
   {
      (void)real_num_node; (void)num_subblocks; (void)real_block_alignment;
      #ifdef BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
      //Check that the count of totally free blocks is correct
      BOOST_ASSERT(m_block_container.size() >= m_totally_free_blocks);

      const const_block_iterator itend(m_block_container.cend());
      const const_block_iterator itbeg(m_block_container.cbegin());

      {  //Try to do checks in a single iteration
         const_block_iterator it(itbeg);
         size_type total_free_nodes = 0;
         size_type total_free_blocks = 0u;
         for(; it != itend; ++it){
            if(it != itbeg){
               //Check order invariant
               const_block_iterator prev(it);
               --prev;
               BOOST_ASSERT(!(m_block_container.key_comp()(*it, *prev)));
               (void)prev; (void)it;
            }

            //free_nodes invariant
            const size_type free_nodes = it->free_nodes.size();
            BOOST_ASSERT(free_nodes <= real_num_node);
            BOOST_ASSERT(free_nodes != 0);

            //Accumulate total_free_nodes and total_free_blocks
            total_free_nodes += free_nodes;
            total_free_blocks += it->free_nodes.size() == real_num_node;

            if (!AlignOnly) {
               //Check that header offsets are correct
               hdr_offset_holder *hdr_off_holder = this->priv_first_subblock_from_block(const_cast<block_info_t *>(&*it), num_subblocks, real_block_alignment);
               for (size_type i = 0, max = num_subblocks; i < max; ++i) {
                  const size_type offset = size_type(reinterpret_cast<char*>(const_cast<block_info_t *>(&*it)) - reinterpret_cast<char*>(hdr_off_holder));
                  (void)offset;
                  BOOST_ASSERT(hdr_off_holder->hdr_offset == offset);
                  BOOST_ASSERT(0 == (reinterpret_cast<std::size_t>(hdr_off_holder) & (real_block_alignment - 1)));
                  BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
                  hdr_off_holder = move_detail::force_ptr<hdr_offset_holder *>(reinterpret_cast<char*>(hdr_off_holder) + real_block_alignment);
               }
            }
         }
         BOOST_ASSERT(total_free_blocks == m_totally_free_blocks);
         BOOST_ASSERT(total_free_nodes >= m_totally_free_blocks*real_num_node);
      }
      #endif
   }
   void priv_deallocate_free_blocks( const size_type max_free_blocks, const size_type real_num_node
                                   , const size_type num_subblocks, const size_type real_block_alignment)
   {  //Trampoline function to ease inlining
      if(m_totally_free_blocks > max_free_blocks){
         this->priv_deallocate_free_blocks_impl(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
      }
   }

   hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment) const
   {  return this->priv_first_subblock_from_block(block, num_subblocks, real_block_alignment, IsAlignOnly());  }

   hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment, AlignOnlyFalse) const
   {
      hdr_offset_holder *const hdr_off_holder = move_detail::force_ptr<hdr_offset_holder*>
         (reinterpret_cast<char*>(block) - (num_subblocks-1)*real_block_alignment);
      BOOST_ASSERT(hdr_off_holder->hdr_offset == size_type(reinterpret_cast<char*>(block) - reinterpret_cast<char*>(hdr_off_holder)));
      BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (real_block_alignment - 1)));
      BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
      return hdr_off_holder;
   }

   hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment, AlignOnlyTrue) const
   {
      (void)num_subblocks; (void)real_block_alignment;
      return move_detail::force_ptr<hdr_offset_holder*>(block);
   }
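   //Layout note (non align-only mode): a block is num_subblocks contiguous chunks
   //of real_block_alignment bytes. The first num_subblocks-1 chunks start with a
   //hdr_offset_holder and the last chunk starts with the full block_info_t header
   //(hdr_offset == 0). Stepping back (num_subblocks-1)*real_block_alignment bytes
   //from the header therefore reaches the first subblock, which is the address
   //obtained from allocate_aligned and the one handed back on deallocation. In
   //align-only mode the header sits at the block start, so the block pointer
   //itself is returned.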
   void priv_deallocate_free_blocks_impl( const size_type max_free_blocks, const size_type real_num_node
                                        , const size_type num_subblocks, const size_type real_block_alignment)
   {
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      //Now check if we've reached the free nodes limit
      //and check if we have free blocks. If so, deallocate as much
      //as we can to stay below the limit
      multiallocation_chain chain;
      {
         if(Flags & adaptive_pool_flag::size_ordered){
            const_block_iterator it = m_block_container.cend();
            --it;
            size_type totally_free_blocks = m_totally_free_blocks;

            for( ; totally_free_blocks > max_free_blocks; --totally_free_blocks){
               BOOST_ASSERT(it->free_nodes.size() == real_num_node);
               void *addr = priv_first_subblock_from_block(const_cast<block_info_t*>(&*it), num_subblocks, real_block_alignment);
               --it;
               block_container_traits_t::erase_last(m_block_container);
               chain.push_front(void_pointer(addr));
            }
         }
         else{
            const_block_iterator it = m_block_container.cend();
            size_type totally_free_blocks = m_totally_free_blocks;

            while(totally_free_blocks > max_free_blocks){
               --it;
               if(it->free_nodes.size() == real_num_node){
                  void *addr = priv_first_subblock_from_block(const_cast<block_info_t*>(&*it), num_subblocks, real_block_alignment);
                  it = m_block_container.erase(it);
                  chain.push_front(void_pointer(addr));
                  --totally_free_blocks;
               }
            }
         }
         BOOST_ASSERT((m_totally_free_blocks - max_free_blocks) == chain.size());
         m_totally_free_blocks = max_free_blocks;
      }
      this->mp_segment_mngr_base->deallocate_many(chain);
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }
   void priv_fill_chain_remaining_to_block
      ( multiallocation_chain &chain, size_type target_elem_in_chain, block_info_t &c_info
      , char *mem_address, size_type max_node_in_mem
      , const size_type real_node_size)
   {
      BOOST_ASSERT(chain.size() <= target_elem_in_chain);

      //First add all possible nodes to the chain
      const size_type left = target_elem_in_chain - chain.size();
      const size_type add_to_chain = (max_node_in_mem < left) ? max_node_in_mem : left;
      char *free_mem_address = static_cast<char *>(boost::movelib::to_raw_pointer
         (chain.incorporate_after(chain.last(), void_pointer(mem_address), real_node_size, add_to_chain)));
      //Now store remaining nodes in the free list
      if(const size_type free = max_node_in_mem - add_to_chain){
         free_nodes_t & free_nodes = c_info.free_nodes;
         free_nodes.incorporate_after(free_nodes.last(), void_pointer(free_mem_address), real_node_size, free);
      }
   }
   //!Allocates several blocks of nodes. Can throw
   void priv_append_from_new_blocks( size_type min_elements, multiallocation_chain &chain
                                   , const size_type max_free_blocks
                                   , const size_type real_block_alignment, const size_type real_node_size
                                   , const size_type real_num_node, const size_type num_subblocks
                                   , AlignOnlyTrue)
   {
      (void)num_subblocks;
      BOOST_ASSERT(m_block_container.empty());
      BOOST_ASSERT(min_elements > 0);
      const size_type n = (min_elements - 1)/real_num_node + 1;
      const size_type real_block_size = real_block_alignment - PayloadPerAllocation;
      const size_type target_elem_in_chain = chain.size() + min_elements;

      for(size_type i = 0; i != n; ++i){
         //We allocate a new NodeBlock and put it as the last
         //element of the tree
         char *mem_address = static_cast<char*>
            (mp_segment_mngr_base->allocate_aligned(real_block_size, real_block_alignment));
         if(!mem_address){
            //In case of error, free memory deallocating all nodes (the new ones allocated
            //in this function plus previously stored nodes in chain).
            this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
            throw_bad_alloc();
         }
         block_info_t &c_info = *new(mem_address, boost_container_new_t())block_info_t();
         mem_address += HdrSize;

         this->priv_fill_chain_remaining_to_block(chain, target_elem_in_chain, c_info, mem_address, real_num_node, real_node_size);

         const size_type free_nodes = c_info.free_nodes.size();
         if(free_nodes){
            const bool is_full = free_nodes == real_num_node;
            BOOST_ASSERT(free_nodes < real_num_node);
            m_totally_free_blocks += static_cast<size_type>(is_full);
            block_container_traits_t::insert_was_empty(m_block_container, c_info, is_full);
         }
      }
   }
   void priv_append_from_new_blocks( size_type min_elements, multiallocation_chain &chain
                                   , const size_type max_free_blocks
                                   , const size_type real_block_alignment, const size_type real_node_size
                                   , const size_type real_num_node, const size_type num_subblocks
                                   , AlignOnlyFalse)
   {
      BOOST_ASSERT(m_block_container.empty());
      BOOST_ASSERT(min_elements > 0);
      const size_type n = (min_elements - 1)/real_num_node + 1;
      const size_type real_block_size = real_block_alignment*num_subblocks - PayloadPerAllocation;
      const size_type elements_per_subblock_mid = (real_block_alignment - HdrOffsetSize)/real_node_size;
      const size_type elements_per_subblock_end = (real_block_alignment - HdrOffsetSize - PayloadPerAllocation) / real_node_size;
      const size_type hdr_subblock_elements = (real_block_alignment - HdrSize - PayloadPerAllocation)/real_node_size;
      const size_type target_elem_in_chain = chain.size() + min_elements;

      for(size_type i = 0; i != n; ++i){
         //We allocate a new NodeBlock and put it as the last
         //element of the tree
         char *mem_address = static_cast<char*>
            (mp_segment_mngr_base->allocate_aligned(real_block_size, real_block_alignment));
         if(!mem_address){
            //In case of error, free memory deallocating all nodes (the new ones allocated
            //in this function plus previously stored nodes in chain).
            this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
            throw_bad_alloc();
         }
         //First initialize header information on the last subblock
         char *hdr_addr = mem_address + real_block_alignment*(num_subblocks-1);
         block_info_t &c_info = *new(hdr_addr, boost_container_new_t())block_info_t();
         //Some structural checks
         BOOST_ASSERT(static_cast<void*>(&static_cast<hdr_offset_holder&>(c_info).hdr_offset) ==
                      static_cast<void*>(&c_info)); (void)c_info;

         for( size_type subblock = 0, maxsubblock = num_subblocks - 1
            ; subblock < maxsubblock
            ; ++subblock, mem_address += real_block_alignment){
            //Initialize header offset mark
            new(mem_address, boost_container_new_t()) hdr_offset_holder(size_type(hdr_addr - mem_address));
            const size_type elements_per_subblock = (subblock != (maxsubblock - 1)) ? elements_per_subblock_mid : elements_per_subblock_end;
            this->priv_fill_chain_remaining_to_block
               (chain, target_elem_in_chain, c_info, mem_address + HdrOffsetSize, elements_per_subblock, real_node_size);
         }
         this->priv_fill_chain_remaining_to_block
            (chain, target_elem_in_chain, c_info, hdr_addr + HdrSize, hdr_subblock_elements, real_node_size);

         m_totally_free_blocks += static_cast<size_type>(c_info.free_nodes.size() == real_num_node);
         if (c_info.free_nodes.size())
            m_block_container.push_front(c_info);
      }
   }
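   //Sketch of the block layout built above (each row is one subblock of
   //real_block_alignment bytes):
   //   subblocks 0 .. num_subblocks-2 : [hdr_offset_holder | nodes...]
   //   subblock  num_subblocks-1      : [block_info_t      | nodes...]  (hdr_offset == 0)
   //Every hdr_offset_holder stores the byte distance from its subblock to the
   //block_info_t header in the last subblock.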
   //!Allocates a single node. Can throw
   void *priv_allocate_node( const size_type max_free_blocks, const size_type real_block_alignment, const size_type real_node_size
                           , const size_type real_num_node, const size_type num_subblocks)
   {
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      //If there are no free nodes we allocate a new block
      if(!m_block_container.empty()){
         //We take the first free node since the multiset can't be empty
         free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
         BOOST_ASSERT(!free_nodes.empty());
         const size_type free_nodes_count = free_nodes.size();
         void *first_node = boost::movelib::to_raw_pointer(free_nodes.pop_front());
         if(free_nodes.empty()){
            block_container_traits_t::erase_first(m_block_container);
         }
         m_totally_free_blocks -= static_cast<size_type>(free_nodes_count == real_num_node);
         this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
         return first_node;
      }
      else{
         multiallocation_chain chain;
         this->priv_append_from_new_blocks
            (1, chain, max_free_blocks, real_block_alignment, real_node_size, real_num_node, num_subblocks, IsAlignOnly());
         void *node = boost::movelib::to_raw_pointer(chain.pop_front());
         this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
         return node;
      }
   }
   void priv_allocate_nodes( const size_type n, multiallocation_chain &chain
                           , const size_type max_free_blocks, const size_type real_block_alignment, const size_type real_node_size
                           , const size_type real_num_node, const size_type num_subblocks)
   {
      size_type i = 0;
      BOOST_CONTAINER_TRY{
         this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
         while(i != n){
            //If there are no free nodes we allocate all needed blocks
            if (m_block_container.empty()){
               this->priv_append_from_new_blocks
                  (n - i, chain, max_free_blocks, real_block_alignment, real_node_size, real_num_node, num_subblocks, IsAlignOnly());
               BOOST_ASSERT(m_block_container.empty() || (++m_block_container.cbegin() == m_block_container.cend()));
               BOOST_ASSERT(chain.size() == n);
               break;
            }
            free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
            const size_type free_nodes_count_before = free_nodes.size();
            m_totally_free_blocks -= static_cast<size_type>(free_nodes_count_before == real_num_node);
            const size_type num_left = n-i;
            const size_type num_elems = (num_left < free_nodes_count_before) ? num_left : free_nodes_count_before;
            typedef typename free_nodes_t::iterator free_nodes_iterator;

            if(num_left < free_nodes_count_before){
               const free_nodes_iterator it_bbeg(free_nodes.before_begin());
               free_nodes_iterator it_bend(it_bbeg);
               for(size_type j = 0; j != num_elems; ++j){
                  ++it_bend;
               }
               free_nodes_iterator it_end = it_bend; ++it_end;
               free_nodes_iterator it_beg = it_bbeg; ++it_beg;
               free_nodes.erase_after(it_bbeg, it_end, num_elems);
               chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
               //chain.splice_after(chain.last(), free_nodes, it_bbeg, it_bend, num_elems);
               BOOST_ASSERT(!free_nodes.empty());
            }
            else{
               const free_nodes_iterator it_beg(free_nodes.begin()), it_bend(free_nodes.last());
               free_nodes.clear();
               chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
               block_container_traits_t::erase_first(m_block_container);
            }
            i += num_elems;
         }
      }
      BOOST_CONTAINER_CATCH(...){
         this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
         this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
         BOOST_CONTAINER_RETHROW
      }
      BOOST_CONTAINER_CATCH_END
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }
   //!Deallocates the node pointed to by pElem. Never throws
   void priv_deallocate_node( void *pElem
                            , const size_type max_free_blocks, const size_type real_num_node
                            , const size_type num_subblocks, const size_type real_block_alignment)
   {
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      block_info_t &block_info = *this->priv_block_from_node(pElem, real_block_alignment);
      const size_type prev_free_nodes = block_info.free_nodes.size();
      BOOST_ASSERT(block_info.free_nodes.size() < real_num_node);

      //We put the node at the end of the free node list
      block_info.free_nodes.push_back(void_pointer(pElem));

      //Reinsert the block according to its new free node count
      this->priv_reinsert_block(block_info, prev_free_nodes == 0, real_num_node);
      this->priv_deallocate_free_blocks(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }
   void priv_deallocate_nodes( multiallocation_chain &nodes
                             , const size_type max_free_blocks, const size_type real_num_node
                             , const size_type num_subblocks, const size_type real_block_alignment)
   {
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      //To take advantage of node locality, wait until two
      //nodes belong to different blocks. Only then reinsert
      //the block of the first node in the block tree.
      //Cache of the previous block
      block_info_t *prev_block_info = 0;

      //If block was empty before this call, it's not already
      //inserted in the block tree.
      bool prev_block_was_empty = false;
      typedef typename free_nodes_t::iterator free_nodes_iterator;
      {
         const free_nodes_iterator itbb(nodes.before_begin()), ite(nodes.end());
         free_nodes_iterator itf(nodes.begin()), itbf(itbb);
         size_type splice_node_count = size_type(-1);
         while(itf != ite){
            void *pElem = boost::movelib::to_raw_pointer(boost::movelib::iterator_to_raw_pointer(itf));
            block_info_t &block_info = *this->priv_block_from_node(pElem, real_block_alignment);
            BOOST_ASSERT(block_info.free_nodes.size() < real_num_node);
            ++splice_node_count;

            //If block change is detected calculate the cached block position in the tree
            if(&block_info != prev_block_info){
               if(prev_block_info){ //Make sure we skip the initial "dummy" cache
                  free_nodes_iterator it(itbb); ++it;
                  nodes.erase_after(itbb, itf, splice_node_count);
                  prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*it, &*itbf, splice_node_count);
                  this->priv_reinsert_block(*prev_block_info, prev_block_was_empty, real_num_node);
                  splice_node_count = 0;
               }
               //Update cache with new data
               prev_block_was_empty = block_info.free_nodes.empty();
               prev_block_info = &block_info;
            }
            itbf = itf;
            ++itf;
         }
      }

      if(prev_block_info){
         //The loop reinserts all blocks except the last one
         const free_nodes_iterator itfirst(nodes.begin()), itlast(nodes.last());
         const size_type splice_node_count = nodes.size();
         nodes.clear();
         prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*itfirst, &*itlast, splice_node_count);
         this->priv_reinsert_block(*prev_block_info, prev_block_was_empty, real_num_node);
         this->priv_deallocate_free_blocks(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
      }
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }
   void priv_reinsert_block(block_info_t &prev_block_info, const bool prev_block_was_empty, const size_type real_num_node)
   {
      //Cache the free nodes from the block
      const size_type this_block_free_nodes = prev_block_info.free_nodes.size();
      const bool is_full = this_block_free_nodes == real_num_node;

      //Update free block count
      m_totally_free_blocks += static_cast<size_type>(is_full);
      if(prev_block_was_empty){
         block_container_traits_t::insert_was_empty(m_block_container, prev_block_info, is_full);
      }
      else{
         block_container_traits_t::reinsert_was_used(m_block_container, prev_block_info, is_full);
      }
   }
   block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment, AlignOnlyFalse) const
   {
      hdr_offset_holder *hdr_off_holder =
         reinterpret_cast<hdr_offset_holder*>((std::size_t)node & size_type(~(real_block_alignment - 1)));
      BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (real_block_alignment - 1)));
      BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
      block_info_t *block = move_detail::force_ptr<block_info_t *>
         (reinterpret_cast<char*>(hdr_off_holder) + hdr_off_holder->hdr_offset);
      BOOST_ASSERT(block->hdr_offset == 0);
      return block;
   }

   block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment, AlignOnlyTrue) const
   {
      return (block_info_t *)((std::size_t)node & std::size_t(~(real_block_alignment - 1)));
   }

   block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment) const
   {  return this->priv_block_from_node(node, real_block_alignment, IsAlignOnly());  }
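   //Both overloads rely on each block (or subblock) starting at an address that
   //is a multiple of real_block_alignment: masking the node address with
   //~(real_block_alignment - 1) yields the start of its subblock and, in the
   //non align-only case, the stored hdr_offset then leads to the owning
   //block_info_t.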
   //!Deallocates all used memory. Never throws
   void priv_clear(const size_type num_subblocks, const size_type real_block_alignment, const size_type real_num_node)
   {
      #ifndef NDEBUG
      block_iterator it = m_block_container.begin();
      block_iterator itend = m_block_container.end();
      size_type n_free_nodes = 0;
      for(; it != itend; ++it){
         //Check for memory leak
         BOOST_ASSERT(it->free_nodes.size() == real_num_node);
         ++n_free_nodes;
      }
      BOOST_ASSERT(n_free_nodes == m_totally_free_blocks);
      #endif
      //Check for memory leaks
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      multiallocation_chain chain;
      m_block_container.clear_and_dispose(block_destroyer(this, chain, num_subblocks, real_block_alignment, real_num_node));
      this->mp_segment_mngr_base->deallocate_many(chain);
      m_totally_free_blocks = 0;
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }
   public:
   private_adaptive_node_pool_impl_common(segment_manager_base_type *segment_mngr_base)
      //General purpose allocator
      : mp_segment_mngr_base(segment_mngr_base)
      , m_block_container()
      , m_totally_free_blocks(0)
   {}

   size_type num_free_nodes()
   {
      typedef typename block_container_t::const_iterator citerator;
      size_type count = 0;
      citerator it(m_block_container.begin()), itend(m_block_container.end());
      for(; it != itend; ++it){
         count += it->free_nodes.size();
      }
      return count;
   }

   void swap(private_adaptive_node_pool_impl_common &other)
   {
      std::swap(mp_segment_mngr_base, other.mp_segment_mngr_base);
      std::swap(m_totally_free_blocks, other.m_totally_free_blocks);
      m_block_container.swap(other.m_block_container);
   }

   //!Returns the segment manager. Never throws
   segment_manager_base_type* get_segment_manager_base() const
   {  return boost::movelib::to_raw_pointer(mp_segment_mngr_base);  }
};
template< class SizeType
        , std::size_t HdrSize
        , std::size_t PayloadPerAllocation
        , std::size_t RealNodeSize
        , std::size_t NodesPerBlock
        , std::size_t HdrOffsetSize
        , std::size_t OverheadPercent
        , bool AlignOnly>
struct calculate_alignment_ct
{
   static const std::size_t alignment = upper_power_of_2_ct<SizeType, HdrSize + RealNodeSize*NodesPerBlock>::value;
   static const std::size_t num_subblocks = 0;
   static const std::size_t real_num_node = (alignment - PayloadPerAllocation - HdrSize)/RealNodeSize;
};

template< class SizeType
        , std::size_t HdrSize
        , std::size_t PayloadPerAllocation
        , std::size_t RealNodeSize
        , std::size_t NodesPerBlock
        , std::size_t HdrOffsetSize
        , std::size_t OverheadPercent>
struct calculate_alignment_ct
   < SizeType
   , HdrSize
   , PayloadPerAllocation
   , RealNodeSize
   , NodesPerBlock
   , HdrOffsetSize
   , OverheadPercent
   , false>
{
   typedef typename candidate_power_of_2_ct
      < upper_power_of_2_ct<SizeType, HdrSize + PayloadPerAllocation + RealNodeSize>::value
      , RealNodeSize
      , PayloadPerAllocation
      , NodesPerBlock
      , HdrSize
      , HdrOffsetSize
      , OverheadPercent
      >::type type;

   static const std::size_t alignment = type::alignment;
   static const std::size_t num_subblocks = type::num_subblocks;
   static const std::size_t real_num_node = type::real_num_node;
};
/////////////////////////////////////////////
//
// private_adaptive_node_pool_impl_ct
//
/////////////////////////////////////////////
template< class SegmentManagerBase
        , std::size_t MaxFreeBlocks
        , std::size_t NodeSize
        , std::size_t NodesPerBlock
        , std::size_t OverheadPercent
        , unsigned int Flags>
class private_adaptive_node_pool_impl_ct
   : public private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags>
{
   typedef private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags> base_t;

   //Non-copyable
   private_adaptive_node_pool_impl_ct();
   private_adaptive_node_pool_impl_ct(const private_adaptive_node_pool_impl_ct &);
   private_adaptive_node_pool_impl_ct &operator=(const private_adaptive_node_pool_impl_ct &);

   public:
   typedef typename base_t::void_pointer void_pointer;
   typedef typename base_t::size_type size_type;
   typedef typename base_t::multiallocation_chain multiallocation_chain;
   typedef typename base_t::segment_manager_base_type segment_manager_base_type;

   static const typename base_t::size_type PayloadPerAllocation = base_t::PayloadPerAllocation;
   //align_only
   static const bool AlignOnly = base_t::AlignOnly;

   private:
   static const size_type MaxAlign = base_t::MaxAlign;
   static const size_type HdrSize = base_t::HdrSize;
   static const size_type HdrOffsetSize = base_t::HdrOffsetSize;

   static const size_type RealNodeSize = lcm_ct<NodeSize, alignment_of<void_pointer>::value>::value;

   typedef calculate_alignment_ct
      < size_type, HdrSize, PayloadPerAllocation
      , RealNodeSize, NodesPerBlock, HdrOffsetSize, OverheadPercent, AlignOnly> data_t;

   //Round the size to a power of two value.
   //This is the total memory size (including payload) that we want to
   //allocate from the general-purpose allocator
   static const size_type NumSubBlocks = data_t::num_subblocks;
   static const size_type RealNumNode = data_t::real_num_node;
   static const size_type RealBlockAlignment = data_t::alignment;

   public:
   //!Constructor from a segment manager. Never throws
   private_adaptive_node_pool_impl_ct(typename base_t::segment_manager_base_type *segment_mngr_base)
      //General purpose allocator
      : base_t(segment_mngr_base)
   {}

   //!Destructor. Deallocates all allocated blocks. Never throws
   ~private_adaptive_node_pool_impl_ct()
   {  this->priv_clear(NumSubBlocks, data_t::alignment, RealNumNode);  }

   size_type get_real_num_node() const
   {  return RealNumNode;  }

   //!Allocates a single node. Can throw
   void *allocate_node()
   {
      return this->priv_allocate_node
         (MaxFreeBlocks, data_t::alignment, RealNodeSize, RealNumNode, NumSubBlocks);
   }

   //!Allocates n nodes.
   //!Can throw
   void allocate_nodes(const size_type n, multiallocation_chain &chain)
   {
      this->priv_allocate_nodes
         (n, chain, MaxFreeBlocks, data_t::alignment, RealNodeSize, RealNumNode, NumSubBlocks);
   }

   //!Deallocates the node pointed to by pElem. Never throws
   void deallocate_node(void *pElem)
   {
      this->priv_deallocate_node(pElem, MaxFreeBlocks, RealNumNode, NumSubBlocks, RealBlockAlignment);
   }

   //!Deallocates a linked list of nodes. Never throws
   void deallocate_nodes(multiallocation_chain &nodes)
   {
      this->priv_deallocate_nodes(nodes, MaxFreeBlocks, RealNumNode, NumSubBlocks, data_t::alignment);
   }

   void deallocate_free_blocks()
   {  this->priv_deallocate_free_blocks(0, RealNumNode, NumSubBlocks, data_t::alignment);  }

   //Deprecated, use deallocate_free_blocks
   void deallocate_free_chunks()
   {  this->priv_deallocate_free_blocks(0, RealNumNode, NumSubBlocks, data_t::alignment);  }
};
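//Minimal usage sketch (illustrative only; "segment_manager_t" and "mngr" are
//hypothetical stand-ins for a type modelling SegmentManagerBase and an instance
//of it):
//   typedef private_adaptive_node_pool_impl_ct
//      < segment_manager_t   //SegmentManagerBase
//      , 2u                  //MaxFreeBlocks
//      , 64u                 //NodeSize
//      , 256u                //NodesPerBlock
//      , 5u                  //OverheadPercent
//      , adaptive_pool_flag::address_ordered> pool_t;
//   pool_t pool(&mngr);
//   void *n = pool.allocate_node();
//   pool.deallocate_node(n);
//   pool.deallocate_free_blocks();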
/////////////////////////////////////////////
//
// private_adaptive_node_pool_impl_rt
//
/////////////////////////////////////////////
template<class SizeType>
struct private_adaptive_node_pool_impl_rt_data
{
   typedef SizeType size_type;

   private_adaptive_node_pool_impl_rt_data(size_type max_free_blocks, size_type real_node_size)
      : m_max_free_blocks(max_free_blocks), m_real_node_size(real_node_size)
      , m_real_block_alignment(), m_num_subblocks(), m_real_num_node()
   {}

   const size_type m_max_free_blocks;
   const size_type m_real_node_size;
   //Round the size to a power of two value.
   //This is the total memory size (including payload) that we want to
   //allocate from the general-purpose allocator
   size_type m_real_block_alignment;
   size_type m_num_subblocks;
   //This is the real number of nodes per block
   size_type m_real_num_node;
};
template<class SegmentManagerBase, unsigned int Flags>
class private_adaptive_node_pool_impl_rt
   : private private_adaptive_node_pool_impl_rt_data<typename SegmentManagerBase::size_type>
   , public private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags>
{
   typedef private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags> impl_t;
   typedef private_adaptive_node_pool_impl_rt_data<typename SegmentManagerBase::size_type> data_t;

   //Non-copyable
   private_adaptive_node_pool_impl_rt();
   private_adaptive_node_pool_impl_rt(const private_adaptive_node_pool_impl_rt &);
   private_adaptive_node_pool_impl_rt &operator=(const private_adaptive_node_pool_impl_rt &);

   protected:
   typedef typename impl_t::void_pointer void_pointer;
   typedef typename impl_t::size_type size_type;
   typedef typename impl_t::multiallocation_chain multiallocation_chain;

   static const typename impl_t::size_type PayloadPerAllocation = impl_t::PayloadPerAllocation;
   //Flags
   //align_only
   static const bool AlignOnly = impl_t::AlignOnly;

   static const size_type HdrSize = impl_t::HdrSize;
   static const size_type HdrOffsetSize = impl_t::HdrOffsetSize;

   public:
   //!Segment manager typedef
   typedef SegmentManagerBase segment_manager_base_type;

   //!Constructor from a segment manager. Never throws
   private_adaptive_node_pool_impl_rt
      ( segment_manager_base_type *segment_mngr_base
      , size_type node_size
      , size_type nodes_per_block
      , size_type max_free_blocks
      , unsigned char overhead_percent
      )
      : data_t(max_free_blocks, lcm(node_size, size_type(alignment_of<void_pointer>::value)))
      , impl_t(segment_mngr_base)
   {
      if(AlignOnly){
         this->m_real_block_alignment = upper_power_of_2(HdrSize + this->m_real_node_size*nodes_per_block);
         this->m_real_num_node = (this->m_real_block_alignment - PayloadPerAllocation - HdrSize)/this->m_real_node_size;
      }
      else{
         candidate_power_of_2_rt ( upper_power_of_2(HdrSize + PayloadPerAllocation + this->m_real_node_size)
                                 , this->m_real_node_size
                                 , PayloadPerAllocation
                                 , nodes_per_block
                                 , HdrSize
                                 , HdrOffsetSize
                                 , overhead_percent
                                 , this->m_real_block_alignment
                                 , this->m_num_subblocks
                                 , this->m_real_num_node);
      }
   }

   //!Destructor. Deallocates all allocated blocks. Never throws
   ~private_adaptive_node_pool_impl_rt()
   {  this->priv_clear(this->m_num_subblocks, this->m_real_block_alignment, this->m_real_num_node);  }

   size_type get_real_num_node() const
   {  return this->m_real_num_node;  }

   //!Allocates a single node. Can throw
   void *allocate_node()
   {
      return this->priv_allocate_node
         (this->m_max_free_blocks, this->m_real_block_alignment, this->m_real_node_size, this->m_real_num_node, this->m_num_subblocks);
   }

   //!Allocates n nodes.
   //!Can throw
   void allocate_nodes(const size_type n, multiallocation_chain &chain)
   {
      this->priv_allocate_nodes
         (n, chain, this->m_max_free_blocks, this->m_real_block_alignment, this->m_real_node_size, this->m_real_num_node, this->m_num_subblocks);
   }

   //!Deallocates the node pointed to by pElem. Never throws
   void deallocate_node(void *pElem)
   {
      this->priv_deallocate_node(pElem, this->m_max_free_blocks, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);
   }

   //!Deallocates a linked list of nodes. Never throws
   void deallocate_nodes(multiallocation_chain &nodes)
   {
      this->priv_deallocate_nodes(nodes, this->m_max_free_blocks, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);
   }

   void deallocate_free_blocks()
   {  this->priv_deallocate_free_blocks(0, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);  }

   //Deprecated, use deallocate_free_blocks
   void deallocate_free_chunks()
   {  this->priv_deallocate_free_blocks(0, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);  }
};
}  //namespace dtl {
}  //namespace container {
}  //namespace boost {

#include <boost/container/detail/config_end.hpp>

#endif   //#ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP