Intel(R) Threading Building Blocks Doxygen Documentation  version 4.2.3
tbb::interface5::internal::split_ordered_list< T, Allocator > Class Template Reference

#include <_concurrent_unordered_impl.h>


Classes

struct  node
 

Public Types

typedef split_ordered_list< T, Allocator > self_type
 
typedef tbb::internal::allocator_rebind< Allocator, T >::type allocator_type
 
typedef node * nodeptr_t
 
typedef tbb::internal::allocator_traits< allocator_type >::value_type value_type
 
typedef tbb::internal::allocator_traits< allocator_type >::size_type size_type
 
typedef tbb::internal::allocator_traits< allocator_type >::difference_type difference_type
 
typedef tbb::internal::allocator_traits< allocator_type >::pointer pointer
 
typedef tbb::internal::allocator_traits< allocator_type >::const_pointer const_pointer
 
typedef value_type & reference
 
typedef const value_type & const_reference
 
typedef solist_iterator< self_type, const value_type > const_iterator
 
typedef solist_iterator< self_type, value_type > iterator
 
typedef flist_iterator< self_type, const value_type > raw_const_iterator
 
typedef flist_iterator< self_type, value_type > raw_iterator
 

Public Member Functions

nodeptr_t create_node (sokey_t order_key)
 
template<typename Arg >
nodeptr_t create_node (sokey_t order_key, __TBB_FORWARDING_REF(Arg) t, tbb::internal::true_type=tbb::internal::true_type())
 
template<typename Arg >
nodeptr_t create_node (sokey_t, __TBB_FORWARDING_REF(Arg), tbb::internal::false_type)
 
template<typename __TBB_PARAMETER_PACK Args>
nodeptr_t create_node_v (__TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK args)
 
 split_ordered_list (allocator_type a=allocator_type())
 
 ~split_ordered_list ()
 
allocator_type get_allocator () const
 
void clear ()
 
iterator begin ()
 
const_iterator begin () const
 
iterator end ()
 
const_iterator end () const
 
const_iterator cbegin () const
 
const_iterator cend () const
 
bool empty () const
 
size_type size () const
 
size_type max_size () const
 
void swap (self_type &other)
 
raw_iterator raw_begin ()
 
raw_const_iterator raw_begin () const
 
raw_iterator raw_end ()
 
raw_const_iterator raw_end () const
 
iterator get_iterator (raw_iterator it)
 
const_iterator get_iterator (raw_const_iterator it) const
 
raw_iterator get_iterator (raw_const_iterator it)
 
iterator first_real_iterator (raw_iterator it)
 
const_iterator first_real_iterator (raw_const_iterator it) const
 
void destroy_node (nodeptr_t pnode)
 
std::pair< iterator, bool > try_insert (raw_iterator it, raw_iterator next, nodeptr_t pnode, size_type *new_count)
 
raw_iterator insert_dummy (raw_iterator it, sokey_t order_key)
 
nodeptr_t erase_node_impl (raw_iterator previous, raw_const_iterator &where)
 
void erase_node (raw_iterator previous, raw_const_iterator &where, tbb::internal::true_type)
 
void erase_node (raw_iterator previous, raw_const_iterator &where, tbb::internal::false_type)
 
void erase_node (raw_iterator previous, raw_const_iterator &where)
 
template<typename AllowDestroy >
iterator erase_node (raw_iterator previous, const_iterator where, AllowDestroy)
 
iterator erase_node (raw_iterator previous, const_iterator &where)
 
void move_all (self_type &source)
 

Static Public Member Functions

static sokey_t get_order_key (const raw_const_iterator &it)
 
static sokey_t get_safe_order_key (const raw_const_iterator &it)
 
static iterator get_iterator (const_iterator it)
 
static nodeptr_t try_insert_atomic (nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node)
 

Private Member Functions

void check_range (raw_iterator first, raw_iterator last)
 
void check_range ()
 

Private Attributes

tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator
 
size_type my_element_count
 
nodeptr_t my_head
 

Friends

template<typename Traits >
class concurrent_unordered_base
 

Detailed Description

template<typename T, typename Allocator>
class tbb::interface5::internal::split_ordered_list< T, Allocator >

Definition at line 57 of file _concurrent_unordered_impl.h.
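
This class implements the lock-free split-ordered list (Shalev and Shavit) that underlies TBB's concurrent_unordered_* containers. Nodes are kept sorted by a sokey_t order key; the enclosing hash table derives these keys from element hashes so that each bucket corresponds to a contiguous run of the list headed by a dummy node. The sketch below is illustrative only and is not taken from the header: it shows the usual split-order key convention (bit-reversed hash, with the low bit distinguishing real elements from bucket dummies) that the sokey_t parameters of this class assume. All names in it are invented for the example.

// Illustrative sketch of the split-order key convention expected by
// create_node()/insert_dummy(); not TBB code.
#include <cstddef>
#include <climits>

typedef std::size_t sokey_t;

// Reverse the bits of a hash so that buckets split recursively when the
// table doubles in size.
inline sokey_t reverse_bits(sokey_t x) {
    sokey_t r = 0;
    for (unsigned i = 0; i < sizeof(sokey_t) * CHAR_BIT; ++i) {
        r = (r << 1) | (x & 1);
        x >>= 1;
    }
    return r;
}

// Real elements get the low bit set, so they always sort after the dummy
// node that heads their bucket.
inline sokey_t split_order_key_regular(sokey_t hash)  { return reverse_bits(hash) | sokey_t(1); }

// Bucket dummies get the low bit cleared.
inline sokey_t split_order_key_dummy(sokey_t bucket)  { return reverse_bits(bucket) & ~sokey_t(1); }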

Member Typedef Documentation

◆ allocator_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_rebind<Allocator, T>::type tbb::interface5::internal::split_ordered_list< T, Allocator >::allocator_type

Definition at line 193 of file _concurrent_unordered_impl.h.

◆ const_iterator

template<typename T, typename Allocator>
typedef solist_iterator<self_type, const value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::const_iterator

Definition at line 207 of file _concurrent_unordered_impl.h.

◆ const_pointer

Definition at line 202 of file _concurrent_unordered_impl.h.

◆ const_reference

template<typename T, typename Allocator>
typedef const value_type& tbb::interface5::internal::split_ordered_list< T, Allocator >::const_reference

Definition at line 205 of file _concurrent_unordered_impl.h.

◆ difference_type

Definition at line 200 of file _concurrent_unordered_impl.h.

◆ iterator

template<typename T, typename Allocator>
typedef solist_iterator<self_type, value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::iterator

Definition at line 208 of file _concurrent_unordered_impl.h.

◆ nodeptr_t

template<typename T, typename Allocator>
typedef node* tbb::interface5::internal::split_ordered_list< T, Allocator >::nodeptr_t

Definition at line 195 of file _concurrent_unordered_impl.h.

◆ pointer

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::pointer tbb::interface5::internal::split_ordered_list< T, Allocator >::pointer

Definition at line 201 of file _concurrent_unordered_impl.h.

◆ raw_const_iterator

template<typename T, typename Allocator>
typedef flist_iterator<self_type, const value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_const_iterator

Definition at line 209 of file _concurrent_unordered_impl.h.

◆ raw_iterator

template<typename T, typename Allocator>
typedef flist_iterator<self_type, value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_iterator

Definition at line 210 of file _concurrent_unordered_impl.h.

◆ reference

template<typename T, typename Allocator>
typedef value_type& tbb::interface5::internal::split_ordered_list< T, Allocator >::reference

Definition at line 204 of file _concurrent_unordered_impl.h.

◆ self_type

template<typename T, typename Allocator>
typedef split_ordered_list<T, Allocator> tbb::interface5::internal::split_ordered_list< T, Allocator >::self_type

Definition at line 191 of file _concurrent_unordered_impl.h.

◆ size_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::size_type tbb::interface5::internal::split_ordered_list< T, Allocator >::size_type

Definition at line 199 of file _concurrent_unordered_impl.h.

◆ value_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::value_type tbb::interface5::internal::split_ordered_list< T, Allocator >::value_type

Definition at line 198 of file _concurrent_unordered_impl.h.

Constructor & Destructor Documentation

◆ split_ordered_list()

template<typename T, typename Allocator>
tbb::interface5::internal::split_ordered_list< T, Allocator >::split_ordered_list ( allocator_type  a = allocator_type())
inline

Definition at line 309 of file _concurrent_unordered_impl.h.

{
    // Immediately allocate a dummy node with order key of 0. This node
    // will always be the head of the list.
    my_head = create_node(0);
}

◆ ~split_ordered_list()

template<typename T, typename Allocator>
tbb::interface5::internal::split_ordered_list< T, Allocator >::~split_ordered_list ( )
inline

Definition at line 317 of file _concurrent_unordered_impl.h.

{
    // Clear the list
    clear();

    // Remove the head element which is not cleared by clear()
    nodeptr_t pnode = my_head;
    my_head = NULL;

    __TBB_ASSERT(pnode != NULL && pnode->my_next == NULL, "Invalid head list node");

    destroy_node(pnode);
}

Member Function Documentation

◆ begin() [1/2]

◆ begin() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::begin ( ) const
inline

◆ cbegin()

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::cbegin ( ) const
inline

◆ cend()

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::cend ( ) const
inline

◆ check_range() [1/2]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::check_range ( raw_iterator first, raw_iterator last )
inline private

Definition at line 650 of file _concurrent_unordered_impl.h.

{
#if TBB_USE_ASSERT
    for (raw_iterator it = first; it != last; ++it)
    {
        raw_iterator next = it;
        ++next;

        __TBB_ASSERT(next == raw_end() || get_order_key(next) >= get_order_key(it), "!!! List order inconsistency !!!");
    }
#else
    tbb::internal::suppress_unused_warning(first, last);
#endif
}

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::concurrent_unordered_base().


◆ check_range() [2/2]

◆ clear()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::clear ( )
inline

Definition at line 337 of file _concurrent_unordered_impl.h.

{
    nodeptr_t pnext;
    nodeptr_t pnode = my_head;

    __TBB_ASSERT(my_head != NULL, "Invalid head list node");
    pnext = pnode->my_next;
    pnode->my_next = NULL;
    pnode = pnext;

    while (pnode != NULL)
    {
        pnext = pnode->my_next;
        destroy_node(pnode);
        pnode = pnext;
    }

    my_element_count = 0;
}

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::clear(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_copy(), and tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::~split_ordered_list().


◆ create_node() [1/3]

◆ create_node() [2/3]

template<typename T, typename Allocator>
template<typename Arg >
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node ( sokey_t order_key, __TBB_FORWARDING_REF(Arg) t, tbb::internal::true_type = tbb::internal::true_type() )
inline

Definition at line 269 of file _concurrent_unordered_impl.h.

{
    nodeptr_t pnode = my_node_allocator.allocate(1);

    //TODO: use RAII scoped guard instead of explicit catch
    __TBB_TRY {
        new(static_cast<void*>(&pnode->my_element)) T(tbb::internal::forward<Arg>(t));
        pnode->init(order_key);
    } __TBB_CATCH(...) {
        my_node_allocator.deallocate(pnode, 1);
        __TBB_RETHROW();
    }

    return (pnode);
}

◆ create_node() [3/3]

template<typename T, typename Allocator>
template<typename Arg >
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node ( sokey_t, __TBB_FORWARDING_REF(Arg), tbb::internal::false_type )
inline

Definition at line 287 of file _concurrent_unordered_impl.h.

{
    __TBB_ASSERT(false, "This compile-time helper should never get called");
    return nodeptr_t();
}

◆ create_node_v()

template<typename T, typename Allocator>
template<typename __TBB_PARAMETER_PACK Args>
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node_v ( __TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK  args)
inline

Definition at line 295 of file _concurrent_unordered_impl.h.

{
    nodeptr_t pnode = my_node_allocator.allocate(1);

    //TODO: use RAII scoped guard instead of explicit catch
    __TBB_TRY {
        new(static_cast<void*>(&pnode->my_element)) T(__TBB_PACK_EXPANSION(tbb::internal::forward<Args>(args)));
    } __TBB_CATCH(...) {
        my_node_allocator.deallocate(pnode, 1);
        __TBB_RETHROW();
    }

    return (pnode);
}

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::emplace().


◆ destroy_node()

◆ empty()

template<typename T, typename Allocator>
bool tbb::interface5::internal::split_ordered_list< T, Allocator >::empty ( ) const
inline

◆ end() [1/2]

◆ end() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::end ( ) const
inline

Definition at line 370 of file _concurrent_unordered_impl.h.

{
    return (const_iterator(0, this));
}

◆ erase_node() [1/5]

◆ erase_node() [2/5]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator previous, raw_const_iterator & where, tbb::internal::false_type )
inline

Definition at line 590 of file _concurrent_unordered_impl.h.

{
    erase_node_impl(previous, where);
}

◆ erase_node() [3/5]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator previous, raw_const_iterator & where )
inline

Definition at line 596 of file _concurrent_unordered_impl.h.

{
    erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
}

◆ erase_node() [4/5]

template<typename T, typename Allocator>
template<typename AllowDestroy >
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator previous, const_iterator where, AllowDestroy )
inline

Definition at line 602 of file _concurrent_unordered_impl.h.

{
    raw_const_iterator it = where;
    erase_node(previous, it, AllowDestroy());
    my_element_count--;

    return get_iterator(first_real_iterator(it));
}

◆ erase_node() [5/5]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator previous, const_iterator & where )
inline

Definition at line 611 of file _concurrent_unordered_impl.h.

{
    return erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
}

◆ erase_node_impl()

template<typename T, typename Allocator>
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node_impl ( raw_iterator previous, raw_const_iterator & where )
inline

Definition at line 574 of file _concurrent_unordered_impl.h.

{
    nodeptr_t pnode = (where++).get_node_ptr();
    nodeptr_t prevnode = previous.get_node_ptr();
    __TBB_ASSERT(prevnode->my_next == pnode, "Erase must take consecutive iterators");
    prevnode->my_next = pnode->my_next;
    return pnode;
}

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node().


◆ first_real_iterator() [1/2]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::first_real_iterator ( raw_iterator  it)
inline

Definition at line 465 of file _concurrent_unordered_impl.h.

{
    // Skip all dummy, internal only iterators
    while (it != raw_end() && it.get_node_ptr()->is_dummy())
        ++it;

    return iterator(it.get_node_ptr(), this);
}

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::begin(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node(), tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::set_midpoint(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_begin(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_end().


◆ first_real_iterator() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::first_real_iterator ( raw_const_iterator  it) const
inline

Definition at line 476 of file _concurrent_unordered_impl.h.

{
    // Skip all dummy, internal only iterators
    while (it != raw_end() && it.get_node_ptr()->is_dummy())
        ++it;

    return const_iterator(it.get_node_ptr(), this);
}

◆ get_allocator()

template<typename T, typename Allocator>
allocator_type tbb::interface5::internal::split_ordered_list< T, Allocator >::get_allocator ( ) const
inline

◆ get_iterator() [1/4]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_iterator  it)
inline

Definition at line 441 of file _concurrent_unordered_impl.h.

{
    __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
    return iterator(it.get_node_ptr(), this);
}

Referenced by tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::begin(), tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::end(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_equal_range(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_erase(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_extract(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_find(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::move_all(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_erase().


◆ get_iterator() [2/4]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_const_iterator  it) const
inline

Definition at line 448 of file _concurrent_unordered_impl.h.

{
    __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
    return const_iterator(it.get_node_ptr(), this);
}

◆ get_iterator() [3/4]

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_const_iterator  it)
inline

Definition at line 454 of file _concurrent_unordered_impl.h.

{
    return raw_iterator(it.get_node_ptr());
}

◆ get_iterator() [4/4]

template<typename T, typename Allocator>
static iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( const_iterator  it)
inline static

Definition at line 459 of file _concurrent_unordered_impl.h.

{
    return iterator(it.my_node_ptr, it.my_list_ptr);
}

◆ get_order_key()

template<typename T, typename Allocator>
static sokey_t tbb::interface5::internal::split_ordered_list< T, Allocator >::get_order_key ( const raw_const_iterator & it )
inline static

◆ get_safe_order_key()

template<typename T, typename Allocator>
static sokey_t tbb::interface5::internal::split_ordered_list< T, Allocator >::get_safe_order_key ( const raw_const_iterator & it )
inline static

Definition at line 434 of file _concurrent_unordered_impl.h.

{
    if( !it.get_node_ptr() ) return ~sokey_t(0);
    return it.get_node_ptr()->get_order_key();
}

◆ insert_dummy()

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::insert_dummy ( raw_iterator it, sokey_t order_key )
inline

Definition at line 517 of file _concurrent_unordered_impl.h.

{
    raw_iterator last = raw_end();
    raw_iterator where = it;

    __TBB_ASSERT(where != last, "Invalid head node");

    ++where;

    // Create a dummy element up front, even though it may be discarded (due to concurrent insertion)
    nodeptr_t dummy_node = create_node(order_key);

    for (;;)
    {
        __TBB_ASSERT(it != last, "Invalid head list node");

        // If the head iterator is at the end of the list, or past the point where this dummy
        // node needs to be inserted, then try to insert it.
        if (where == last || get_order_key(where) > order_key)
        {
            __TBB_ASSERT(get_order_key(it) < order_key, "Invalid node order in the list");

            // Try to insert it in the right place
            nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), dummy_node, where.get_node_ptr());

            if (inserted_node == dummy_node)
            {
                // Insertion succeeded, check the list for order violations
                check_range(it, where);
                return raw_iterator(dummy_node);
            }
            else
            {
                // Insertion failed: either dummy node was inserted by another thread, or
                // a real element was inserted at exactly the same place as dummy node.
                // Proceed with the search from the previous location where order key was
                // known to be larger (note: this is legal only because there is no safe
                // concurrent erase operation supported).
                where = it;
                ++where;
                continue;
            }
        }
        else if (get_order_key(where) == order_key)
        {
            // Another dummy node with the same value found, discard the new one.
            destroy_node(dummy_node);
            return where;
        }

        // Move the iterator forward
        it = where;
        ++where;
    }
}

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::init_bucket().

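
The retry loop above hinges on try_insert_atomic(), which links a node between two known neighbours with a single compare-and-swap on the predecessor's next pointer and tells the caller whether it won the race. A minimal stand-alone sketch of that pattern, written with std::atomic rather than TBB's internal atomics (the node layout and names below are invented for illustration):

#include <atomic>
#include <cstddef>

struct sketch_node {
    std::atomic<sketch_node*> next;
    std::size_t order_key;
};

// Try to link new_node between prev and curr. Returns new_node on success,
// or whatever node another thread installed there first (the caller then
// re-scans forward from prev, exactly as insert_dummy() does).
inline sketch_node* try_insert_atomic_sketch(sketch_node* prev,
                                             sketch_node* new_node,
                                             sketch_node* curr) {
    new_node->next.store(curr, std::memory_order_relaxed);
    sketch_node* expected = curr;
    if (prev->next.compare_exchange_strong(expected, new_node))
        return new_node;   // we won: new_node is now reachable from prev
    return expected;       // lost the race: expected holds the competing node
}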

◆ max_size()

template<typename T, typename Allocator>
size_type tbb::interface5::internal::split_ordered_list< T, Allocator >::max_size ( ) const
inline

Definition at line 393 of file _concurrent_unordered_impl.h.

{
    return my_node_allocator.max_size();
}

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::max_size().


◆ move_all()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::move_all ( self_type & source )
inline

Definition at line 618 of file _concurrent_unordered_impl.h.

{
    raw_const_iterator first = source.raw_begin();
    raw_const_iterator last = source.raw_end();

    if (first == last)
        return;

    nodeptr_t previous_node = my_head;
    raw_const_iterator begin_iterator = first++;

    // Move all elements one by one, including dummy ones
    for (raw_const_iterator it = first; it != last;)
    {
        nodeptr_t pnode = it.get_node_ptr();

        nodeptr_t dummy_node = pnode->is_dummy() ? create_node(pnode->get_order_key()) : create_node(pnode->get_order_key(), pnode->my_element);
        previous_node = try_insert_atomic(previous_node, dummy_node, NULL);
        __TBB_ASSERT(previous_node != NULL, "Insertion must succeed");
        raw_const_iterator where = it++;
        source.erase_node(get_iterator(begin_iterator), where);
    }
    check_range();
}

◆ raw_begin() [1/2]

◆ raw_begin() [2/2]

template<typename T, typename Allocator>
raw_const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_begin ( ) const
inline

Definition at line 418 of file _concurrent_unordered_impl.h.

{
    return raw_const_iterator(my_head);
}

◆ raw_end() [1/2]

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_end ( )
inline

Definition at line 422 of file _concurrent_unordered_impl.h.

{
    return raw_iterator(0);
}

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::check_range(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::first_real_iterator(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::insert_dummy(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_equal_range(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_erase(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_extract(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_find(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::move_all(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_bucket_size(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_end().


◆ raw_end() [2/2]

template<typename T, typename Allocator>
raw_const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_end ( ) const
inline

Definition at line 426 of file _concurrent_unordered_impl.h.

{
    return raw_const_iterator(0);
}

◆ size()

◆ swap()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::swap ( self_type & other )
inline

Definition at line 398 of file _concurrent_unordered_impl.h.

{
    if (this == &other)
    {
        // Nothing to do
        return;
    }

    std::swap(my_element_count, other.my_element_count);
    std::swap(my_head, other.my_head);
}

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::swap().


◆ try_insert()

template<typename T, typename Allocator>
std::pair<iterator, bool> tbb::interface5::internal::split_ordered_list< T, Allocator >::try_insert ( raw_iterator it, raw_iterator next, nodeptr_t pnode, size_type * new_count )
inline

Definition at line 499 of file _concurrent_unordered_impl.h.

{
    nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), pnode, next.get_node_ptr());

    if (inserted_node == pnode)
    {
        // If the insert succeeded, check that the order is correct and increment the element count
        check_range(it, next);
        *new_count = tbb::internal::as_atomic(my_element_count).fetch_and_increment();
        return std::pair<iterator, bool>(iterator(pnode, this), true);
    }
    else
    {
        return std::pair<iterator, bool>(end(), false);
    }
}

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert().

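
Callers such as concurrent_unordered_base::internal_insert() pair try_insert() with create_node(): they build the node optimistically, attempt the atomic link-in, and reclaim the node themselves if another thread claimed the slot first. A simplified sketch of that calling pattern (the helper name and the single-attempt behaviour are invented for illustration; the real container retries its search on failure):

#include <utility>
#include "tbb/internal/_concurrent_unordered_impl.h"  // assumed include path for this internal header

// List is a split_ordered_list instantiation; prev/next are consecutive raw
// iterators bracketing the insertion point; order_key/value come from the caller.
template <typename List, typename Value>
typename List::iterator insert_or_discard(List& solist,
                                          typename List::raw_iterator prev,
                                          typename List::raw_iterator next,
                                          tbb::interface5::internal::sokey_t order_key,
                                          const Value& value) {
    typename List::nodeptr_t pnode = solist.create_node(order_key, value);
    typename List::size_type new_count;

    std::pair<typename List::iterator, bool> result =
        solist.try_insert(prev, next, pnode, &new_count);

    if (!result.second)
        solist.destroy_node(pnode);   // another thread occupied this position first
    return result.first;              // end() on failure, iterator to the new element on success
}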

◆ try_insert_atomic()

Friends And Related Function Documentation

◆ concurrent_unordered_base

template<typename T, typename Allocator>
template<typename Traits >
friend class concurrent_unordered_base
friend

Definition at line 647 of file _concurrent_unordered_impl.h.

Member Data Documentation

◆ my_element_count

◆ my_head

◆ my_node_allocator


The documentation for this class was generated from the following file:

_concurrent_unordered_impl.h

Copyright © 2005-2019 Intel Corporation. All Rights Reserved.

Intel, Pentium, Intel Xeon, Itanium, Intel XScale and VTune are registered trademarks or trademarks of Intel Corporation or its subsidiaries in the United States and other countries.

* Other names and brands may be claimed as the property of others.