circular_deque.h

  1. // Copyright 2017 The Chromium Authors. All rights reserved.
  2. // Use of this source code is governed by a BSD-style license that can be
  3. // found in the LICENSE file.
  4. #ifndef BASE_CONTAINERS_CIRCULAR_DEQUE_H_
  5. #define BASE_CONTAINERS_CIRCULAR_DEQUE_H_
  6. #include <algorithm>
  7. #include <cstddef>
#include <cstdint>  // For uint64_t (used in DCHECK-enabled builds).
  8. #include <iterator>
  9. #include <type_traits>
  10. #include <utility>
  11. #include "base/as_const.h"
  12. #include "base/check.h"
  13. #include "base/containers/vector_buffer.h"
  14. #include "base/dcheck_is_on.h"
  15. #include "base/memory/raw_ptr_exclusion.h"
  16. #include "base/ranges/algorithm.h"
  17. #include "base/template_util.h"
  18. // base::circular_deque is similar to std::deque. Unlike std::deque, the
  19. // storage is provided in a flat circular buffer conceptually similar to a
  20. // vector. The beginning and end will wrap around as necessary so that
  21. // pushes and pops will be constant time as long as a capacity expansion is
  22. // not required.
  23. //
  24. // The API should be identical to std::deque with the following differences:
  25. //
  26. // - ITERATORS ARE NOT STABLE. Mutating the container will invalidate all
  27. // iterators.
  28. //
  29. // - Insertions may resize the vector and so are not constant time (std::deque
  30. // guarantees constant time for insertions at the ends).
  31. //
  32. // - Container-wide comparisons are not implemented. If you want to compare
  33. // two containers, use an algorithm so the expensive iteration is explicit.
  34. //
  35. // If you want a similar container with only a queue API, use base::queue in
  36. // base/containers/queue.h.
  37. //
  38. // Constructors:
  39. // circular_deque();
  40. // circular_deque(size_t count);
  41. // circular_deque(size_t count, const T& value);
  42. // circular_deque(InputIterator first, InputIterator last);
  43. // circular_deque(const circular_deque&);
  44. // circular_deque(circular_deque&&);
  45. // circular_deque(std::initializer_list<value_type>);
  46. //
  47. // Assignment functions:
  48. // circular_deque& operator=(const circular_deque&);
  49. // circular_deque& operator=(circular_deque&&);
  50. // circular_deque& operator=(std::initializer_list<T>);
  51. // void assign(size_t count, const T& value);
  52. // void assign(InputIterator first, InputIterator last);
  53. // void assign(std::initializer_list<T> value);
  54. //
  55. // Random accessors:
  56. // T& at(size_t);
  57. // const T& at(size_t) const;
  58. // T& operator[](size_t);
  59. // const T& operator[](size_t) const;
  60. //
  61. // End accessors:
  62. // T& front();
  63. // const T& front() const;
  64. // T& back();
  65. // const T& back() const;
  66. //
  67. // Iterator functions:
  68. // iterator begin();
  69. // const_iterator begin() const;
  70. // const_iterator cbegin() const;
  71. // iterator end();
  72. // const_iterator end() const;
  73. // const_iterator cend() const;
  74. // reverse_iterator rbegin();
  75. // const_reverse_iterator rbegin() const;
  76. // const_reverse_iterator crbegin() const;
  77. // reverse_iterator rend();
  78. // const_reverse_iterator rend() const;
  79. // const_reverse_iterator crend() const;
  80. //
  81. // Memory management:
  82. // void reserve(size_t); // SEE IMPLEMENTATION FOR SOME GOTCHAS.
  83. // size_t capacity() const;
  84. // void shrink_to_fit();
  85. //
  86. // Size management:
  87. // void clear();
  88. // bool empty() const;
  89. // size_t size() const;
  90. // void resize(size_t);
  91. // void resize(size_t count, const T& value);
  92. //
  93. // Positional insert and erase:
  94. // void insert(const_iterator pos, size_type count, const T& value);
  95. // void insert(const_iterator pos,
  96. // InputIterator first, InputIterator last);
  97. // iterator insert(const_iterator pos, const T& value);
  98. // iterator insert(const_iterator pos, T&& value);
  99. // iterator emplace(const_iterator pos, Args&&... args);
  100. // iterator erase(const_iterator pos);
  101. // iterator erase(const_iterator first, const_iterator last);
  102. //
  103. // End insert and erase:
  104. // void push_front(const T&);
  105. // void push_front(T&&);
  106. // void push_back(const T&);
  107. // void push_back(T&&);
  108. // T& emplace_front(Args&&...);
  109. // T& emplace_back(Args&&...);
  110. // void pop_front();
  111. // void pop_back();
  112. //
  113. // General:
  114. // void swap(circular_deque&);
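//
// Example usage (an illustrative sketch, not part of the API listing above):
//
//   base::circular_deque<int> q;
//   q.push_back(1);
//   q.push_back(2);
//   q.push_front(0);         // q is now {0, 1, 2}.
//   int first = q.front();   // first == 0.
//   q.pop_front();           // q is now {1, 2}; any previously obtained
//                            // iterators must be considered invalid.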
  115. namespace base {
  116. template <class T>
  117. class circular_deque;
  118. namespace internal {
  119. // Start allocating nonempty buffers with this many entries. This is the
  120. // external capacity so the internal buffer will be one larger (= 4) which is
  121. // more even for the allocator. See the descriptions of internal vs. external
  122. capacity in the comment above the buffer_ variable below.
  123. constexpr size_t kCircularBufferInitialCapacity = 3;
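// For example (illustrative): the first push_back() on a default-constructed
// deque allocates an internal buffer of 4 entries, so capacity() reports 3.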
  124. template <typename T>
  125. class circular_deque_const_iterator {
  126. public:
  127. using difference_type = std::ptrdiff_t;
  128. using value_type = T;
  129. using pointer = const T*;
  130. using reference = const T&;
  131. using iterator_category = std::random_access_iterator_tag;
  132. circular_deque_const_iterator() : parent_deque_(nullptr), index_(0) {
  133. #if DCHECK_IS_ON()
  134. created_generation_ = 0;
  135. #endif // DCHECK_IS_ON()
  136. }
  137. // Dereferencing.
  138. const T& operator*() const {
  139. CheckUnstableUsage();
  140. parent_deque_->CheckValidIndex(index_);
  141. return parent_deque_->buffer_[index_];
  142. }
  143. const T* operator->() const {
  144. CheckUnstableUsage();
  145. parent_deque_->CheckValidIndex(index_);
  146. return &parent_deque_->buffer_[index_];
  147. }
  148. const value_type& operator[](difference_type i) const { return *(*this + i); }
  149. // Increment and decrement.
  150. circular_deque_const_iterator& operator++() {
  151. Increment();
  152. return *this;
  153. }
  154. circular_deque_const_iterator operator++(int) {
  155. circular_deque_const_iterator ret = *this;
  156. Increment();
  157. return ret;
  158. }
  159. circular_deque_const_iterator& operator--() {
  160. Decrement();
  161. return *this;
  162. }
  163. circular_deque_const_iterator operator--(int) {
  164. circular_deque_const_iterator ret = *this;
  165. Decrement();
  166. return ret;
  167. }
  168. // Random access mutation.
  169. friend circular_deque_const_iterator operator+(
  170. const circular_deque_const_iterator& iter,
  171. difference_type offset) {
  172. circular_deque_const_iterator ret = iter;
  173. ret.Add(offset);
  174. return ret;
  175. }
  176. circular_deque_const_iterator& operator+=(difference_type offset) {
  177. Add(offset);
  178. return *this;
  179. }
  180. friend circular_deque_const_iterator operator-(
  181. const circular_deque_const_iterator& iter,
  182. difference_type offset) {
  183. circular_deque_const_iterator ret = iter;
  184. ret.Add(-offset);
  185. return ret;
  186. }
  187. circular_deque_const_iterator& operator-=(difference_type offset) {
  188. Add(-offset);
  189. return *this;
  190. }
  191. friend std::ptrdiff_t operator-(const circular_deque_const_iterator& lhs,
  192. const circular_deque_const_iterator& rhs) {
  193. lhs.CheckComparable(rhs);
  194. return lhs.OffsetFromBegin() - rhs.OffsetFromBegin();
  195. }
  196. // Comparisons.
  197. friend bool operator==(const circular_deque_const_iterator& lhs,
  198. const circular_deque_const_iterator& rhs) {
  199. lhs.CheckComparable(rhs);
  200. return lhs.index_ == rhs.index_;
  201. }
  202. friend bool operator!=(const circular_deque_const_iterator& lhs,
  203. const circular_deque_const_iterator& rhs) {
  204. return !(lhs == rhs);
  205. }
  206. friend bool operator<(const circular_deque_const_iterator& lhs,
  207. const circular_deque_const_iterator& rhs) {
  208. lhs.CheckComparable(rhs);
  209. return lhs.OffsetFromBegin() < rhs.OffsetFromBegin();
  210. }
  211. friend bool operator<=(const circular_deque_const_iterator& lhs,
  212. const circular_deque_const_iterator& rhs) {
  213. return !(lhs > rhs);
  214. }
  215. friend bool operator>(const circular_deque_const_iterator& lhs,
  216. const circular_deque_const_iterator& rhs) {
  217. lhs.CheckComparable(rhs);
  218. return lhs.OffsetFromBegin() > rhs.OffsetFromBegin();
  219. }
  220. friend bool operator>=(const circular_deque_const_iterator& lhs,
  221. const circular_deque_const_iterator& rhs) {
  222. return !(lhs < rhs);
  223. }
  224. protected:
  225. friend class circular_deque<T>;
  226. circular_deque_const_iterator(const circular_deque<T>* parent, size_t index)
  227. : parent_deque_(parent), index_(index) {
  228. #if DCHECK_IS_ON()
  229. created_generation_ = parent->generation_;
  230. #endif // DCHECK_IS_ON()
  231. }
  232. // Returns the offset from the beginning index of the buffer to the current
  233. // item.
  234. size_t OffsetFromBegin() const {
  235. if (index_ >= parent_deque_->begin_)
  236. return index_ - parent_deque_->begin_; // On the same side as begin.
  237. return parent_deque_->buffer_.capacity() - parent_deque_->begin_ + index_;
  238. }
  239. // Most uses will be ++ and -- so use a simplified implementation.
  240. void Increment() {
  241. CheckUnstableUsage();
  242. parent_deque_->CheckValidIndex(index_);
  243. index_++;
  244. if (index_ == parent_deque_->buffer_.capacity())
  245. index_ = 0;
  246. }
  247. void Decrement() {
  248. CheckUnstableUsage();
  249. parent_deque_->CheckValidIndexOrEnd(index_);
  250. if (index_ == 0)
  251. index_ = parent_deque_->buffer_.capacity() - 1;
  252. else
  253. index_--;
  254. }
  255. void Add(difference_type delta) {
  256. CheckUnstableUsage();
  257. #if DCHECK_IS_ON()
  258. if (delta <= 0)
  259. parent_deque_->CheckValidIndexOrEnd(index_);
  260. else
  261. parent_deque_->CheckValidIndex(index_);
  262. #endif
  263. // It should be valid to add 0 to any iterator, even if the container is
  264. // empty and the iterator points to end(). The modulo below will divide
  265. // by 0 if the buffer capacity is 0, so it's important to check for
  266. // this case explicitly.
  267. if (delta == 0)
  268. return;
  269. difference_type new_offset = OffsetFromBegin() + delta;
  270. DCHECK(new_offset >= 0 &&
  271. new_offset <= static_cast<difference_type>(parent_deque_->size()));
  272. index_ = (new_offset + parent_deque_->begin_) %
  273. parent_deque_->buffer_.capacity();
  274. }
  275. #if DCHECK_IS_ON()
  276. void CheckUnstableUsage() const {
  277. DCHECK(parent_deque_);
  278. // Since circular_deque doesn't guarantee stability, any attempt to
  279. // dereference this iterator after a mutation (i.e. the generation doesn't
  280. // match the original) in the container is illegal.
  281. DCHECK(created_generation_ == parent_deque_->generation_)
  282. << "circular_deque iterator dereferenced after mutation.";
  283. }
  284. void CheckComparable(const circular_deque_const_iterator& other) const {
  285. DCHECK(parent_deque_ == other.parent_deque_);
  286. // Since circular_deque doesn't guarantee stability, two iterators that
  287. // are compared must have been generated without mutating the container.
  288. // If this fires, the container was mutated between generating the two
  289. // iterators being compared.
  290. DCHECK(created_generation_ == other.created_generation_);
  291. }
  292. #else
  293. inline void CheckUnstableUsage() const {}
  294. inline void CheckComparable(const circular_deque_const_iterator&) const {}
  295. #endif // DCHECK_IS_ON()
  296. // `parent_deque_` is not a raw_ptr<...> for performance reasons: Usually
  297. // on-stack pointer, pointing back to the collection being iterated, owned by
  298. // object that iterates over it. Additionally this is supported by the
  299. // analysis of sampling profiler data and tab_search:top100:2020.
  300. RAW_PTR_EXCLUSION const circular_deque<T>* parent_deque_;
  301. size_t index_;
  302. #if DCHECK_IS_ON()
  303. // The generation of the parent deque when this iterator was created. The
  304. // container will update the generation for every modification so we can
  305. // test if the container was modified by comparing them.
  306. uint64_t created_generation_;
  307. #endif // DCHECK_IS_ON()
  308. };
  309. template <typename T>
  310. class circular_deque_iterator : public circular_deque_const_iterator<T> {
  311. using base = circular_deque_const_iterator<T>;
  312. public:
  313. friend class circular_deque<T>;
  314. using difference_type = std::ptrdiff_t;
  315. using value_type = T;
  316. using pointer = T*;
  317. using reference = T&;
  318. using iterator_category = std::random_access_iterator_tag;
  319. // Expose the base class' constructor.
  320. circular_deque_iterator() : circular_deque_const_iterator<T>() {}
  321. // Dereferencing.
  322. T& operator*() const { return const_cast<T&>(base::operator*()); }
  323. T* operator->() const { return const_cast<T*>(base::operator->()); }
  324. T& operator[](difference_type i) {
  325. return const_cast<T&>(base::operator[](i));
  326. }
  327. // Random access mutation.
  328. friend circular_deque_iterator operator+(const circular_deque_iterator& iter,
  329. difference_type offset) {
  330. circular_deque_iterator ret = iter;
  331. ret.Add(offset);
  332. return ret;
  333. }
  334. circular_deque_iterator& operator+=(difference_type offset) {
  335. base::Add(offset);
  336. return *this;
  337. }
  338. friend circular_deque_iterator operator-(const circular_deque_iterator& iter,
  339. difference_type offset) {
  340. circular_deque_iterator ret = iter;
  341. ret.Add(-offset);
  342. return ret;
  343. }
  344. circular_deque_iterator& operator-=(difference_type offset) {
  345. base::Add(-offset);
  346. return *this;
  347. }
  348. // Increment and decrement.
  349. circular_deque_iterator& operator++() {
  350. base::Increment();
  351. return *this;
  352. }
  353. circular_deque_iterator operator++(int) {
  354. circular_deque_iterator ret = *this;
  355. base::Increment();
  356. return ret;
  357. }
  358. circular_deque_iterator& operator--() {
  359. base::Decrement();
  360. return *this;
  361. }
  362. circular_deque_iterator operator--(int) {
  363. circular_deque_iterator ret = *this;
  364. base::Decrement();
  365. return ret;
  366. }
  367. private:
  368. circular_deque_iterator(const circular_deque<T>* parent, size_t index)
  369. : circular_deque_const_iterator<T>(parent, index) {}
  370. };
  371. } // namespace internal
  372. template <typename T>
  373. class circular_deque {
  374. private:
  375. using VectorBuffer = internal::VectorBuffer<T>;
  376. public:
  377. using value_type = T;
  378. using size_type = std::size_t;
  379. using difference_type = std::ptrdiff_t;
  380. using reference = value_type&;
  381. using const_reference = const value_type&;
  382. using pointer = value_type*;
  383. using const_pointer = const value_type*;
  384. using iterator = internal::circular_deque_iterator<T>;
  385. using const_iterator = internal::circular_deque_const_iterator<T>;
  386. using reverse_iterator = std::reverse_iterator<iterator>;
  387. using const_reverse_iterator = std::reverse_iterator<const_iterator>;
  388. // ---------------------------------------------------------------------------
  389. // Constructor
  390. constexpr circular_deque() = default;
  391. // Constructs with |count| copies of |value| or default constructed version.
  392. circular_deque(size_type count) { resize(count); }
  393. circular_deque(size_type count, const T& value) { resize(count, value); }
  394. // Range constructor.
  395. template <class InputIterator>
  396. circular_deque(InputIterator first, InputIterator last) {
  397. assign(first, last);
  398. }
  399. // Copy/move.
  400. circular_deque(const circular_deque& other) : buffer_(other.size() + 1) {
  401. assign(other.begin(), other.end());
  402. }
  403. circular_deque(circular_deque&& other) noexcept
  404. : buffer_(std::move(other.buffer_)),
  405. begin_(other.begin_),
  406. end_(other.end_) {
  407. other.begin_ = 0;
  408. other.end_ = 0;
  409. }
  410. circular_deque(std::initializer_list<value_type> init) { assign(init); }
  411. ~circular_deque() { DestructRange(begin_, end_); }
  412. // ---------------------------------------------------------------------------
  413. // Assignments.
  414. //
  415. // All of these may invalidate iterators and references.
  416. circular_deque& operator=(const circular_deque& other) {
  417. if (&other == this)
  418. return *this;
  419. reserve(other.size());
  420. assign(other.begin(), other.end());
  421. return *this;
  422. }
  423. circular_deque& operator=(circular_deque&& other) noexcept {
  424. if (&other == this)
  425. return *this;
  426. // We're about to overwrite the buffer, so don't free it in clear to
  427. // avoid doing it twice.
  428. ClearRetainCapacity();
  429. buffer_ = std::move(other.buffer_);
  430. begin_ = other.begin_;
  431. end_ = other.end_;
  432. other.begin_ = 0;
  433. other.end_ = 0;
  434. IncrementGeneration();
  435. return *this;
  436. }
  437. circular_deque& operator=(std::initializer_list<value_type> ilist) {
  438. reserve(ilist.size());
  439. assign(std::begin(ilist), std::end(ilist));
  440. return *this;
  441. }
  442. void assign(size_type count, const value_type& value) {
  443. ClearRetainCapacity();
  444. reserve(count);
  445. for (size_t i = 0; i < count; i++)
  446. emplace_back(value);
  447. IncrementGeneration();
  448. }
  449. // This variant should be enabled only when InputIterator is an iterator.
  450. template <typename InputIterator>
  451. typename std::enable_if<::base::internal::is_iterator<InputIterator>::value,
  452. void>::type
  453. assign(InputIterator first, InputIterator last) {
  454. // Possible future enhancement, dispatch on iterator tag type. For forward
  455. // iterators we can use std::distance to preallocate the space required
  456. // and only do one copy.
  457. ClearRetainCapacity();
  458. for (; first != last; ++first)
  459. emplace_back(*first);
  460. IncrementGeneration();
  461. }
  462. void assign(std::initializer_list<value_type> value) {
  463. reserve(std::distance(value.begin(), value.end()));
  464. assign(value.begin(), value.end());
  465. }
  466. // ---------------------------------------------------------------------------
  467. // Accessors.
  468. //
  469. // Since this class assumes no exceptions, at() and operator[] are equivalent.
  470. const value_type& at(size_type i) const {
  471. DCHECK(i < size());
  472. size_t right_size = buffer_.capacity() - begin_;
  473. if (begin_ <= end_ || i < right_size)
  474. return buffer_[begin_ + i];
  475. return buffer_[i - right_size];
  476. }
  477. value_type& at(size_type i) {
  478. return const_cast<value_type&>(base::as_const(*this).at(i));
  479. }
  480. value_type& operator[](size_type i) {
  481. return const_cast<value_type&>(base::as_const(*this)[i]);
  482. }
  483. const value_type& operator[](size_type i) const { return at(i); }
  484. value_type& front() {
  485. DCHECK(!empty());
  486. return buffer_[begin_];
  487. }
  488. const value_type& front() const {
  489. DCHECK(!empty());
  490. return buffer_[begin_];
  491. }
  492. value_type& back() {
  493. DCHECK(!empty());
  494. return *(--end());
  495. }
  496. const value_type& back() const {
  497. DCHECK(!empty());
  498. return *(--end());
  499. }
  500. // ---------------------------------------------------------------------------
  501. // Iterators.
  502. iterator begin() { return iterator(this, begin_); }
  503. const_iterator begin() const { return const_iterator(this, begin_); }
  504. const_iterator cbegin() const { return const_iterator(this, begin_); }
  505. iterator end() { return iterator(this, end_); }
  506. const_iterator end() const { return const_iterator(this, end_); }
  507. const_iterator cend() const { return const_iterator(this, end_); }
  508. reverse_iterator rbegin() { return reverse_iterator(end()); }
  509. const_reverse_iterator rbegin() const {
  510. return const_reverse_iterator(end());
  511. }
  512. const_reverse_iterator crbegin() const { return rbegin(); }
  513. reverse_iterator rend() { return reverse_iterator(begin()); }
  514. const_reverse_iterator rend() const {
  515. return const_reverse_iterator(begin());
  516. }
  517. const_reverse_iterator crend() const { return rend(); }
  518. // ---------------------------------------------------------------------------
  519. // Memory management.
  520. // IMPORTANT NOTE ON reserve(...): This class implements auto-shrinking of
  521. // the buffer when elements are deleted and there is "too much" wasted space.
  522. // So if you call reserve() with a large size in anticipation of pushing many
  523. // elements, but pop an element before the queue is full, the capacity you
  524. // reserved may be lost.
  525. //
  526. // As a result, it's only worthwhile to call reserve() when you're adding
  527. // many things at once with no intermediate operations.
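//
// A minimal sketch of the intended pattern (|inputs|, Task and MakeTask() are
// hypothetical names used only for illustration):
//
//   base::circular_deque<Task> tasks;
//   tasks.reserve(inputs.size());        // Reserve once up front...
//   for (const auto& input : inputs)
//     tasks.push_back(MakeTask(input));  // ...then only push, with no pops
//                                        // in between.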
  528. void reserve(size_type new_capacity) {
  529. if (new_capacity > capacity())
  530. SetCapacityTo(new_capacity);
  531. }
  532. size_type capacity() const {
  533. // One item is wasted to indicate end().
  534. return buffer_.capacity() == 0 ? 0 : buffer_.capacity() - 1;
  535. }
  536. void shrink_to_fit() {
  537. if (empty()) {
  538. // Optimize empty case to really delete everything if there was
  539. // something.
  540. if (buffer_.capacity())
  541. buffer_ = VectorBuffer();
  542. } else {
  543. SetCapacityTo(size());
  544. }
  545. }
  546. // ---------------------------------------------------------------------------
  547. // Size management.
  548. // This will additionally reset the capacity() to 0.
  549. void clear() {
  550. // This can't resize(0) because that requires a default constructor to
  551. // compile, which not all contained classes may implement.
  552. ClearRetainCapacity();
  553. buffer_ = VectorBuffer();
  554. }
  555. bool empty() const { return begin_ == end_; }
  556. size_type size() const {
  557. if (begin_ <= end_)
  558. return end_ - begin_;
  559. return buffer_.capacity() - begin_ + end_;
  560. }
  561. // When reducing size, the elements are deleted from the end. When expanding
  562. // size, elements are added to the end with |value| or the default
  563. // constructed version. Even when using resize(count) to shrink, a default
  564. // constructor is required for the code to compile, even though it will not
  565. // be called.
  566. //
  567. // There are two versions rather than using a default value to avoid
  568. // creating a temporary when shrinking (when it's not needed). Plus, when the
  569. // default constructor is desired for expansion, calling it for each element
  570. // is usually faster than making a default-constructed temporary and
  571. // copying it.
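//
// Illustrative sketch (the single-argument form assumes T is
// default-constructible):
//
//   base::circular_deque<int> d = {1, 2, 3};
//   d.resize(5);      // d is {1, 2, 3, 0, 0}.
//   d.resize(2);      // d is {1, 2}.
//   d.resize(4, 9);   // d is {1, 2, 9, 9}.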
  572. void resize(size_type count) {
  573. // SEE BELOW VERSION if you change this. The code is mostly the same.
  574. if (count > size()) {
  575. // This could be slightly more efficient but expanding a queue with
  576. // identical elements is unusual and the extra computations of emplacing
  577. // one-by-one will typically be small relative to calling the constructor
  578. // for every item.
  579. ExpandCapacityIfNecessary(count - size());
  580. while (size() < count)
  581. emplace_back();
  582. } else if (count < size()) {
  583. size_t new_end = (begin_ + count) % buffer_.capacity();
  584. DestructRange(new_end, end_);
  585. end_ = new_end;
  586. ShrinkCapacityIfNecessary();
  587. }
  588. IncrementGeneration();
  589. }
  590. void resize(size_type count, const value_type& value) {
  591. // SEE ABOVE VERSION if you change this. The code is mostly the same.
  592. if (count > size()) {
  593. ExpandCapacityIfNecessary(count - size());
  594. while (size() < count)
  595. emplace_back(value);
  596. } else if (count < size()) {
  597. size_t new_end = (begin_ + count) % buffer_.capacity();
  598. DestructRange(new_end, end_);
  599. end_ = new_end;
  600. ShrinkCapacityIfNecessary();
  601. }
  602. IncrementGeneration();
  603. }
  604. // ---------------------------------------------------------------------------
  605. // Insert and erase.
  606. //
  607. // Insertion and deletion in the middle is O(n) and invalidates all existing
  608. // iterators.
  609. //
  610. // The implementation of insert isn't optimized as much as it could be. If
  611. // the insertion requires that the buffer be grown, it will first be grown
  612. // and everything moved, and then the items will be inserted, potentially
  613. // moving some items twice. This simplifies the implementation substantially
  614. // and means less generated templatized code. Since this is an uncommon
  615. // operation for deques, and already relatively slow, it doesn't seem worth
  616. // the benefit to optimize this.
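//
// Illustrative sketch (note that each insert invalidates all existing
// iterators):
//
//   base::circular_deque<int> d = {1, 4};
//   auto it = d.insert(d.begin() + 1, 2);  // d is {1, 2, 4}, *it == 2.
//   d.insert(d.begin() + 2, 1, 3);         // d is {1, 2, 3, 4}; |it| is now
//                                          // invalid and must not be used.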
  617. void insert(const_iterator pos, size_type count, const T& value) {
  618. ValidateIterator(pos);
  619. // Optimize insert at the beginning.
  620. if (pos == begin()) {
  621. ExpandCapacityIfNecessary(count);
  622. for (size_t i = 0; i < count; i++)
  623. push_front(value);
  624. return;
  625. }
  626. iterator insert_cur(this, pos.index_);
  627. iterator insert_end;
  628. MakeRoomFor(count, &insert_cur, &insert_end);
  629. while (insert_cur < insert_end) {
  630. new (&buffer_[insert_cur.index_]) T(value);
  631. ++insert_cur;
  632. }
  633. IncrementGeneration();
  634. }
  635. // This enable_if keeps this call from getting confused with the (pos, count,
  636. // value) version when value is an integer.
  637. template <class InputIterator>
  638. typename std::enable_if<::base::internal::is_iterator<InputIterator>::value,
  639. void>::type
  640. insert(const_iterator pos, InputIterator first, InputIterator last) {
  641. ValidateIterator(pos);
  642. const difference_type inserted_items_signed = std::distance(first, last);
  643. if (inserted_items_signed == 0)
  644. return; // Can divide by 0 when doing modulo below, so return early.
  645. CHECK(inserted_items_signed > 0);
  646. const size_type inserted_items =
  647. static_cast<size_type>(inserted_items_signed);
  648. // Make a hole to copy the items into.
  649. iterator insert_cur;
  650. iterator insert_end;
  651. if (pos == begin()) {
  652. // Optimize insert at the beginning, nothing needs to be shifted and the
  653. // hole is the |inserted_items| block immediately before |begin_|.
  654. ExpandCapacityIfNecessary(inserted_items);
  655. insert_end = begin();
  656. begin_ =
  657. (begin_ + buffer_.capacity() - inserted_items) % buffer_.capacity();
  658. insert_cur = begin();
  659. } else {
  660. insert_cur = iterator(this, pos.index_);
  661. MakeRoomFor(inserted_items, &insert_cur, &insert_end);
  662. }
  663. // Copy the items.
  664. while (insert_cur < insert_end) {
  665. new (&buffer_[insert_cur.index_]) T(*first);
  666. ++insert_cur;
  667. ++first;
  668. }
  669. IncrementGeneration();
  670. }
  671. // These all return an iterator to the inserted item. Existing iterators will
  672. // be invalidated.
  673. iterator insert(const_iterator pos, const T& value) {
  674. return emplace(pos, value);
  675. }
  676. iterator insert(const_iterator pos, T&& value) {
  677. return emplace(pos, std::move(value));
  678. }
  679. template <class... Args>
  680. iterator emplace(const_iterator pos, Args&&... args) {
  681. ValidateIterator(pos);
  682. // Optimize insert at beginning which doesn't require shifting.
  683. if (pos == cbegin()) {
  684. emplace_front(std::forward<Args>(args)...);
  685. return begin();
  686. }
  687. // Do this before we make the new iterators we return.
  688. IncrementGeneration();
  689. iterator insert_begin(this, pos.index_);
  690. iterator insert_end;
  691. MakeRoomFor(1, &insert_begin, &insert_end);
  692. new (&buffer_[insert_begin.index_]) T(std::forward<Args>(args)...);
  693. return insert_begin;
  694. }
  695. // Calling erase() won't automatically resize the buffer smaller like resize
  696. // or the pop functions. Erase is slow and relatively uncommon, and for
  697. // normal deque usage a pop will normally be done on a regular basis, which
  698. // will prevent excessive buffer usage over long periods of time. It's not
  699. // worth having the extra code for every template instantiation of erase()
  700. // to resize capacity downward to a new buffer.
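//
// Illustrative sketch:
//
//   base::circular_deque<int> d = {1, 2, 3, 4, 5};
//   d.erase(d.begin() + 1);                 // d is {1, 3, 4, 5}.
//   d.erase(d.begin() + 1, d.begin() + 3);  // d is {1, 5}.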
  701. iterator erase(const_iterator pos) { return erase(pos, pos + 1); }
  702. iterator erase(const_iterator first, const_iterator last) {
  703. ValidateIterator(first);
  704. ValidateIterator(last);
  705. IncrementGeneration();
  706. // First, call the destructor on the deleted items.
  707. if (first.index_ == last.index_) {
  708. // Nothing deleted. Need to return early to avoid falling through to
  709. // moving items on top of themselves.
  710. return iterator(this, first.index_);
  711. } else if (first.index_ < last.index_) {
  712. // Contiguous range.
  713. buffer_.DestructRange(&buffer_[first.index_], &buffer_[last.index_]);
  714. } else {
  715. // Deleted range wraps around.
  716. buffer_.DestructRange(&buffer_[first.index_],
  717. &buffer_[buffer_.capacity()]);
  718. buffer_.DestructRange(&buffer_[0], &buffer_[last.index_]);
  719. }
  720. if (first.index_ == begin_) {
  721. // This deletion is from the beginning. Nothing needs to be copied, only
  722. // begin_ needs to be updated.
  723. begin_ = last.index_;
  724. return iterator(this, last.index_);
  725. }
  726. // In an erase operation, the shifted items all move logically to the left,
  727. // so move them from left-to-right.
  728. iterator move_src(this, last.index_);
  729. iterator move_src_end = end();
  730. iterator move_dest(this, first.index_);
  731. for (; move_src < move_src_end; move_src++, move_dest++) {
  732. buffer_.MoveRange(&buffer_[move_src.index_],
  733. &buffer_[move_src.index_ + 1],
  734. &buffer_[move_dest.index_]);
  735. }
  736. end_ = move_dest.index_;
  737. // Since we did not reallocate and only changed things after the erased
  738. // element(s), the input iterator's index points to the thing following the
  739. // deletion.
  740. return iterator(this, first.index_);
  741. }
  742. // ---------------------------------------------------------------------------
  743. // Begin/end operations.
  744. void push_front(const T& value) { emplace_front(value); }
  745. void push_front(T&& value) { emplace_front(std::move(value)); }
  746. void push_back(const T& value) { emplace_back(value); }
  747. void push_back(T&& value) { emplace_back(std::move(value)); }
  748. template <class... Args>
  749. reference emplace_front(Args&&... args) {
  750. ExpandCapacityIfNecessary(1);
  751. if (begin_ == 0)
  752. begin_ = buffer_.capacity() - 1;
  753. else
  754. begin_--;
  755. IncrementGeneration();
  756. new (&buffer_[begin_]) T(std::forward<Args>(args)...);
  757. return front();
  758. }
  759. template <class... Args>
  760. reference emplace_back(Args&&... args) {
  761. ExpandCapacityIfNecessary(1);
  762. new (&buffer_[end_]) T(std::forward<Args>(args)...);
  763. if (end_ == buffer_.capacity() - 1)
  764. end_ = 0;
  765. else
  766. end_++;
  767. IncrementGeneration();
  768. return back();
  769. }
  770. void pop_front() {
  771. DCHECK(size());
  772. buffer_.DestructRange(&buffer_[begin_], &buffer_[begin_ + 1]);
  773. begin_++;
  774. if (begin_ == buffer_.capacity())
  775. begin_ = 0;
  776. ShrinkCapacityIfNecessary();
  777. // Technically popping will not invalidate any iterators since the
  778. // underlying buffer will be stable. But in the future we may want to add a
  779. // feature that resizes the buffer smaller if there is too much wasted
  780. // space. This ensures we can make such a change safely.
  781. IncrementGeneration();
  782. }
  783. void pop_back() {
  784. DCHECK(size());
  785. if (end_ == 0)
  786. end_ = buffer_.capacity() - 1;
  787. else
  788. end_--;
  789. buffer_.DestructRange(&buffer_[end_], &buffer_[end_ + 1]);
  790. ShrinkCapacityIfNecessary();
  791. // See pop_front comment about why this is here.
  792. IncrementGeneration();
  793. }
  794. // ---------------------------------------------------------------------------
  795. // General operations.
  796. void swap(circular_deque& other) {
  797. std::swap(buffer_, other.buffer_);
  798. std::swap(begin_, other.begin_);
  799. std::swap(end_, other.end_);
  800. IncrementGeneration();
  801. }
  802. friend void swap(circular_deque& lhs, circular_deque& rhs) { lhs.swap(rhs); }
  803. private:
  804. friend internal::circular_deque_iterator<T>;
  805. friend internal::circular_deque_const_iterator<T>;
  806. // Moves the items in the given circular buffer to the current one. The
  807. // source is moved from, so it will become invalid. The destination buffer must
  808. // have already been allocated with enough size.
  809. static void MoveBuffer(VectorBuffer& from_buf,
  810. size_t from_begin,
  811. size_t from_end,
  812. VectorBuffer* to_buf,
  813. size_t* to_begin,
  814. size_t* to_end) {
  815. size_t from_capacity = from_buf.capacity();
  816. *to_begin = 0;
  817. if (from_begin < from_end) {
  818. // Contiguous.
  819. from_buf.MoveRange(&from_buf[from_begin], &from_buf[from_end],
  820. to_buf->begin());
  821. *to_end = from_end - from_begin;
  822. } else if (from_begin > from_end) {
  823. // Discontiguous, copy the right side to the beginning of the new buffer.
  824. from_buf.MoveRange(&from_buf[from_begin], &from_buf[from_capacity],
  825. to_buf->begin());
  826. size_t right_size = from_capacity - from_begin;
  827. // Append the left side.
  828. from_buf.MoveRange(&from_buf[0], &from_buf[from_end],
  829. &(*to_buf)[right_size]);
  830. *to_end = right_size + from_end;
  831. } else {
  832. // No items.
  833. *to_end = 0;
  834. }
  835. }
  836. // Expands the buffer size. This assumes the size is larger than the
  837. // number of elements in the vector (it won't call delete on anything).
  838. void SetCapacityTo(size_t new_capacity) {
  839. // Use the capacity + 1 as the internal buffer size to differentiate
  840. // empty and full (see definition of buffer_ below).
  841. VectorBuffer new_buffer(new_capacity + 1);
  842. MoveBuffer(buffer_, begin_, end_, &new_buffer, &begin_, &end_);
  843. buffer_ = std::move(new_buffer);
  844. }
  845. void ExpandCapacityIfNecessary(size_t additional_elts) {
  846. size_t min_new_capacity = size() + additional_elts;
  847. if (capacity() >= min_new_capacity)
  848. return; // Already enough room.
  849. min_new_capacity =
  850. std::max(min_new_capacity, internal::kCircularBufferInitialCapacity);
  851. // std::vector always grows by at least 50%. WTF::Deque grows by at least
  852. // 25%. We expect queue workloads to generally stay at a similar size and
  853. // grow less than a vector might, so use 25%.
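// For example (illustrative): growing a full deque with capacity() == 100 by
// one element yields new_capacity == 125, i.e. an internal buffer of 126.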
  854. size_t new_capacity =
  855. std::max(min_new_capacity, capacity() + capacity() / 4);
  856. SetCapacityTo(new_capacity);
  857. }
  858. void ShrinkCapacityIfNecessary() {
  859. // Don't auto-shrink below this size.
  860. if (capacity() <= internal::kCircularBufferInitialCapacity)
  861. return;
  862. // Shrink when 100% of the size() is wasted.
  863. size_t sz = size();
  864. size_t empty_spaces = capacity() - sz;
  865. if (empty_spaces < sz)
  866. return;
  867. // Leave 1/4 the size as free capacity, not going below the initial
  868. // capacity.
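// For example (illustrative): size() == 20 with capacity() == 50 wastes 30
// slots (>= size()), so the buffer shrinks to max(3, 20 + 20 / 4) == 25.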
  869. size_t new_capacity =
  870. std::max(internal::kCircularBufferInitialCapacity, sz + sz / 4);
  871. if (new_capacity < capacity()) {
  872. // Count extra item to convert to internal capacity.
  873. SetCapacityTo(new_capacity);
  874. }
  875. }
  876. // Backend for clear() but does not resize the internal buffer.
  877. void ClearRetainCapacity() {
  878. // This can't resize(0) because that requires a default constructor to
  879. // compile, which not all contained classes may implement.
  880. DestructRange(begin_, end_);
  881. begin_ = 0;
  882. end_ = 0;
  883. IncrementGeneration();
  884. }
  885. // Calls destructors for the given begin->end indices. The indices may wrap
  886. // around. The buffer is not resized, and the begin_ and end_ members are
  887. // not changed.
  888. void DestructRange(size_t begin, size_t end) {
  889. if (end == begin) {
  890. return;
  891. } else if (end > begin) {
  892. buffer_.DestructRange(&buffer_[begin], &buffer_[end]);
  893. } else {
  894. buffer_.DestructRange(&buffer_[begin], &buffer_[buffer_.capacity()]);
  895. buffer_.DestructRange(&buffer_[0], &buffer_[end]);
  896. }
  897. }
  898. // Makes room for |count| items starting at |*insert_begin|. Since iterators
  899. // are not stable across buffer resizes, |*insert_begin| will be updated to
  900. // point to the beginning of the newly opened position in the new array (it's
  901. // in/out), and |*insert_end| will be set to the end of the newly opened position (it's out-only).
  902. void MakeRoomFor(size_t count, iterator* insert_begin, iterator* insert_end) {
  903. if (count == 0) {
  904. *insert_end = *insert_begin;
  905. return;
  906. }
  907. // The offset from the beginning will be stable across reallocations.
  908. size_t begin_offset = insert_begin->OffsetFromBegin();
  909. ExpandCapacityIfNecessary(count);
  910. insert_begin->index_ = (begin_ + begin_offset) % buffer_.capacity();
  911. *insert_end =
  912. iterator(this, (insert_begin->index_ + count) % buffer_.capacity());
  913. // Update the new end and prepare the iterators for copying.
  914. iterator src = end();
  915. end_ = (end_ + count) % buffer_.capacity();
  916. iterator dest = end();
  917. // Move the elements. This will always involve shifting logically to the
  918. // right, so move in a right-to-left order.
  919. while (true) {
  920. if (src == *insert_begin)
  921. break;
  922. --src;
  923. --dest;
  924. buffer_.MoveRange(&buffer_[src.index_], &buffer_[src.index_ + 1],
  925. &buffer_[dest.index_]);
  926. }
  927. }
  928. #if DCHECK_IS_ON()
  929. // Asserts the given index is dereferenceable. The index is an index into the
  930. // buffer, not an index used by operator[] or at(), which are offsets from
  931. // begin.
  932. void CheckValidIndex(size_t i) const {
  933. if (begin_ <= end_)
  934. DCHECK(i >= begin_ && i < end_);
  935. else
  936. DCHECK((i >= begin_ && i < buffer_.capacity()) || i < end_);
  937. }
  938. // Asserts the given index is either dereferenceable or points to end().
  939. void CheckValidIndexOrEnd(size_t i) const {
  940. if (i != end_)
  941. CheckValidIndex(i);
  942. }
  943. void ValidateIterator(const const_iterator& i) const {
  944. DCHECK(i.parent_deque_ == this);
  945. i.CheckUnstableUsage();
  946. }
  947. // See generation_ below.
  948. void IncrementGeneration() { generation_++; }
  949. #else
  950. // No-op versions of these functions for release builds.
  951. void CheckValidIndex(size_t) const {}
  952. void CheckValidIndexOrEnd(size_t) const {}
  953. void ValidateIterator(const const_iterator& i) const {}
  954. void IncrementGeneration() {}
  955. #endif
  956. // Danger, the buffer_.capacity() is the "internal capacity" which is
  957. // capacity() + 1 since there is an extra item to indicate the end. Otherwise
  958. // being completely empty and completely full are indistinguishable (begin ==
  959. // end). We could add a separate flag to avoid it, but that adds significant
  960. // extra complexity since every computation will have to check for it. Always
  961. // keeping one extra unused element in the buffer makes iterator computations
  962. // much simpler.
  963. //
  964. // Container internal code will want to use buffer_.capacity() for offset
  965. // computations rather than capacity().
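//
// For example (illustrative): with buffer_.capacity() == 4 the deque holds at
// most 3 elements (capacity() == 3). A full deque {a, b, c} with begin_ == 2
// occupies buffer indices 2, 3 and 0, leaving index 1 unused and end_ == 1;
// begin_ == end_ always means empty.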
  966. VectorBuffer buffer_;
  967. size_type begin_ = 0;
  968. size_type end_ = 0;
  969. #if DCHECK_IS_ON()
  970. // Incremented every time a modification is made that could invalidate
  971. // iterators.
  972. uint64_t generation_ = 0;
  973. #endif
  974. };
  975. // Implementations of base::Erase[If] (see base/stl_util.h).
  976. template <class T, class Value>
  977. size_t Erase(circular_deque<T>& container, const Value& value) {
  978. auto it = ranges::remove(container, value);
  979. size_t removed = std::distance(it, container.end());
  980. container.erase(it, container.end());
  981. return removed;
  982. }
  983. template <class T, class Predicate>
  984. size_t EraseIf(circular_deque<T>& container, Predicate pred) {
  985. auto it = ranges::remove_if(container, pred);
  986. size_t removed = std::distance(it, container.end());
  987. container.erase(it, container.end());
  988. return removed;
  989. }
  990. } // namespace base
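// Illustrative use of the erase helpers above:
//
//   base::circular_deque<int> d = {1, 2, 3, 2, 4};
//   size_t removed = base::Erase(d, 2);        // d is {1, 3, 4}, removed == 2.
//   size_t pruned = base::EraseIf(
//       d, [](int v) { return v > 2; });       // d is {1}, pruned == 2.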
  991. #endif // BASE_CONTAINERS_CIRCULAR_DEQUE_H_