GrTRecorder.h

/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrTRecorder_DEFINED
#define GrTRecorder_DEFINED

#include "include/gpu/GrTypes.h"
#include "include/private/SkTLogic.h"
#include "src/core/SkArenaAlloc.h"

/**
 * Records a list of items with a common base type, optional associated data, and
 * permanent memory addresses. It supports forward iteration.
 *
 * This class allocates space for the stored items and associated data in a SkArenaAlloc.
 * There is an overhead of one pointer for each stored item.
 *
 * Upon reset() or destruction, the items are destroyed in the same order they were added,
 * not in reverse (stack) order.
 *
 * @param TBase Common base type of items in the list. It is assumed that the items are
 *              trivially destructible or that TBase has a virtual destructor, since ~TBase()
 *              is called to destroy the items.
 */
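// Example usage: a minimal sketch, assuming hypothetical caller-side types Cmd and DrawCmd
// (used only for illustration); it shows emplace(), forward iteration, and reset().
//
//     class Cmd {
//     public:
//         virtual ~Cmd() = default;
//         virtual void execute() = 0;
//     };
//
//     class DrawCmd : public Cmd {
//     public:
//         DrawCmd(int id) : fID(id) {}
//         void execute() override { /* draw item fID */ }
//     private:
//         int fID;
//     };
//
//     GrTRecorder<Cmd> recorder(1024);   // reserve 1024 bytes up front
//     recorder.emplace<DrawCmd>(7);      // constructed in the arena; address stays valid
//     for (Cmd& cmd : recorder) {        // iterates in insertion order
//         cmd.execute();
//     }
//     recorder.reset();                  // ~Cmd() runs per item, arena memory is freed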
template <typename TBase> class GrTRecorder {
private:
    template <bool IsConst> class IterImpl;

public:
    using iterator = IterImpl<false>;
    using const_iterator = IterImpl<true>;

    /**
     * Create a recorder.
     *
     * @param initialSizeInBytes The amount of memory reserved by the recorder initially,
     *                           and after calls to reset().
     */
    explicit GrTRecorder(size_t initialSizeInBytes) : fArena(initialSizeInBytes) {}
    GrTRecorder(const GrTRecorder&) = delete;
    GrTRecorder& operator=(const GrTRecorder&) = delete;

    ~GrTRecorder() { this->reset(); }

    bool empty() { return !SkToBool(fTail); }

    /** The last item. Must not be empty. */
    TBase& back() {
        SkASSERT(!this->empty());
        return *fTail->get();
    }

    /** Forward mutable iteration */
    iterator begin() { return iterator(fHead); }
    iterator end() { return iterator(nullptr); }

    /** Forward const iteration */
    const_iterator begin() const { return const_iterator(fHead); }
    const_iterator end() const { return const_iterator(nullptr); }

    /** Destructs all items in the list and resets to empty. Frees memory allocated from the
        arena. */
    void reset();

    /**
     * Emplace a new TItem (which derives from TBase) in the recorder. This requires
     * equivalence between reinterpret_cast<TBase*> and static_cast<TBase*> when operating on
     * TItem*. Multiple inheritance may make this untrue; it is asserted at runtime.
     */
    template <typename TItem, typename... Args> TItem& emplace(Args... args) {
        return this->emplaceWithData<TItem, Args...>(0, std::forward<Args>(args)...);
    }
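
    // A sketch of when the cast-equivalence requirement breaks, reusing the hypothetical Cmd
    // from the example above (Logger and BadCmd are also hypothetical): if an item lists
    // another polymorphic base before Cmd, e.g.
    //
    //     class Logger { public: virtual ~Logger() = default; };
    //     class BadCmd : public Logger, public Cmd { /* ... */ };
    //
    // then on common ABIs the Cmd subobject sits at a nonzero offset inside BadCmd, so
    // reinterpret_cast<Cmd*> and static_cast<Cmd*> disagree and emplace<BadCmd>() trips the
    // runtime assert.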
    /**
     * Emplace a new TItem (which derives from TBase) in the recorder with extra data space.
     * The extra data immediately follows the stored item with no extra alignment. E.g.,
     * void* extraData = &recorder->emplaceWithData<Subclass>(dataSize, ...) + 1;
     *
     * This requires equivalence between reinterpret_cast<TBase*> and static_cast<TBase*> when
     * operating on TItem*. Multiple inheritance may make this untrue; it is asserted at
     * runtime.
     */
    template <typename TItem, typename... Args>
    SK_WHEN((std::is_base_of<TBase, TItem>::value), TItem&)
    emplaceWithData(size_t extraDataSize, Args... args);
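
    // A sketch of using the extra data space (recorder, Subclass, n, and src are hypothetical;
    // the extra bytes are raw and get no additional alignment, so the caller owns their layout
    // and lifetime):
    //
    //     char* extra = reinterpret_cast<char*>(
    //             &recorder.emplaceWithData<Subclass>(n /*extraDataSize*/) + 1);
    //     memcpy(extra, src, n);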
private:
    struct Header {
        Header* fNext = nullptr;
        // We always store the T immediately after the header (and ensure proper alignment).
        // See the emplaceWithData() implementation.
        TBase* get() const { return reinterpret_cast<TBase*>(const_cast<Header*>(this) + 1); }
    };

    SkArenaAlloc fArena;
    Header* fHead = nullptr;
    Header* fTail = nullptr;
};

////////////////////////////////////////////////////////////////////////////////

template <typename TBase>
template <typename TItem, typename... Args>
inline SK_WHEN((std::is_base_of<TBase, TItem>::value), TItem&)
GrTRecorder<TBase>::emplaceWithData(size_t extraDataSize, Args... args) {
    static constexpr size_t kTAlign = alignof(TItem);
    static constexpr size_t kHeaderAlign = alignof(Header);
    static constexpr size_t kAllocAlign = kTAlign > kHeaderAlign ? kTAlign : kHeaderAlign;
    static constexpr size_t kTItemOffset = GrSizeAlignUp(sizeof(Header), kAllocAlign);
    // We're assuming that if we back up from kTItemOffset by sizeof(Header) we will still be
    // aligned.
    GR_STATIC_ASSERT(sizeof(Header) % alignof(Header) == 0);
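    // Resulting layout of one arena allocation (padding is present only when kTItemOffset
    // exceeds sizeof(Header)):
    //
    //   alloc
    //   |-- padding --|-- Header --|-- TItem --|-- extraDataSize bytes --|
    //                 ^            ^
    //                 |            alloc + kTItemOffset
    //                 alloc + kTItemOffset - sizeof(Header)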
    const size_t totalSize = kTItemOffset + sizeof(TItem) + extraDataSize;
    auto alloc = reinterpret_cast<char*>(fArena.makeBytesAlignedTo(totalSize, kAllocAlign));
    Header* header = new (alloc + kTItemOffset - sizeof(Header)) Header();
    if (fTail) {
        fTail->fNext = header;
    }
    fTail = header;
    if (!fHead) {
        fHead = header;
    }
    auto* item = new (alloc + kTItemOffset) TItem(std::forward<Args>(args)...);
    // We require that we can reinterpret_cast between TBase* and TItem*. Could not figure out
    // how to statically assert this. See proposal for std::is_initial_base_of here:
    // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2016/p0466r0.pdf
    SkASSERT(reinterpret_cast<uintptr_t>(item) ==
             reinterpret_cast<uintptr_t>(static_cast<TBase*>(item)));
    return *item;
}

template <typename TBase> inline void GrTRecorder<TBase>::reset() {
    for (auto& i : *this) {
        i.~TBase();
    }
    GR_STATIC_ASSERT(std::is_trivially_destructible<Header>::value);
    fHead = fTail = nullptr;
    fArena.reset();
}

/**
 * Iterates through a recorder front-to-back, const or not.
 */
template <typename TBase> template <bool IsConst> class GrTRecorder<TBase>::IterImpl {
private:
    using T = typename std::conditional<IsConst, const TBase, TBase>::type;

public:
    IterImpl() = default;

    IterImpl operator++() {
        fCurr = fCurr->fNext;
        return *this;
    }

    IterImpl operator++(int) {
        auto old = fCurr;
        fCurr = fCurr->fNext;
        return {old};
    }

    T& operator*() const { return *fCurr->get(); }
    T* operator->() const { return fCurr->get(); }

    bool operator==(const IterImpl& that) const { return fCurr == that.fCurr; }
    bool operator!=(const IterImpl& that) const { return !(*this == that); }

private:
    IterImpl(Header* curr) : fCurr(curr) {}

    Header* fCurr = nullptr;

    friend class GrTRecorder<TBase>; // To construct from Header.
};

#endif