GrCCPathCache.h

/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "include/private/SkTHash.h"
#include "src/core/SkExchange.h"
#include "src/core/SkTInternalLList.h"
#include "src/gpu/ccpr/GrCCAtlas.h"
#include "src/gpu/ccpr/GrCCPathProcessor.h"
#include "src/gpu/geometry/GrShape.h"

class GrCCPathCacheEntry;
class GrShape;

/**
 * This class implements an LRU cache that maps from GrShape to GrCCPathCacheEntry objects. Shapes
 * are only given one entry in the cache, so any time they are accessed with a different matrix, the
 * old entry gets evicted.
 */
class GrCCPathCache {
public:
    GrCCPathCache(uint32_t contextUniqueID);
    ~GrCCPathCache();

    class Key : public SkPathRef::GenIDChangeListener {
    public:
        static sk_sp<Key> Make(uint32_t pathCacheUniqueID, int dataCountU32,
                               const void* data = nullptr);

        uint32_t pathCacheUniqueID() const { return fPathCacheUniqueID; }

        int dataSizeInBytes() const { return fDataSizeInBytes; }
        const uint32_t* data() const;

        void resetDataCountU32(int dataCountU32) {
            SkASSERT(dataCountU32 <= fDataReserveCountU32);
            fDataSizeInBytes = dataCountU32 * sizeof(uint32_t);
        }
        uint32_t* data();

        bool operator==(const Key& that) const {
            return fDataSizeInBytes == that.fDataSizeInBytes &&
                   !memcmp(this->data(), that.data(), fDataSizeInBytes);
        }

        // Called when our corresponding path is modified or deleted. Not threadsafe.
        void onChange() override;

    private:
        Key(uint32_t pathCacheUniqueID, int dataCountU32)
                : fPathCacheUniqueID(pathCacheUniqueID)
                , fDataSizeInBytes(dataCountU32 * sizeof(uint32_t))
                SkDEBUGCODE(, fDataReserveCountU32(dataCountU32)) {
            SkASSERT(SK_InvalidUniqueID != fPathCacheUniqueID);
        }

        const uint32_t fPathCacheUniqueID;
        int fDataSizeInBytes;
        SkDEBUGCODE(const int fDataReserveCountU32);
        // The GrShape's unstyled key is stored as a variable-length footer to this class. GetKey
        // provides access to it.
    };
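
    // A rough picture of a single Key allocation, inferred from the footer comment above (the
    // exact layout and padding are implementation details of Make()):
    //
    //   [ Key fields | uint32_t data[dataCountU32] ]   <-- one contiguous allocation
    //
    // data() exposes the footer, and operator== compares footers byte-for-byte via memcmp.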

    // Stores the components of a transformation that affect a path mask (i.e. everything but
    // integer translation). During construction, any integer portions of the matrix's translate
    // are shaved off and returned to the caller. The caller is responsible for those integer
    // shifts.
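    //
    // For example (illustrative values; the exact rounding convention is an implementation detail
    // of the constructor): a view matrix translate of (10.7, 3.2) would split into an integer
    // 'shift' of roughly (10, 3) and a subpixel remainder near (0.7, 0.2), the latter stored in
    // fSubpixelTranslate on non-AOSP builds.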
    struct MaskTransform {
        MaskTransform(const SkMatrix& m, SkIVector* shift);
        float fMatrix2x2[4];
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
        // On AOSP we follow HWUI's lead and ignore the subpixel translate.
        float fSubpixelTranslate[2];
#endif
    };

    // Represents a ref on a GrCCPathCacheEntry that should only be used during the current flush.
    class OnFlushEntryRef : SkNoncopyable {
    public:
        static OnFlushEntryRef OnFlushRef(GrCCPathCacheEntry*);
        OnFlushEntryRef() = default;
        OnFlushEntryRef(OnFlushEntryRef&& ref) : fEntry(skstd::exchange(ref.fEntry, nullptr)) {}
        ~OnFlushEntryRef();

        GrCCPathCacheEntry* get() const { return fEntry; }
        GrCCPathCacheEntry* operator->() const { return fEntry; }
        GrCCPathCacheEntry& operator*() const { return *fEntry; }
        explicit operator bool() const { return fEntry; }
        void operator=(OnFlushEntryRef&& ref) { fEntry = skstd::exchange(ref.fEntry, nullptr); }

    private:
        OnFlushEntryRef(GrCCPathCacheEntry* entry) : fEntry(entry) {}
        GrCCPathCacheEntry* fEntry = nullptr;
    };

    // Finds an entry in the cache that matches the given shape and transformation matrix.
    // 'maskShift' is filled with an integer post-translate that the caller must apply when drawing
    // the entry's mask to the device.
    //
    // NOTE: Shapes are only given one entry, so any time they are accessed with a new
    // transformation, the old entry gets evicted.
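    //
    // A hedged usage sketch ('onFlushRP', 'shape', 'drawBounds', and 'matrix' are assumptions for
    // illustration, not declared in this header):
    //
    //   SkIVector maskShift;
    //   if (auto ref = pathCache->find(onFlushRP, shape, drawBounds, matrix, &maskShift)) {
    //       if (ref->cachedAtlas()) {
    //           // Reuse the cached mask, applying 'maskShift' as an integer post-translate.
    //       }
    //   }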
    OnFlushEntryRef find(GrOnFlushResourceProvider*, const GrShape&,
                         const SkIRect& clippedDrawBounds, const SkMatrix& viewMatrix,
                         SkIVector* maskShift);

    void doPreFlushProcessing();

    void purgeEntriesOlderThan(GrProxyProvider*, const GrStdSteadyClock::time_point& purgeTime);

    // As we evict entries from our local path cache, we accumulate a list of invalidated atlas
    // textures. This call purges the invalidated atlas textures from the mainline GrResourceCache.
    // This call is available with two different "provider" objects, to accommodate whatever might
    // be available at the callsite.
    void purgeInvalidatedAtlasTextures(GrOnFlushResourceProvider*);
    void purgeInvalidatedAtlasTextures(GrProxyProvider*);

private:
    // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It provides static
    // methods for SkTHash, and can only be moved. This guarantees the hash table holds exactly one
    // reference for each entry. Also, when a HashNode goes out of scope, that means it is exiting
    // the hash table. We take that opportunity to remove it from the LRU list and do some cleanup.
    class HashNode : SkNoncopyable {
    public:
        static const Key& GetKey(const HashNode&);
        inline static uint32_t Hash(const Key& key) {
            return GrResourceKeyHash(key.data(), key.dataSizeInBytes());
        }

        HashNode() = default;
        HashNode(GrCCPathCache*, sk_sp<Key>, const MaskTransform&, const GrShape&);
        HashNode(HashNode&& node)
                : fPathCache(node.fPathCache), fEntry(std::move(node.fEntry)) {
            SkASSERT(!node.fEntry);
        }

        ~HashNode();

        void operator=(HashNode&& node);

        GrCCPathCacheEntry* entry() const { return fEntry.get(); }

    private:
        GrCCPathCache* fPathCache = nullptr;
        sk_sp<GrCCPathCacheEntry> fEntry;
    };

    GrStdSteadyClock::time_point quickPerFlushTimestamp() {
        // time_point::min() means it's time to update fPerFlushTimestamp with a newer clock read.
        if (GrStdSteadyClock::time_point::min() == fPerFlushTimestamp) {
            fPerFlushTimestamp = GrStdSteadyClock::now();
        }
        return fPerFlushTimestamp;
    }

    void evict(const GrCCPathCache::Key&, GrCCPathCacheEntry* = nullptr);

    // Evicts all the cache entries whose keys have been queued up in fInvalidatedKeysInbox via
    // SkPath listeners.
    void evictInvalidatedCacheKeys();

    const uint32_t fContextUniqueID;

    SkTHashTable<HashNode, const Key&> fHashTable;
    SkTInternalLList<GrCCPathCacheEntry> fLRU;
    SkMessageBus<sk_sp<Key>>::Inbox fInvalidatedKeysInbox;
    sk_sp<Key> fScratchKey;  // Reused for creating a temporary key in the find() method.

    // We only read the clock once per flush, and cache it in this variable. This prevents
    // excessive clock reads for cache timestamps, which might degrade performance.
    GrStdSteadyClock::time_point fPerFlushTimestamp = GrStdSteadyClock::time_point::min();

    // As we evict entries from our local path cache, we accumulate lists of invalidated atlas
    // textures in these two members. We hold these until we purge them from the GrResourceCache
    // (e.g., via purgeInvalidatedAtlasTextures()).
    SkSTArray<4, sk_sp<GrTextureProxy>> fInvalidatedProxies;
    SkSTArray<4, GrUniqueKey> fInvalidatedProxyUniqueKeys;

    friend class GrCCCachedAtlas;  // To append to fInvalidatedProxies, fInvalidatedProxyUniqueKeys.

public:
    const SkTHashTable<HashNode, const Key&>& testingOnly_getHashTable() const;
    const SkTInternalLList<GrCCPathCacheEntry>& testingOnly_getLRU() const;
};
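
// A rough per-flush sequence for the cache, sketched from the method comments above (the exact
// ordering and the 'onFlushRP'/'proxyProvider' call sites are assumptions for illustration):
//
//   pathCache.doPreFlushProcessing();
//   // ... call find() and draw paths for the duration of the flush ...
//   pathCache.purgeInvalidatedAtlasTextures(onFlushRP);
//   pathCache.purgeEntriesOlderThan(proxyProvider, purgeTime);  // e.g., on a purge request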

/**
 * This class stores all the data necessary to draw a specific path + matrix combination from their
 * corresponding cached atlas.
 */
class GrCCPathCacheEntry : public GrNonAtomicRef<GrCCPathCacheEntry> {
public:
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);

    ~GrCCPathCacheEntry() {
        SkASSERT(this->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
        SkASSERT(!fCachedAtlas);
        SkASSERT(0 == fOnFlushRefCnt);
    }

    const GrCCPathCache::Key& cacheKey() const { SkASSERT(fCacheKey); return *fCacheKey; }

    // The number of flushes during which this specific entry (path + matrix combination) has been
    // pulled from the path cache. If a path is pulled from the cache more than once in a single
    // flush, the hit count is only incremented once.
    //
    // If the entry did not previously exist, its hit count will be 1.
    int hitCount() const { return fHitCount; }

    // The cumulative region of the path that has been drawn during the lifetime of this cache
    // entry (as defined by the 'clippedDrawBounds' parameter for GrCCPathCache::find).
    const SkIRect& hitRect() const { return fHitRect; }

    const GrCCCachedAtlas* cachedAtlas() const { return fCachedAtlas.get(); }

    const SkIRect& devIBounds() const { return fDevIBounds; }
    int width() const { return fDevIBounds.width(); }
    int height() const { return fDevIBounds.height(); }

    enum class ReleaseAtlasResult : bool {
        kNone,
        kDidInvalidateFromCache
    };

    // Called once our path has been rendered into the mainline CCPR (fp16, coverage count) atlas.
    // The caller will stash this atlas texture away after drawing, and during the next flush,
    // recover it and attempt to copy any paths that got reused into permanent 8-bit atlases.
    void setCoverageCountAtlas(
            GrOnFlushResourceProvider*, GrCCAtlas*, const SkIVector& atlasOffset,
            const GrOctoBounds& octoBounds, const SkIRect& devIBounds, const SkIVector& maskShift);

    // Called once our path mask has been copied into a permanent, 8-bit atlas. This method points
    // the entry at the new atlas and updates the GrCCCachedAtlas data.
    ReleaseAtlasResult upgradeToLiteralCoverageAtlas(GrCCPathCache*, GrOnFlushResourceProvider*,
                                                     GrCCAtlas*, const SkIVector& newAtlasOffset);

private:
    using MaskTransform = GrCCPathCache::MaskTransform;

    GrCCPathCacheEntry(sk_sp<GrCCPathCache::Key> cacheKey, const MaskTransform& maskTransform)
            : fCacheKey(std::move(cacheKey)), fMaskTransform(maskTransform) {
    }

    bool hasBeenEvicted() const { return fCacheKey->shouldUnregisterFromPath(); }

    // Resets this entry back to not having an atlas, and purges its previous atlas texture from
    // the resource cache if needed.
    ReleaseAtlasResult releaseCachedAtlas(GrCCPathCache*);

    sk_sp<GrCCPathCache::Key> fCacheKey;
    GrStdSteadyClock::time_point fTimestamp;
    int fHitCount = 0;
    SkIRect fHitRect = SkIRect::MakeEmpty();

    sk_sp<GrCCCachedAtlas> fCachedAtlas;
    SkIVector fAtlasOffset;

    MaskTransform fMaskTransform;
    GrOctoBounds fOctoBounds;
    SkIRect fDevIBounds;

    int fOnFlushRefCnt = 0;

    friend class GrCCPathCache;
    friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
                                                 uint64_t color, GrFillRule);  // To access data.

public:
    int testingOnly_peekOnFlushRefCnt() const;
};

/**
 * Encapsulates the data for an atlas whose texture is stored in the mainline GrResourceCache. Many
 * instances of GrCCPathCacheEntry will reference the same GrCCCachedAtlas.
 *
 * We use this object to track the percentage of the original atlas pixels that could still ever
 * potentially be reused (i.e., those which still represent an extant path). When the percentage
 * of useful pixels drops below 50%, we purge the entire texture from the resource cache.
 *
 * This object also holds a ref on the atlas's actual texture proxy during flush. When
 * fOnFlushRefCnt decrements back down to zero, we release fOnFlushProxy and reset it back to null.
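 *
 * For example (illustrative numbers, not taken from this header): if entries covering 6,000 of an
 * atlas's original 10,000 path pixels have been invalidated, only 40% of the texture remains
 * potentially useful, so the next invalidatePathPixels() call would purge it from the cache.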
 */
class GrCCCachedAtlas : public GrNonAtomicRef<GrCCCachedAtlas> {
public:
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;

    GrCCCachedAtlas(GrCCAtlas::CoverageType type, const GrUniqueKey& textureKey,
                    sk_sp<GrTextureProxy> onFlushProxy)
            : fCoverageType(type)
            , fTextureKey(textureKey)
            , fOnFlushProxy(std::move(onFlushProxy)) {}

    ~GrCCCachedAtlas() {
        SkASSERT(!fOnFlushProxy);
        SkASSERT(!fOnFlushRefCnt);
    }

    GrCCAtlas::CoverageType coverageType() const { return fCoverageType; }
    const GrUniqueKey& textureKey() const { return fTextureKey; }

    GrTextureProxy* getOnFlushProxy() const { return fOnFlushProxy.get(); }

    void setOnFlushProxy(sk_sp<GrTextureProxy> proxy) {
        SkASSERT(!fOnFlushProxy);
        fOnFlushProxy = std::move(proxy);
    }

    void addPathPixels(int numPixels) { fNumPathPixels += numPixels; }
    ReleaseAtlasResult invalidatePathPixels(GrCCPathCache*, int numPixels);

    int peekOnFlushRefCnt() const { return fOnFlushRefCnt; }
    void incrOnFlushRefCnt(int count = 1) const {
        SkASSERT(count > 0);
        SkASSERT(fOnFlushProxy);
        fOnFlushRefCnt += count;
    }
    void decrOnFlushRefCnt(int count = 1) const;

private:
    const GrCCAtlas::CoverageType fCoverageType;
    const GrUniqueKey fTextureKey;

    int fNumPathPixels = 0;
    int fNumInvalidatedPathPixels = 0;
    bool fIsInvalidatedFromResourceCache = false;

    mutable sk_sp<GrTextureProxy> fOnFlushProxy;
    mutable int fOnFlushRefCnt = 0;

public:
    int testingOnly_peekOnFlushRefCnt() const;
};


inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* pathCache, sk_sp<Key> key,
                                         const MaskTransform& m, const GrShape& shape)
        : fPathCache(pathCache)
        , fEntry(new GrCCPathCacheEntry(key, m)) {
    SkASSERT(shape.hasUnstyledKey());
    shape.addGenIDChangeListener(std::move(key));
}

inline const GrCCPathCache::Key& GrCCPathCache::HashNode::GetKey(
        const GrCCPathCache::HashNode& node) {
    return *node.entry()->fCacheKey;
}

inline GrCCPathCache::HashNode::~HashNode() {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
}

inline void GrCCPathCache::HashNode::operator=(HashNode&& node) {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
    fEntry = skstd::exchange(node.fEntry, nullptr);
}

inline void GrCCPathProcessor::Instance::set(
        const GrCCPathCacheEntry& entry, const SkIVector& shift, uint64_t color,
        GrFillRule fillRule) {
    float dx = (float)shift.fX, dy = (float)shift.fY;
    this->set(entry.fOctoBounds.makeOffset(dx, dy), entry.fAtlasOffset - shift, color, fillRule);
}
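
// Note on the arithmetic above (inferred from find()'s contract; not stated explicitly in this
// file): 'shift' is the integer post-translate that find() shaved off the view matrix. Offsetting
// the octo bounds by (+dx, +dy) moves the draw to its true device-space position, while
// subtracting the same shift from fAtlasOffset keeps the instance sampling the unmoved mask in
// the atlas.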

#endif