SkImage_Lazy.cpp

/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/image/SkImage_Lazy.h"

#include "include/core/SkBitmap.h"
#include "include/core/SkData.h"
#include "include/core/SkImageGenerator.h"
#include "src/core/SkBitmapCache.h"
#include "src/core/SkCachedData.h"
#include "src/core/SkImagePriv.h"
#include "src/core/SkNextID.h"

#if SK_SUPPORT_GPU
#include "include/gpu/GrSamplerState.h"
#include "include/private/GrRecordingContext.h"
#include "include/private/GrResourceKey.h"
#include "src/gpu/GrCaps.h"
#include "src/gpu/GrGpuResourcePriv.h"
#include "src/gpu/GrImageTextureMaker.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrYUVProvider.h"
#include "src/gpu/SkGr.h"
#endif

// Ref-counted tuple(SkImageGenerator, SkMutex) which allows sharing one generator among N images
class SharedGenerator final : public SkNVRefCnt<SharedGenerator> {
public:
    static sk_sp<SharedGenerator> Make(std::unique_ptr<SkImageGenerator> gen) {
        return gen ? sk_sp<SharedGenerator>(new SharedGenerator(std::move(gen))) : nullptr;
    }

    // This is thread safe. It is a const field set in the constructor.
    const SkImageInfo& getInfo() { return fGenerator->getInfo(); }

private:
    explicit SharedGenerator(std::unique_ptr<SkImageGenerator> gen)
            : fGenerator(std::move(gen)) {
        SkASSERT(fGenerator);
    }

    friend class ScopedGenerator;
    friend class SkImage_Lazy;

    std::unique_ptr<SkImageGenerator> fGenerator;
    SkMutex                           fMutex;
};

///////////////////////////////////////////////////////////////////////////////
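// Validator normalizes the inputs used to construct an SkImage_Lazy: it rejects an empty or
// missing generator, validates an optional subset against the generator's bounds, applies any
// color type / color space overrides, and mints a fresh unique ID whenever the resulting image
// no longer corresponds one-to-one to the raw generator output.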
SkImage_Lazy::Validator::Validator(sk_sp<SharedGenerator> gen, const SkIRect* subset,
                                   const SkColorType* colorType, sk_sp<SkColorSpace> colorSpace)
        : fSharedGenerator(std::move(gen)) {
    if (!fSharedGenerator) {
        return;
    }

    // The following generator accessors are safe without acquiring the mutex (const getters).
    // TODO: refactor to use a ScopedGenerator instead, for clarity.
    const SkImageInfo& info = fSharedGenerator->fGenerator->getInfo();
    if (info.isEmpty()) {
        fSharedGenerator.reset();
        return;
    }

    fUniqueID = fSharedGenerator->fGenerator->uniqueID();
    const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
    if (subset) {
        if (!bounds.contains(*subset)) {
            fSharedGenerator.reset();
            return;
        }
        if (*subset != bounds) {
            // we need a different uniqueID since we really are a subset of the raw generator
            fUniqueID = SkNextID::ImageID();
        }
    } else {
        subset = &bounds;
    }

    fInfo   = info.makeWH(subset->width(), subset->height());
    fOrigin = SkIPoint::Make(subset->x(), subset->y());
    if (colorType || colorSpace) {
        if (colorType) {
            fInfo = fInfo.makeColorType(*colorType);
        }
        if (colorSpace) {
            fInfo = fInfo.makeColorSpace(colorSpace);
        }
        fUniqueID = SkNextID::ImageID();
    }
}

///////////////////////////////////////////////////////////////////////////////

// Helper for exclusive access to a shared generator.
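// SkImageGenerator makes no thread-safety guarantees, so every call into the generator is
// funneled through this RAII wrapper, which holds the SharedGenerator's mutex for the lifetime
// of the scope.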
class SkImage_Lazy::ScopedGenerator {
public:
    ScopedGenerator(const sk_sp<SharedGenerator>& gen)
        : fSharedGenerator(gen)
        , fAutoAquire(gen->fMutex) {}

    SkImageGenerator* operator->() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

    operator SkImageGenerator*() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

private:
    const sk_sp<SharedGenerator>& fSharedGenerator;
    SkAutoMutexExclusive          fAutoAquire;
};

///////////////////////////////////////////////////////////////////////////////
SkImage_Lazy::SkImage_Lazy(Validator* validator)
        : INHERITED(validator->fInfo, validator->fUniqueID)
        , fSharedGenerator(std::move(validator->fSharedGenerator))
        , fOrigin(validator->fOrigin) {
    SkASSERT(fSharedGenerator);
    fUniqueID = validator->fUniqueID;
}

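// The destructor broadcasts any queued unique-key invalidation messages so that GPU textures
// cached under this image's keys can be purged once the image itself is gone.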
SkImage_Lazy::~SkImage_Lazy() {
#if SK_SUPPORT_GPU
    for (int i = 0; i < fUniqueKeyInvalidatedMessages.count(); ++i) {
        SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(*fUniqueKeyInvalidatedMessages[i]);
    }
    fUniqueKeyInvalidatedMessages.deleteAll();
#endif
}

//////////////////////////////////////////////////////////////////////////////////////////////////
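// Decodes the generator's output into pmap. When pmap only covers a window of the generator's
// bounds (offset by originX/originY), the full image is decoded into a temporary allocation
// first and the requested window is then copied out. Returns false if the window is out of
// bounds or if allocation/decoding fails.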
static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
    const int genW = gen->getInfo().width();
    const int genH = gen->getInfo().height();
    const SkIRect srcR = SkIRect::MakeWH(genW, genH);
    const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
    if (!srcR.contains(dstR)) {
        return false;
    }

    // If they are requesting a subset, we have to have a temp allocation for full image, and
    // then copy the subset into their allocation
    SkBitmap  full;
    SkPixmap  fullPM;
    const SkPixmap* dstPM = &pmap;
    if (srcR != dstR) {
        if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
            return false;
        }
        if (!full.peekPixels(&fullPM)) {
            return false;
        }
        dstPM = &fullPM;
    }

    if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
        return false;
    }

    if (srcR != dstR) {
        if (!full.readPixels(pmap, originX, originY)) {
            return false;
        }
    }
    return true;
}

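// getROPixels produces an immutable raster copy of the lazy image. With kAllow_CachingHint the
// decoded pixels are installed in (and later found through) SkBitmapCache; otherwise the decode
// lands in a private allocation owned by the caller's bitmap.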
bool SkImage_Lazy::getROPixels(SkBitmap* bitmap, SkImage::CachingHint chint) const {
    auto check_output_bitmap = [bitmap]() {
        SkASSERT(bitmap->isImmutable());
        SkASSERT(bitmap->getPixels());
        (void)bitmap;
    };

    auto desc = SkBitmapCacheDesc::Make(this);
    if (SkBitmapCache::Find(desc, bitmap)) {
        check_output_bitmap();
        return true;
    }

    if (SkImage::kAllow_CachingHint == chint) {
        SkPixmap pmap;
        SkBitmapCache::RecPtr cacheRec = SkBitmapCache::Alloc(desc, this->imageInfo(), &pmap);
        if (!cacheRec ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), pmap,
                             fOrigin.x(), fOrigin.y())) {
            return false;
        }
        SkBitmapCache::Add(std::move(cacheRec), bitmap);
        this->notifyAddedToRasterCache();
    } else {
        if (!bitmap->tryAllocPixels(this->imageInfo()) ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), bitmap->pixmap(), fOrigin.x(),
                             fOrigin.y())) {
            return false;
        }
        bitmap->setImmutable();
    }

    check_output_bitmap();
    return true;
}

//////////////////////////////////////////////////////////////////////////////////////////////////
bool SkImage_Lazy::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                int srcX, int srcY, CachingHint chint) const {
    SkBitmap bm;
    if (this->getROPixels(&bm, chint)) {
        return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
    }
    return false;
}

sk_sp<SkData> SkImage_Lazy::onRefEncoded() const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->refEncodedData();
}

bool SkImage_Lazy::onIsValid(GrContext* context) const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->isValid(context);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
#if SK_SUPPORT_GPU
sk_sp<GrTextureProxy> SkImage_Lazy::asTextureProxyRef(GrRecordingContext* context,
                                                      const GrSamplerState& params,
                                                      SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }

    GrImageTextureMaker textureMaker(context, this, kAllow_CachingHint);
    return textureMaker.refTextureProxyForParams(params, scaleAdjust);
}
#endif

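// Subsetting a lazy image decodes nothing: the new image wraps the same SharedGenerator with a
// subset rectangle translated into the generator's coordinate space.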
sk_sp<SkImage> SkImage_Lazy::onMakeSubset(GrRecordingContext* context,
                                          const SkIRect& subset) const {
    SkASSERT(this->bounds().contains(subset));
    SkASSERT(this->bounds() != subset);

    const SkIRect generatorSubset = subset.makeOffset(fOrigin.x(), fOrigin.y());
    const SkColorType colorType = this->colorType();
    Validator validator(fSharedGenerator, &generatorSubset, &colorType, this->refColorSpace());
    return validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
}

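// Color type / color space conversion is also deferred. The most recent conversion result is
// memoized under fOnMakeColorTypeAndSpaceMutex so that repeated identical requests return the
// same wrapper image.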
sk_sp<SkImage> SkImage_Lazy::onMakeColorTypeAndColorSpace(GrRecordingContext*,
                                                          SkColorType targetCT,
                                                          sk_sp<SkColorSpace> targetCS) const {
    SkAutoMutexExclusive autoAquire(fOnMakeColorTypeAndSpaceMutex);
    if (fOnMakeColorTypeAndSpaceResult &&
        targetCT == fOnMakeColorTypeAndSpaceResult->colorType() &&
        SkColorSpace::Equals(targetCS.get(), fOnMakeColorTypeAndSpaceResult->colorSpace())) {
        return fOnMakeColorTypeAndSpaceResult;
    }
    const SkIRect generatorSubset =
            SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), this->width(), this->height());
    Validator validator(fSharedGenerator, &generatorSubset, &targetCT, targetCS);
    sk_sp<SkImage> result = validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
    if (result) {
        fOnMakeColorTypeAndSpaceResult = result;
    }
    return result;
}

sk_sp<SkImage> SkImage::MakeFromGenerator(std::unique_ptr<SkImageGenerator> generator,
                                          const SkIRect* subset) {
    SkImage_Lazy::Validator
            validator(SharedGenerator::Make(std::move(generator)), subset, nullptr, nullptr);

    return validator ? sk_make_sp<SkImage_Lazy>(&validator) : nullptr;
}

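// A typical client use of this factory looks like the following sketch (MyGenerator is a
// hypothetical caller-provided SkImageGenerator subclass; it is not part of this file):
//
//     std::unique_ptr<SkImageGenerator> gen(new MyGenerator(...));
//     sk_sp<SkImage> image = SkImage::MakeFromGenerator(std::move(gen));
//     // No pixels are decoded until the image is drawn, read back, or uploaded to the GPU.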
//////////////////////////////////////////////////////////////////////////////////////////////////
#if SK_SUPPORT_GPU

void SkImage_Lazy::makeCacheKeyFromOrigKey(const GrUniqueKey& origKey,
                                           GrUniqueKey* cacheKey) const {
    SkASSERT(!cacheKey->isValid());
    if (origKey.isValid()) {
        static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
        GrUniqueKey::Builder builder(cacheKey, origKey, kDomain, 0, "Image");
    }
}

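// Adapts an SkImageGenerator to the GrYUVProvider interface so that generators able to emit
// YUVA8 planes can feed the GPU YUV->RGB conversion path used by lockTextureProxy() below.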
class Generator_GrYUVProvider : public GrYUVProvider {
public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

private:
    uint32_t onGetID() const override { return fGen->uniqueID(); }

    bool onQueryYUVA8(SkYUVASizeInfo* sizeInfo,
                      SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                      SkYUVColorSpace* colorSpace) const override {
        return fGen->queryYUVA8(sizeInfo, yuvaIndices, colorSpace);
    }

    bool onGetYUVA8Planes(const SkYUVASizeInfo& sizeInfo,
                          const SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                          void* planes[]) override {
        return fGen->getYUVA8Planes(sizeInfo, yuvaIndices, planes);
    }

    SkImageGenerator* fGen;

    typedef GrYUVProvider INHERITED;
};

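// Associates `key` with `proxy` in the proxy provider's cache. If a non-mipped proxy already
// owned the key (originalProxy), the key is first removed from it so the new mipped proxy can
// take it over.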
static void set_key_on_proxy(GrProxyProvider* proxyProvider,
                             GrTextureProxy* proxy, GrTextureProxy* originalProxy,
                             const GrUniqueKey& key) {
    if (key.isValid()) {
        if (originalProxy && originalProxy->getUniqueKey().isValid()) {
            SkASSERT(originalProxy->getUniqueKey() == key);
            SkASSERT(GrMipMapped::kYes == proxy->mipMapped() &&
                     GrMipMapped::kNo == originalProxy->mipMapped());
            // If we had an originalProxy with a valid key, that means there already is a proxy in
            // the cache which matches the key, but it does not have mip levels and we require
            // them. Thus we must remove the unique key from that proxy.
            SkASSERT(originalProxy->getUniqueKey() == key);
            proxyProvider->removeUniqueKeyFromProxy(originalProxy);
        }
        proxyProvider->assignUniqueKeyToProxy(key, proxy);
    }
}

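// Surfaces the generator's YUVA planes (if it can supply them) to GrYUVProvider-based callers;
// returns null when the generator has no YUVA data to offer.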
sk_sp<SkCachedData> SkImage_Lazy::getPlanes(SkYUVASizeInfo* yuvaSizeInfo,
                                            SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                                            SkYUVColorSpace* yuvColorSpace,
                                            const void* planes[SkYUVASizeInfo::kMaxCount]) {
    ScopedGenerator generator(fSharedGenerator);
    Generator_GrYUVProvider provider(generator);

    sk_sp<SkCachedData> data = provider.getPlanes(yuvaSizeInfo, yuvaIndices, yuvColorSpace, planes);
    if (!data) {
        return nullptr;
    }

    return data;
}

/*
 *  We have 4 ways to try to return a texture (in sorted order)
 *
 *  1. Check the cache for a pre-existing one
 *  2. Ask the generator to natively create one
 *  3. Ask the generator to return YUV planes, which the GPU can convert
 *  4. Ask the generator to return RGB(A) data, which the GPU can convert
 */
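// Along each path that creates a new proxy under a valid key, an invalidation message is queued
// in fUniqueKeyInvalidatedMessages so the destructor can purge the cached texture when this
// image is destroyed.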
sk_sp<GrTextureProxy> SkImage_Lazy::lockTextureProxy(
        GrRecordingContext* ctx,
        const GrUniqueKey& origKey,
        SkImage::CachingHint chint,
        bool willBeMipped,
        GrTextureMaker::AllowedTexGenType genType) const {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath, // Deprecated
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };
    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // Build our texture key.
    // Even though some proxies created here may have a specific origin and use that origin, we do
    // not include that in the key. Since SkImages are meant to be immutable, a given SkImage will
    // always have an associated proxy that is always one origin or the other. It never can change
    // origins. Thus we don't need to include that info in the key itself.
    GrUniqueKey key;
    this->makeCacheKeyFromOrigKey(origKey, &key);

    GrProxyProvider* proxyProvider = ctx->priv().proxyProvider();
    sk_sp<GrTextureProxy> proxy;

    // 1. Check the cache for a pre-existing one
    if (key.isValid()) {
        proxy = proxyProvider->findOrCreateProxyByUniqueKey(key, kTopLeft_GrSurfaceOrigin);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // 2. Ask the generator to natively create one
    if (!proxy) {
        ScopedGenerator generator(fSharedGenerator);
        if (GrTextureMaker::AllowedTexGenType::kCheap == genType &&
            SkImageGenerator::TexGenType::kCheap != generator->onCanGenerateTexture()) {
            return nullptr;
        }
        if ((proxy = generator->generateTexture(ctx, this->imageInfo(), fOrigin, willBeMipped))) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                *fUniqueKeyInvalidatedMessages.append() =
                        new GrUniqueKeyInvalidatedMessage(key, ctx->priv().contextID());
                return proxy;
            }
        }
    }

    // 3. Ask the generator to return YUV planes, which the GPU can convert. If we will be mipping
    //    the texture we fall through here and have the CPU generate the mip maps for us.
    if (!proxy && !willBeMipped && !ctx->priv().options().fDisableGpuYUVConversion) {
        const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(this->imageInfo());

        SkColorType colorType = this->colorType();

        ScopedGenerator generator(fSharedGenerator);
        Generator_GrYUVProvider provider(generator);

        // The pixels in the texture will be in the generator's color space.
        // If onMakeColorTypeAndColorSpace has been called then this will not match this image's
        // color space. To correct this, apply a color space conversion from the generator's color
        // space to this image's color space.
        SkColorSpace* generatorColorSpace = fSharedGenerator->fGenerator->getInfo().colorSpace();
        SkColorSpace* thisColorSpace = this->colorSpace();

        // TODO: Update to create the mipped surface in the YUV generator and draw the base
        //       layer directly into the mipped surface.
        proxy = provider.refAsTextureProxy(ctx, desc, SkColorTypeToGrColorType(colorType),
                                           generatorColorSpace, thisColorSpace);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->priv().contextID());
            return proxy;
        }
    }

    // 4. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (!proxy && this->getROPixels(&bitmap, chint)) {
        proxy = proxyProvider->createProxyFromBitmap(bitmap, willBeMipped ? GrMipMapped::kYes
                                                                          : GrMipMapped::kNo);
        if (proxy && (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped())) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->priv().contextID());
            return proxy;
        }
    }

    if (proxy) {
        // We need a mipped proxy, but we either found a proxy earlier that wasn't mipped,
        // generated a native non-mipped proxy, or generated a non-mipped yuv proxy. Thus we
        // generate a new mipped surface and copy the original proxy into the base layer. We will
        // then let the gpu generate the rest of the mips.
        SkASSERT(willBeMipped);
        SkASSERT(GrMipMapped::kNo == proxy->mipMapped());
        *fUniqueKeyInvalidatedMessages.append() =
                new GrUniqueKeyInvalidatedMessage(key, ctx->priv().contextID());
        if (auto mippedProxy = GrCopyBaseMipMapToTextureProxy(ctx, proxy.get())) {
            set_key_on_proxy(proxyProvider, mippedProxy.get(), proxy.get(), key);
            return mippedProxy;
        }

        // We failed to make a mipped proxy with the base copied into it. This could have
        // been from failure to make the proxy or failure to do the copy. Thus we will fall
        // back to just using the non mipped proxy; See skbug.com/7094.
        return proxy;
    }

    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#endif