GrAHardwareBufferUtils.cpp

/*
 * Copyright 2019 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkTypes.h"

#if defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26

#define GL_GLEXT_PROTOTYPES
#define EGL_EGLEXT_PROTOTYPES

#include "src/gpu/GrAHardwareBufferUtils.h"

#include <android/hardware_buffer.h>

#include "include/gpu/GrContext.h"
#include "include/gpu/gl/GrGLTypes.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/gl/GrGLDefines.h"

#ifdef SK_VULKAN
#include "src/gpu/vk/GrVkCaps.h"
#include "src/gpu/vk/GrVkGpu.h"
#endif

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES/glext.h>
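// The EGL_EXT_protected_content extension string and its token are defined locally
// rather than relying on the included EGL headers to provide them.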
#define PROT_CONTENT_EXT_STR "EGL_EXT_protected_content"
#define EGL_PROTECTED_CONTENT_EXT 0x32C0
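// VK_CALL dispatches Vulkan entry points through the GrVkGpu's interface table
// instead of calling into the Vulkan loader directly.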
#define VK_CALL(X) gpu->vkInterface()->fFunctions.f##X;

namespace GrAHardwareBufferUtils {
SkColorType GetSkColorTypeFromBufferFormat(uint32_t bufferFormat) {
    switch (bufferFormat) {
        case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_SkColorType;
        case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
            return kRGB_888x_SkColorType;
        case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
            return kRGBA_F16_SkColorType;
        case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
            return kRGB_565_SkColorType;
        case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
            return kRGB_888x_SkColorType;
        case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
            return kRGBA_1010102_SkColorType;
        default:
            // Given that we only use this texture as a source, colorType will not impact how Skia
            // uses the texture. The only potential effect this is anticipated to have is that, for
            // some format types, if we are not bound as an OES texture we may get invalid results
            // for SKP capture if we read back the texture.
            return kRGBA_8888_SkColorType;
    }
}
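// A minimal usage sketch (not part of the original file): a caller that holds an
// AHardwareBuffer can derive the matching SkColorType from the buffer's own description.
// `buffer` is assumed to be a valid AHardwareBuffer* in the caller's code.
//
//   AHardwareBuffer_Desc desc;
//   AHardwareBuffer_describe(buffer, &desc);
//   SkColorType colorType =
//           GrAHardwareBufferUtils::GetSkColorTypeFromBufferFormat(desc.format);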
GrBackendFormat GetBackendFormat(GrContext* context, AHardwareBuffer* hardwareBuffer,
                                 uint32_t bufferFormat, bool requireKnownFormat) {
    GrBackendApi backend = context->backend();

    if (backend == GrBackendApi::kOpenGL) {
        switch (bufferFormat) {
            // TODO: find out if we can detect which graphics buffers support GR_GL_TEXTURE_2D
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeGL(GR_GL_RGBA16F, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB565, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB10_A2, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB8, GR_GL_TEXTURE_EXTERNAL);
            default:
                if (requireKnownFormat) {
                    return GrBackendFormat();
                } else {
                    return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
                }
        }
    } else if (backend == GrBackendApi::kVulkan) {
#ifdef SK_VULKAN
        switch (bufferFormat) {
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeVk(VK_FORMAT_R16G16B16A16_SFLOAT);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R5G6B5_UNORM_PACK16);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_A2B10G10R10_UNORM_PACK32);
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
            default: {
                if (requireKnownFormat) {
                    return GrBackendFormat();
                } else {
                    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->priv().getGpu());
                    SkASSERT(gpu);
                    VkDevice device = gpu->device();

                    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
                        return GrBackendFormat();
                    }
                    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
                    hwbFormatProps.sType =
                            VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
                    hwbFormatProps.pNext = nullptr;

                    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
                    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
                    hwbProps.pNext = &hwbFormatProps;

                    VkResult err = VK_CALL(GetAndroidHardwareBufferProperties(device,
                                                                              hardwareBuffer,
                                                                              &hwbProps));
                    if (VK_SUCCESS != err) {
                        return GrBackendFormat();
                    }

                    if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
                        return GrBackendFormat();
                    }

                    GrVkYcbcrConversionInfo ycbcrConversion;
                    ycbcrConversion.fYcbcrModel = hwbFormatProps.suggestedYcbcrModel;
                    ycbcrConversion.fYcbcrRange = hwbFormatProps.suggestedYcbcrRange;
                    ycbcrConversion.fXChromaOffset = hwbFormatProps.suggestedXChromaOffset;
                    ycbcrConversion.fYChromaOffset = hwbFormatProps.suggestedYChromaOffset;
                    ycbcrConversion.fForceExplicitReconstruction = VK_FALSE;
                    ycbcrConversion.fExternalFormat = hwbFormatProps.externalFormat;
                    ycbcrConversion.fExternalFormatFeatures = hwbFormatProps.formatFeatures;
                    if (VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT &
                        hwbFormatProps.formatFeatures) {
                        ycbcrConversion.fChromaFilter = VK_FILTER_LINEAR;
                    } else {
                        ycbcrConversion.fChromaFilter = VK_FILTER_NEAREST;
                    }

                    return GrBackendFormat::MakeVk(ycbcrConversion);
                }
            }
        }
#else
        return GrBackendFormat();
#endif
    }
    return GrBackendFormat();
}
class GLCleanupHelper {
public:
    GLCleanupHelper(GrGLuint texID, EGLImageKHR image, EGLDisplay display)
        : fTexID(texID)
        , fImage(image)
        , fDisplay(display) { }
    ~GLCleanupHelper() {
        glDeleteTextures(1, &fTexID);
        // eglDestroyImageKHR will remove a ref from the AHardwareBuffer
        eglDestroyImageKHR(fDisplay, fImage);
    }
private:
    GrGLuint    fTexID;
    EGLImageKHR fImage;
    EGLDisplay  fDisplay;
};

void delete_gl_texture(void* context) {
    GLCleanupHelper* cleanupHelper = static_cast<GLCleanupHelper*>(context);
    delete cleanupHelper;
}
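// Wraps the AHardwareBuffer in an EGLImage, binds that image to a new GL texture via
// glEGLImageTargetTexture2DOES, and returns a GrBackendTexture referring to the texture.
// The returned deleteProc/deleteCtx pair destroys the texture and the EGLImage.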
static GrBackendTexture make_gl_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height,
        DeleteImageProc* deleteProc,
        DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat,
        bool isRenderable) {
    while (GL_NO_ERROR != glGetError()) {} // clear GL errors

    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    EGLint attribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                         isProtectedContent ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
                         isProtectedContent ? EGL_TRUE : EGL_NONE,
                         EGL_NONE };
    EGLDisplay display = eglGetCurrentDisplay();
    // eglCreateImageKHR will add a ref to the AHardwareBuffer
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, attribs);
    if (EGL_NO_IMAGE_KHR == image) {
        SkDebugf("Could not create EGL image, err = (%#x)", (int) eglGetError());
        return GrBackendTexture();
    }

    GrGLuint texID;
    glGenTextures(1, &texID);
    if (!texID) {
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }

    GrGLuint target = isRenderable ? GR_GL_TEXTURE_2D : GR_GL_TEXTURE_EXTERNAL;

    glBindTexture(target, texID);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glBindTexture failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    glEGLImageTargetTexture2DOES(target, image);
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    context->resetContext(kTextureBinding_GrGLBackendState);

    GrGLTextureInfo textureInfo;
    textureInfo.fID = texID;
    SkASSERT(backendFormat.isValid());
    textureInfo.fTarget = target;
    textureInfo.fFormat = *backendFormat.getGLFormat();

    *deleteProc = delete_gl_texture;
    *deleteCtx = new GLCleanupHelper(texID, image, display);

    return GrBackendTexture(width, height, GrMipMapped::kNo, textureInfo);
}
#ifdef SK_VULKAN
class VulkanCleanupHelper {
public:
    VulkanCleanupHelper(GrVkGpu* gpu, VkImage image, VkDeviceMemory memory)
        : fDevice(gpu->device())
        , fImage(image)
        , fMemory(memory)
        , fDestroyImage(gpu->vkInterface()->fFunctions.fDestroyImage)
        , fFreeMemory(gpu->vkInterface()->fFunctions.fFreeMemory) {}
    ~VulkanCleanupHelper() {
        fDestroyImage(fDevice, fImage, nullptr);
        fFreeMemory(fDevice, fMemory, nullptr);
    }
private:
    VkDevice           fDevice;
    VkImage            fImage;
    VkDeviceMemory     fMemory;
    PFN_vkDestroyImage fDestroyImage;
    PFN_vkFreeMemory   fFreeMemory;
};

void delete_vk_image(void* context) {
    VulkanCleanupHelper* cleanupHelper = static_cast<VulkanCleanupHelper*>(context);
    delete cleanupHelper;
}
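// Imports the AHardwareBuffer into Vulkan: queries the buffer's memory and format
// properties, creates a VkImage that allows external Android hardware buffer memory,
// binds a dedicated allocation imported from the buffer, and wraps the result in a
// GrBackendTexture. Cleanup of the image and memory is handled by the returned
// deleteProc/deleteCtx.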
static GrBackendTexture make_vk_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height,
        DeleteImageProc* deleteProc,
        DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat,
        bool isRenderable) {
    SkASSERT(context->backend() == GrBackendApi::kVulkan);
    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->priv().getGpu());
    SkASSERT(gpu);
    VkPhysicalDevice physicalDevice = gpu->physicalDevice();
    VkDevice device = gpu->device();

    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
        return GrBackendTexture();
    }

    SkASSERT(backendFormat.getVkFormat());
    VkFormat format = *backendFormat.getVkFormat();

    VkResult err;

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkExternalFormatANDROID externalFormat;
    externalFormat.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
    externalFormat.pNext = nullptr;
    externalFormat.externalFormat = 0;  // If this is zero it is as if we aren't using this struct.

    const GrVkYcbcrConversionInfo* ycbcrConversion = backendFormat.getVkYcbcrConversionInfo();
    if (!ycbcrConversion) {
        return GrBackendTexture();
    }

    if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
        // TODO: We should not assume the transfer features here and instead should have a way for
        // Ganesh's tracking of internal images to report whether or not they support transfers.
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(!ycbcrConversion->isValid());
    } else {
        SkASSERT(ycbcrConversion->isValid());
        // We have an external only format
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(format == VK_FORMAT_UNDEFINED);
        SkASSERT(hwbFormatProps.externalFormat == ycbcrConversion->fExternalFormat);
        externalFormat.externalFormat = hwbFormatProps.externalFormat;
    }
    SkASSERT(format == hwbFormatProps.format);

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo{
            VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
            &externalFormat,                                                     // pNext
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    if (format != VK_FORMAT_UNDEFINED) {
        usageFlags = usageFlags |
                     VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                     VK_IMAGE_USAGE_TRANSFER_DST_BIT;
        if (isRenderable) {
            usageFlags = usageFlags | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
        }
    }

    // TODO: Check the supported tilings via vkGetPhysicalDeviceImageFormatProperties2 to see if we
    // have to use linear. Add better linear support throughout Ganesh.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

    const VkImageCreateInfo imageCreateInfo = {
            VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
            &externalMemoryImageInfo,                    // pNext
            0,                                           // VkImageCreateFlags
            VK_IMAGE_TYPE_2D,                            // VkImageType
            format,                                      // VkFormat
            { (uint32_t)width, (uint32_t)height, 1 },    // VkExtent3D
            1,                                           // mipLevels
            1,                                           // arrayLayers
            VK_SAMPLE_COUNT_1_BIT,                       // samples
            tiling,                                      // VkImageTiling
            usageFlags,                                  // VkImageUsageFlags
            VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
            0,                                           // queueFamilyCount
            0,                                           // pQueueFamilyIndices
            VK_IMAGE_LAYOUT_UNDEFINED,                   // initialLayout
    };

    VkImage image;
    err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps));
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
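    // Pick a device-local memory type that the hardware buffer import allows
    // (hwbProps.memoryTypeBits is a bitmask of usable memory type indices).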
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }
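    // Import the AHardwareBuffer's memory as a dedicated allocation and bind it to the image.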
    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = hardwareBuffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = image;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
            VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,      // sType
            &dedicatedAllocInfo,                         // pNext
            hwbProps.allocationSize,                     // allocationSize
            typeIndex,                                   // memoryTypeIndex
    };

    VkDeviceMemory memory;
    err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = image;
    bindImageInfo.memory = memory;
    bindImageInfo.memoryOffset = 0;

    err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        VK_CALL(FreeMemory(device, memory, nullptr));
        return GrBackendTexture();
    }

    GrVkImageInfo imageInfo;
    imageInfo.fImage = image;
    imageInfo.fAlloc = GrVkAlloc(memory, 0, hwbProps.allocationSize, 0);
    imageInfo.fImageTiling = tiling;
    imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.fFormat = format;
    imageInfo.fLevelCount = 1;
    // TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT but current Adreno devices do not
    // support that extension. Or if we know the source of the AHardwareBuffer is not from a
    // "foreign" device we can leave them as external.
    imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    imageInfo.fYcbcrConversionInfo = *ycbcrConversion;

    *deleteProc = delete_vk_image;
    *deleteCtx = new VulkanCleanupHelper(gpu, image, memory);

    return GrBackendTexture(width, height, imageInfo);
}
#endif
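// Returns true if the current EGL display advertises EGL_EXT_protected_content, using a
// whole-word search of the extension string so that extensions that merely share the
// prefix do not match.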
static bool can_import_protected_content_eglimpl() {
    EGLDisplay dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    const char* exts = eglQueryString(dpy, EGL_EXTENSIONS);
    size_t cropExtLen = strlen(PROT_CONTENT_EXT_STR);
    size_t extsLen = strlen(exts);
    bool equal = !strcmp(PROT_CONTENT_EXT_STR, exts);
    bool atStart = !strncmp(PROT_CONTENT_EXT_STR " ", exts, cropExtLen+1);
    bool atEnd = (cropExtLen+1) < extsLen
                 && !strcmp(" " PROT_CONTENT_EXT_STR,
                            exts + extsLen - (cropExtLen+1));
    bool inMiddle = strstr(exts, " " PROT_CONTENT_EXT_STR " ");
    return equal || atStart || atEnd || inMiddle;
}
static bool can_import_protected_content(GrContext* context) {
    if (GrBackendApi::kOpenGL == context->backend()) {
        // Only compute whether the extension is present the first time this function is called.
        static bool hasIt = can_import_protected_content_eglimpl();
        return hasIt;
    }
    return false;
}
GrBackendTexture MakeBackendTexture(GrContext* context, AHardwareBuffer* hardwareBuffer,
                                    int width, int height,
                                    DeleteImageProc* deleteProc,
                                    DeleteImageCtx* deleteCtx,
                                    bool isProtectedContent,
                                    const GrBackendFormat& backendFormat,
                                    bool isRenderable) {
    if (context->abandoned()) {
        return GrBackendTexture();
    }
    bool createProtectedImage = isProtectedContent && can_import_protected_content(context);

    if (GrBackendApi::kOpenGL == context->backend()) {
        return make_gl_backend_texture(context, hardwareBuffer, width, height, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat,
                                       isRenderable);
    } else {
        SkASSERT(GrBackendApi::kVulkan == context->backend());
#ifdef SK_VULKAN
        // Currently we don't support protected images on Vulkan.
        SkASSERT(!createProtectedImage);
        return make_vk_backend_texture(context, hardwareBuffer, width, height, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat,
                                       isRenderable);
#else
        return GrBackendTexture();
#endif
    }
}
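// A minimal caller-side sketch (not part of the original file): wrapping an AHardwareBuffer
// in a GrBackendTexture and then in an SkImage. `buffer` and `grContext` are assumed to be
// valid in the caller's code, and error handling is omitted.
//
//   AHardwareBuffer_Desc desc;
//   AHardwareBuffer_describe(buffer, &desc);
//
//   GrBackendFormat format = GrAHardwareBufferUtils::GetBackendFormat(
//           grContext, buffer, desc.format, /*requireKnownFormat=*/false);
//
//   GrAHardwareBufferUtils::DeleteImageProc deleteProc = nullptr;
//   GrAHardwareBufferUtils::DeleteImageCtx deleteCtx = nullptr;
//   GrBackendTexture backendTex = GrAHardwareBufferUtils::MakeBackendTexture(
//           grContext, buffer, desc.width, desc.height, &deleteProc, &deleteCtx,
//           /*isProtectedContent=*/false, format, /*isRenderable=*/false);
//
//   if (backendTex.isValid()) {
//       sk_sp<SkImage> image = SkImage::MakeFromTexture(
//               grContext, backendTex, kTopLeft_GrSurfaceOrigin,
//               GrAHardwareBufferUtils::GetSkColorTypeFromBufferFormat(desc.format),
//               kPremul_SkAlphaType, nullptr, deleteProc, deleteCtx);
//   }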
}  // namespace GrAHardwareBufferUtils

#endif