GrVkCaps.cpp

/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/gpu/GrBackendSurface.h"
#include "include/gpu/GrRenderTarget.h"
#include "include/gpu/vk/GrVkBackendContext.h"
#include "include/gpu/vk/GrVkExtensions.h"
#include "src/gpu/GrRenderTargetProxy.h"
#include "src/gpu/GrShaderCaps.h"
#include "src/gpu/GrUtil.h"
#include "src/gpu/SkGr.h"
#include "src/gpu/vk/GrVkCaps.h"
#include "src/gpu/vk/GrVkInterface.h"
#include "src/gpu/vk/GrVkTexture.h"
#include "src/gpu/vk/GrVkUtil.h"

#ifdef SK_BUILD_FOR_ANDROID
#include <sys/system_properties.h>
#endif

GrVkCaps::GrVkCaps(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                   VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                   uint32_t instanceVersion, uint32_t physicalDeviceVersion,
                   const GrVkExtensions& extensions, GrProtected isProtected)
        : INHERITED(contextOptions) {
    /**************************************************************************
     * GrCaps fields
     **************************************************************************/
    fMipMapSupport = true;           // always available in Vulkan
    fSRGBSupport = true;             // always available in Vulkan
    fNPOTTextureTileSupport = true;  // always available in Vulkan
    fReuseScratchTextures = true;    // TODO: figure this out
    fGpuTracingSupport = false;      // TODO: figure this out
    fOversizedStencilSupport = false; // TODO: figure this out
    fInstanceAttribSupport = true;
    fSemaphoreSupport = true;        // always available in Vulkan
    fFenceSyncSupport = true;        // always available in Vulkan
    fCrossContextTextureSupport = true;
    fHalfFloatVertexAttributeSupport = true;
    // We always copy in/out of a transfer buffer so it's trivial to support row bytes.
    fReadPixelsRowBytesSupport = true;
    fWritePixelsRowBytesSupport = true;
    fTransferBufferSupport = true;
    fMaxRenderTargetSize = 4096; // minimum required by spec
    fMaxTextureSize = 4096;      // minimum required by spec
    fDynamicStateArrayGeometryProcessorTextureSupport = true;
    fShaderCaps.reset(new GrShaderCaps(contextOptions));
    this->init(contextOptions, vkInterface, physDev, features, physicalDeviceVersion, extensions,
               isProtected);
}

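// Buckets GrPixelConfigs by the Vulkan format compatibility class of their backing VkFormat
// (roughly, formats sharing a texel byte size), since vkCmdCopyImage is only legal between
// compatible formats. Only equality of the returned values matters; the numbering is arbitrary.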
static int get_compatible_format_class(GrPixelConfig config) {
    switch (config) {
        case kAlpha_8_GrPixelConfig:
        case kAlpha_8_as_Red_GrPixelConfig:
        case kGray_8_GrPixelConfig:
        case kGray_8_as_Red_GrPixelConfig:
            return 1;
        case kRGB_565_GrPixelConfig:
        case kRGBA_4444_GrPixelConfig:
        case kRG_88_GrPixelConfig:
        case kAlpha_half_GrPixelConfig:
        case kAlpha_half_as_Red_GrPixelConfig:
        case kR_16_GrPixelConfig:
            return 2;
        case kRGB_888_GrPixelConfig:
            return 3;
        case kRGBA_8888_GrPixelConfig:
        case kRGB_888X_GrPixelConfig:
        case kBGRA_8888_GrPixelConfig:
        case kSRGBA_8888_GrPixelConfig:
        case kRGBA_1010102_GrPixelConfig:
        case kRG_1616_GrPixelConfig:
            return 4;
        case kRGBA_half_GrPixelConfig:
        case kRGBA_half_Clamped_GrPixelConfig:
            return 5;
        case kRGBA_float_GrPixelConfig:
            return 6;
        case kRGB_ETC1_GrPixelConfig:
            return 7;
        case kUnknown_GrPixelConfig:
        case kAlpha_8_as_Alpha_GrPixelConfig:
        case kGray_8_as_Lum_GrPixelConfig:
        case kAlpha_half_as_Lum_GrPixelConfig:
            SK_ABORT("Unsupported Vulkan pixel config");
            return 0;
        // Experimental (for Y416 and mutant P016/P010)
        case kRGBA_16161616_GrPixelConfig:
            return 8;
        case kRG_half_GrPixelConfig:
            return 4;
    }
    SK_ABORT("Invalid pixel config");
    return 0;
}

bool GrVkCaps::canCopyImage(GrPixelConfig dstConfig, int dstSampleCnt, bool dstHasYcbcr,
                            GrPixelConfig srcConfig, int srcSampleCnt, bool srcHasYcbcr) const {
    if ((dstSampleCnt > 1 || srcSampleCnt > 1) && dstSampleCnt != srcSampleCnt) {
        return false;
    }
    if (dstHasYcbcr || srcHasYcbcr) {
        return false;
    }
    // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
    // as image usage flags.
    if (get_compatible_format_class(srcConfig) != get_compatible_format_class(dstConfig)) {
        return false;
    }
    return true;
}

bool GrVkCaps::canCopyAsBlit(GrPixelConfig dstConfig, int dstSampleCnt, bool dstIsLinear,
                             bool dstHasYcbcr, GrPixelConfig srcConfig, int srcSampleCnt,
                             bool srcIsLinear, bool srcHasYcbcr) const {
    VkFormat dstFormat;
    SkAssertResult(GrPixelConfigToVkFormat(dstConfig, &dstFormat));
    VkFormat srcFormat;
    SkAssertResult(GrPixelConfigToVkFormat(srcConfig, &srcFormat));
    // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
    // as image usage flags.
    if (!this->formatCanBeDstofBlit(dstFormat, dstIsLinear) ||
        !this->formatCanBeSrcofBlit(srcFormat, srcIsLinear)) {
        return false;
    }
    // We cannot blit images that are multisampled. Will need to figure out if we can blit the
    // resolved msaa though.
    if (dstSampleCnt > 1 || srcSampleCnt > 1) {
        return false;
    }
    if (dstHasYcbcr || srcHasYcbcr) {
        return false;
    }
    return true;
}

bool GrVkCaps::canCopyAsResolve(GrPixelConfig dstConfig, int dstSampleCnt, bool dstHasYcbcr,
                                GrPixelConfig srcConfig, int srcSampleCnt, bool srcHasYcbcr) const {
    // The src surface must be multisampled.
    if (srcSampleCnt <= 1) {
        return false;
    }
    // The dst must not be multisampled.
    if (dstSampleCnt > 1) {
        return false;
    }
    // Surfaces must have the same format.
    if (dstConfig != srcConfig) {
        return false;
    }
    if (dstHasYcbcr || srcHasYcbcr) {
        return false;
    }
    return true;
}

bool GrVkCaps::onCanCopySurface(const GrSurfaceProxy* dst, const GrSurfaceProxy* src,
                                const SkIRect& srcRect, const SkIPoint& dstPoint) const {
    if (src->isProtected() && !dst->isProtected()) {
        return false;
    }
    GrPixelConfig dstConfig = dst->config();
    GrPixelConfig srcConfig = src->config();
    // TODO: Figure out a way to track if we've wrapped a linear texture in a proxy (e.g. a
    // PromiseImage which won't get instantiated right away). Does this need something similar to
    // the tracking of external or rectangle textures in GL? For now we don't create linear
    // textures internally, and I don't believe anyone is wrapping them.
    bool srcIsLinear = false;
    bool dstIsLinear = false;
    int dstSampleCnt = 0;
    int srcSampleCnt = 0;
    if (const GrRenderTargetProxy* rtProxy = dst->asRenderTargetProxy()) {
        // Copying to or from render targets that wrap a secondary command buffer is not allowed
        // since they would require us to know the VkImage, which we don't have, as well as need us
        // to stop and start the VkRenderPass which we don't have access to.
        if (rtProxy->wrapsVkSecondaryCB()) {
            return false;
        }
        dstSampleCnt = rtProxy->numSamples();
    }
    if (const GrRenderTargetProxy* rtProxy = src->asRenderTargetProxy()) {
        // Copying to or from render targets that wrap a secondary command buffer is not allowed
        // since they would require us to know the VkImage, which we don't have, as well as need us
        // to stop and start the VkRenderPass which we don't have access to.
        if (rtProxy->wrapsVkSecondaryCB()) {
            return false;
        }
        srcSampleCnt = rtProxy->numSamples();
    }
    SkASSERT((dstSampleCnt > 0) == SkToBool(dst->asRenderTargetProxy()));
    SkASSERT((srcSampleCnt > 0) == SkToBool(src->asRenderTargetProxy()));
    bool dstHasYcbcr = false;
    if (auto ycbcr = dst->backendFormat().getVkYcbcrConversionInfo()) {
        if (ycbcr->isValid()) {
            dstHasYcbcr = true;
        }
    }
    bool srcHasYcbcr = false;
    if (auto ycbcr = src->backendFormat().getVkYcbcrConversionInfo()) {
        if (ycbcr->isValid()) {
            srcHasYcbcr = true;
        }
    }
    return this->canCopyImage(dstConfig, dstSampleCnt, dstHasYcbcr,
                              srcConfig, srcSampleCnt, srcHasYcbcr) ||
           this->canCopyAsBlit(dstConfig, dstSampleCnt, dstIsLinear, dstHasYcbcr,
                               srcConfig, srcSampleCnt, srcIsLinear, srcHasYcbcr) ||
           this->canCopyAsResolve(dstConfig, dstSampleCnt, dstHasYcbcr,
                                  srcConfig, srcSampleCnt, srcHasYcbcr);
}

template<typename T> T* get_extension_feature_struct(const VkPhysicalDeviceFeatures2& features,
                                                     VkStructureType type) {
    // All Vulkan structs that could be part of the features chain will start with the
    // structure type followed by the pNext pointer. We cast to the CommonVulkanHeader
    // so we can get access to the pNext for the next struct.
    struct CommonVulkanHeader {
        VkStructureType sType;
        void*           pNext;
    };
    void* pNext = features.pNext;
    while (pNext) {
        CommonVulkanHeader* header = static_cast<CommonVulkanHeader*>(pNext);
        if (header->sType == type) {
            return static_cast<T*>(pNext);
        }
        pNext = header->pNext;
    }
    return nullptr;
}

void GrVkCaps::init(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                    VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                    uint32_t physicalDeviceVersion, const GrVkExtensions& extensions,
                    GrProtected isProtected) {
    VkPhysicalDeviceProperties properties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties(physDev, &properties));
    VkPhysicalDeviceMemoryProperties memoryProperties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceMemoryProperties(physDev, &memoryProperties));
    SkASSERT(physicalDeviceVersion <= properties.apiVersion);
    if (extensions.hasExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, 1)) {
        fSupportsSwapchain = true;
    }
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, 1)) {
        fSupportsPhysicalDeviceProperties2 = true;
    }
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, 1)) {
        fSupportsMemoryRequirements2 = true;
    }
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
        fSupportsBindMemory2 = true;
    }
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, 1)) {
        fSupportsMaintenance1 = true;
    }
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME, 1)) {
        fSupportsMaintenance2 = true;
    }
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME, 1)) {
        fSupportsMaintenance3 = true;
    }
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, 1) &&
         this->supportsMemoryRequirements2())) {
        fSupportsDedicatedAllocation = true;
    }
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, 1) &&
         this->supportsPhysicalDeviceProperties2() &&
         extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, 1) &&
         this->supportsDedicatedAllocation())) {
        fSupportsExternalMemory = true;
    }

#ifdef SK_BUILD_FOR_ANDROID
    // Currently Adreno devices do not support the QUEUE_FAMILY_FOREIGN extension, so until they
    // do we don't explicitly require it here, even though the spec says it is required.
    if (extensions.hasExtension(
                VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2) &&
        /* extensions.hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1) &&*/
        this->supportsExternalMemory() &&
        this->supportsBindMemory2()) {
        fSupportsAndroidHWBExternalMemory = true;
        fSupportsAHardwareBufferImages = true;
    }
#endif

    auto ycbcrFeatures =
            get_extension_feature_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
                    features,
                    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES);
    if (ycbcrFeatures && ycbcrFeatures->samplerYcbcrConversion &&
        fSupportsAndroidHWBExternalMemory &&
        (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
         (extensions.hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1) &&
          this->supportsMaintenance1() &&
          this->supportsBindMemory2() &&
          this->supportsMemoryRequirements2() &&
          this->supportsPhysicalDeviceProperties2()))) {
        fSupportsYcbcrConversion = true;
    }
    // We always push back the default GrVkYcbcrConversionInfo so that the case of no conversion
    // will return a key of 0.
    fYcbcrInfos.push_back(GrVkYcbcrConversionInfo());
    if ((isProtected == GrProtected::kYes) &&
        (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0))) {
        fSupportsProtectedMemory = true;
        fAvoidUpdateBuffers = true;
        fShouldAlwaysUseDedicatedImageMemory = true;
    }
    this->initGrCaps(vkInterface, physDev, properties, memoryProperties, features, extensions);
    this->initShaderCaps(properties, features);
    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
#if defined(SK_CPU_X86)
        // We need to do this before initing the config table since it uses fSRGBSupport
        if (kImagination_VkVendor == properties.vendorID) {
            fSRGBSupport = false;
        }
#endif
    }
    if (kQualcomm_VkVendor == properties.vendorID) {
        // A "clear" load for the CCPR atlas runs faster on QC than a "discard" load followed by a
        // scissored clear.
        // On NVIDIA and Intel, the discard load followed by clear is faster.
        // TODO: Evaluate on ARM, Imagination, and ATI.
        fPreferFullscreenClears = true;
    }

    if (kQualcomm_VkVendor == properties.vendorID || kARM_VkVendor == properties.vendorID) {
        // On Qualcomm and ARM, mapping a gpu buffer and doing both reads and writes to it is slow.
        // Thus for index and vertex buffers we will force the use of a cpu side buffer and then
        // copy the whole buffer up to the gpu.
        fBufferMapThreshold = SK_MaxS32;
    }
    if (kQualcomm_VkVendor == properties.vendorID) {
        // On Qualcomm it looks like using vkCmdUpdateBuffer is slower than using a transfer buffer
        // even for small sizes.
        fAvoidUpdateBuffers = true;
    }
    if (kARM_VkVendor == properties.vendorID) {
        // ARM seems to do better with finer triangles as opposed to using the sample mask.
        // (At least in our current round rect op.)
        fPreferTrianglesOverSampleMask = true;
    }
    this->initFormatTable(vkInterface, physDev, properties);
    this->initStencilFormat(vkInterface, physDev);
    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
        this->applyDriverCorrectnessWorkarounds(properties);
    }
    this->applyOptionsOverrides(contextOptions);
    fShaderCaps->applyOptionsOverrides(contextOptions);
}

void GrVkCaps::applyDriverCorrectnessWorkarounds(const VkPhysicalDeviceProperties& properties) {
    if (kQualcomm_VkVendor == properties.vendorID) {
        fMustDoCopiesFromOrigin = true;
        // Transfer doesn't support this workaround.
        fTransferBufferSupport = false;
    }
#if defined(SK_BUILD_FOR_WIN)
    if (kNvidia_VkVendor == properties.vendorID || kIntel_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#elif defined(SK_BUILD_FOR_ANDROID)
    if (kImagination_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#endif
#if defined(SK_BUILD_FOR_ANDROID)
    // Protected memory features have problems in Android P and earlier.
    if (fSupportsProtectedMemory && (kQualcomm_VkVendor == properties.vendorID)) {
        char androidAPIVersion[PROP_VALUE_MAX];
        int strLength = __system_property_get("ro.build.version.sdk", androidAPIVersion);
        if (strLength == 0 || atoi(androidAPIVersion) <= 28) {
            fSupportsProtectedMemory = false;
        }
    }
#endif
    // AMD seems to have issues binding new VkPipelines inside a secondary command buffer.
    // Current workaround is to use a different secondary command buffer for each new VkPipeline.
    if (kAMD_VkVendor == properties.vendorID) {
        fNewCBOnPipelineChange = true;
    }
    // On Mali galaxy s7 we see lots of rendering issues when we suballocate VkImages.
    if (kARM_VkVendor == properties.vendorID) {
        fShouldAlwaysUseDedicatedImageMemory = true;
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrCaps workarounds
    ////////////////////////////////////////////////////////////////////////////
    if (kARM_VkVendor == properties.vendorID) {
        fInstanceAttribSupport = false;
        fAvoidWritePixelsFastPath = true; // bugs.skia.org/8064
    }
    // AMD advertises support for MAX_UINT vertex input attributes, but in reality only supports 32.
    if (kAMD_VkVendor == properties.vendorID) {
        fMaxVertexAttributes = SkTMin(fMaxVertexAttributes, 32);
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrShaderCaps workarounds
    ////////////////////////////////////////////////////////////////////////////
    if (kImagination_VkVendor == properties.vendorID) {
        fShaderCaps->fAtan2ImplementedAsAtanYOverX = true;
    }
}

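// Returns the largest sample count N (up to 64) such that every power-of-two count from 1
// through N is present in 'flags'; returns 0 if only single-sample (1x) rendering is supported.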
int get_max_sample_count(VkSampleCountFlags flags) {
    SkASSERT(flags & VK_SAMPLE_COUNT_1_BIT);
    if (!(flags & VK_SAMPLE_COUNT_2_BIT)) {
        return 0;
    }
    if (!(flags & VK_SAMPLE_COUNT_4_BIT)) {
        return 2;
    }
    if (!(flags & VK_SAMPLE_COUNT_8_BIT)) {
        return 4;
    }
    if (!(flags & VK_SAMPLE_COUNT_16_BIT)) {
        return 8;
    }
    if (!(flags & VK_SAMPLE_COUNT_32_BIT)) {
        return 16;
    }
    if (!(flags & VK_SAMPLE_COUNT_64_BIT)) {
        return 32;
    }
    return 64;
}

void GrVkCaps::initGrCaps(const GrVkInterface* vkInterface,
                          VkPhysicalDevice physDev,
                          const VkPhysicalDeviceProperties& properties,
                          const VkPhysicalDeviceMemoryProperties& memoryProperties,
                          const VkPhysicalDeviceFeatures2& features,
                          const GrVkExtensions& extensions) {
    // Some GPUs, like AMD, report supporting MAX_INT vertex input attributes. In general there is
    // no need for us ever to support that many, and it makes tests which loop over all the vertex
    // attribs time out. For now, we'll cap this at 64 and can raise it if we ever find the need.
    static const uint32_t kMaxVertexAttributes = 64;
    fMaxVertexAttributes = SkTMin(properties.limits.maxVertexInputAttributes, kMaxVertexAttributes);
    // We could actually query and get a max size for each config, however maxImageDimension2D will
    // give the minimum max size across all configs. So for simplicity we will use that for now.
    fMaxRenderTargetSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    fMaxTextureSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    if (fDriverBugWorkarounds.max_texture_size_limit_4096) {
        fMaxTextureSize = SkTMin(fMaxTextureSize, 4096);
    }
    // Our render targets are always created with textures as the color
    // attachment, hence this min:
    fMaxRenderTargetSize = SkTMin(fMaxTextureSize, fMaxRenderTargetSize);
    // TODO: check if RT's larger than 4k incur a performance cost on ARM.
    fMaxPreferredRenderTargetSize = fMaxRenderTargetSize;
    // Since we will always map the buffer in the end to upload the data, we might as well just map
    // from the get-go. There is no hard data to suggest this is faster or slower.
    fBufferMapThreshold = 0;
    fMapBufferFlags = kCanMap_MapFlag | kSubset_MapFlag | kAsyncRead_MapFlag;
    fOversizedStencilSupport = true;
    if (extensions.hasExtension(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, 2) &&
        this->supportsPhysicalDeviceProperties2()) {
        VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT blendProps;
        blendProps.sType =
                VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT;
        blendProps.pNext = nullptr;
        VkPhysicalDeviceProperties2 props;
        props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        props.pNext = &blendProps;
        GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties2(physDev, &props));
        if (blendProps.advancedBlendAllOperations == VK_TRUE) {
            fShaderCaps->fAdvBlendEqInteraction = GrShaderCaps::kAutomatic_AdvBlendEqInteraction;
            auto blendFeatures =
                    get_extension_feature_struct<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(
                            features,
                            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT);
            if (blendFeatures && blendFeatures->advancedBlendCoherentOperations == VK_TRUE) {
                fBlendEquationSupport = kAdvancedCoherent_BlendEquationSupport;
            } else {
                // TODO: Currently non coherent blends are not supported in our vulkan backend. They
                // require us to support self dependencies in our render passes.
                // fBlendEquationSupport = kAdvanced_BlendEquationSupport;
            }
        }
    }
}

void GrVkCaps::initShaderCaps(const VkPhysicalDeviceProperties& properties,
                              const VkPhysicalDeviceFeatures2& features) {
    GrShaderCaps* shaderCaps = fShaderCaps.get();
    shaderCaps->fVersionDeclString = "#version 330\n";
    // Vulkan is based off ES 3.0 so the following should all be supported
    shaderCaps->fUsesPrecisionModifiers = true;
    shaderCaps->fFlatInterpolationSupport = true;
    // Flat interpolation appears to be slow on Qualcomm GPUs. This was tested in GL and is assumed
    // to be true with Vulkan as well.
    shaderCaps->fPreferFlatInterpolation = kQualcomm_VkVendor != properties.vendorID;
    // GrShaderCaps
    shaderCaps->fShaderDerivativeSupport = true;
    // FIXME: http://skbug.com/7733: Disable geometry shaders until Intel/Radeon GMs draw correctly.
    // shaderCaps->fGeometryShaderSupport =
    //         shaderCaps->fGSInvocationsSupport = features.features.geometryShader;
    shaderCaps->fDualSourceBlendingSupport = features.features.dualSrcBlend;
    shaderCaps->fIntegerSupport = true;
    shaderCaps->fVertexIDSupport = true;
    shaderCaps->fFPManipulationSupport = true;
    // Assume the minimum precisions mandated by the SPIR-V spec.
    shaderCaps->fFloatIs32Bits = true;
    shaderCaps->fHalfIs32Bits = false;
    shaderCaps->fMaxFragmentSamplers = SkTMin(
            SkTMin(properties.limits.maxPerStageDescriptorSampledImages,
                   properties.limits.maxPerStageDescriptorSamplers),
            (uint32_t)INT_MAX);
}

bool stencil_format_supported(const GrVkInterface* interface,
                              VkPhysicalDevice physDev,
                              VkFormat format) {
    VkFormatProperties props;
    memset(&props, 0, sizeof(VkFormatProperties));
    GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
    return SkToBool(VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT & props.optimalTilingFeatures);
}

void GrVkCaps::initStencilFormat(const GrVkInterface* interface, VkPhysicalDevice physDev) {
    // List of legal stencil formats (though perhaps not supported on
    // the particular gpu/driver) from most preferred to least. We are guaranteed to have either
    // VK_FORMAT_D24_UNORM_S8_UINT or VK_FORMAT_D32_SFLOAT_S8_UINT. VK_FORMAT_D32_SFLOAT_S8_UINT
    // can optionally have 24 unused bits at the end so we assume the total bits is 64.
    static const StencilFormat
                   // internal Format                stencil bits   total bits   packed?
        gS8    = { VK_FORMAT_S8_UINT,                      8,            8,       false },
        gD24S8 = { VK_FORMAT_D24_UNORM_S8_UINT,            8,           32,       true  },
        gD32S8 = { VK_FORMAT_D32_SFLOAT_S8_UINT,           8,           64,       true  };
    if (stencil_format_supported(interface, physDev, VK_FORMAT_S8_UINT)) {
        fPreferredStencilFormat = gS8;
    } else if (stencil_format_supported(interface, physDev, VK_FORMAT_D24_UNORM_S8_UINT)) {
        fPreferredStencilFormat = gD24S8;
    } else {
        SkASSERT(stencil_format_supported(interface, physDev, VK_FORMAT_D32_SFLOAT_S8_UINT));
        fPreferredStencilFormat = gD32S8;
    }
}

static bool format_is_srgb(VkFormat format) {
    SkASSERT(GrVkFormatIsSupported(format));
    switch (format) {
        case VK_FORMAT_R8G8B8A8_SRGB:
            return true;
        default:
            return false;
    }
}

// These are all the valid VkFormats that we support in Skia. They are roughly ordered from most
// frequently used to least to improve look up times in arrays.
static constexpr VkFormat kVkFormats[] = {
    VK_FORMAT_R8G8B8A8_UNORM,
    VK_FORMAT_R8_UNORM,
    VK_FORMAT_B8G8R8A8_UNORM,
    VK_FORMAT_R5G6B5_UNORM_PACK16,
    VK_FORMAT_R16G16B16A16_SFLOAT,
    VK_FORMAT_R16_SFLOAT,
    VK_FORMAT_R8G8B8_UNORM,
    VK_FORMAT_R8G8_UNORM,
    VK_FORMAT_A2B10G10R10_UNORM_PACK32,
    VK_FORMAT_B4G4R4A4_UNORM_PACK16,
    VK_FORMAT_R4G4B4A4_UNORM_PACK16,
    VK_FORMAT_R32G32B32A32_SFLOAT,
    VK_FORMAT_R8G8B8A8_SRGB,
    VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
    VK_FORMAT_R16_UNORM,
    VK_FORMAT_R16G16_UNORM,
    // Experimental (for Y416 and mutant P016/P010)
    VK_FORMAT_R16G16B16A16_UNORM,
    VK_FORMAT_R16G16_SFLOAT,
};

const GrVkCaps::FormatInfo& GrVkCaps::getFormatInfo(VkFormat format) const {
    static_assert(SK_ARRAY_COUNT(kVkFormats) == GrVkCaps::kNumVkFormats,
                  "Size of VkFormats array must match static value in header");
    for (size_t i = 0; i < SK_ARRAY_COUNT(kVkFormats); ++i) {
        if (kVkFormats[i] == format) {
            return fFormatTable[i];
        }
    }
    SK_ABORT("Invalid VkFormat");
    static const FormatInfo kInvalidFormat;
    return kInvalidFormat;
}

void GrVkCaps::initFormatTable(const GrVkInterface* interface, VkPhysicalDevice physDev,
                               const VkPhysicalDeviceProperties& properties) {
    static_assert(SK_ARRAY_COUNT(kVkFormats) == GrVkCaps::kNumVkFormats,
                  "Size of VkFormats array must match static value in header");
    for (size_t i = 0; i < SK_ARRAY_COUNT(kVkFormats); ++i) {
        VkFormat format = kVkFormats[i];
        if (!format_is_srgb(format) || fSRGBSupport) {
            fFormatTable[i].init(interface, physDev, properties, format);
        }
    }
}

void GrVkCaps::FormatInfo::InitConfigFlags(VkFormatFeatureFlags vkFlags, uint16_t* flags) {
    if (SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & vkFlags) &&
        SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT & vkFlags)) {
        *flags = *flags | kTextureable_Flag;
        // Ganesh assumes that all renderable surfaces are also texturable
        if (SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT & vkFlags)) {
            *flags = *flags | kRenderable_Flag;
        }
    }
    if (SkToBool(VK_FORMAT_FEATURE_BLIT_SRC_BIT & vkFlags)) {
        *flags = *flags | kBlitSrc_Flag;
    }
    if (SkToBool(VK_FORMAT_FEATURE_BLIT_DST_BIT & vkFlags)) {
        *flags = *flags | kBlitDst_Flag;
    }
}

void GrVkCaps::FormatInfo::initSampleCounts(const GrVkInterface* interface,
                                            VkPhysicalDevice physDev,
                                            const VkPhysicalDeviceProperties& physProps,
                                            VkFormat format) {
    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                              VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                              VK_IMAGE_USAGE_SAMPLED_BIT |
                              VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    VkImageFormatProperties properties;
    GR_VK_CALL(interface, GetPhysicalDeviceImageFormatProperties(physDev,
                                                                 format,
                                                                 VK_IMAGE_TYPE_2D,
                                                                 VK_IMAGE_TILING_OPTIMAL,
                                                                 usage,
                                                                 0,  // createFlags
                                                                 &properties));
    VkSampleCountFlags flags = properties.sampleCounts;
    if (flags & VK_SAMPLE_COUNT_1_BIT) {
        fColorSampleCounts.push_back(1);
    }
    if (kImagination_VkVendor == physProps.vendorID) {
        // MSAA does not work on imagination
        return;
    }
    if (kIntel_VkVendor == physProps.vendorID) {
        // MSAA on Intel before Gen 9 is slow and/or buggy
        if (GrGetIntelGpuFamily(physProps.deviceID) < kFirstGen9_IntelGpuFamily) {
            return;
        }
    }
    if (flags & VK_SAMPLE_COUNT_2_BIT) {
        fColorSampleCounts.push_back(2);
    }
    if (flags & VK_SAMPLE_COUNT_4_BIT) {
        fColorSampleCounts.push_back(4);
    }
    if (flags & VK_SAMPLE_COUNT_8_BIT) {
        fColorSampleCounts.push_back(8);
    }
    if (flags & VK_SAMPLE_COUNT_16_BIT) {
        fColorSampleCounts.push_back(16);
    }
    if (flags & VK_SAMPLE_COUNT_32_BIT) {
        fColorSampleCounts.push_back(32);
    }
    if (flags & VK_SAMPLE_COUNT_64_BIT) {
        fColorSampleCounts.push_back(64);
    }
}

void GrVkCaps::FormatInfo::init(const GrVkInterface* interface,
                                VkPhysicalDevice physDev,
                                const VkPhysicalDeviceProperties& properties,
                                VkFormat format) {
    VkFormatProperties props;
    memset(&props, 0, sizeof(VkFormatProperties));
    GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
    InitConfigFlags(props.linearTilingFeatures, &fLinearFlags);
    InitConfigFlags(props.optimalTilingFeatures, &fOptimalFlags);
    if (fOptimalFlags & kRenderable_Flag) {
        this->initSampleCounts(interface, physDev, properties, format);
    }
}

bool GrVkCaps::isFormatSRGB(const GrBackendFormat& format) const {
    if (!format.getVkFormat()) {
        return false;
    }
    return format_is_srgb(*format.getVkFormat());
}

bool GrVkCaps::isFormatTexturable(GrColorType, const GrBackendFormat& format) const {
    if (!format.getVkFormat()) {
        return false;
    }
    return this->isVkFormatTexturable(*format.getVkFormat());
}

bool GrVkCaps::isVkFormatTexturable(VkFormat format) const {
    if (!GrVkFormatIsSupported(format)) {
        return false;
    }
    const FormatInfo& info = this->getFormatInfo(format);
    return SkToBool(FormatInfo::kTextureable_Flag & info.fOptimalFlags);
}

bool GrVkCaps::isConfigTexturable(GrPixelConfig config) const {
    VkFormat format;
    if (!GrPixelConfigToVkFormat(config, &format)) {
        return false;
    }
    return this->isVkFormatTexturable(format);
}

bool GrVkCaps::isFormatRenderable(VkFormat format) const {
    return this->maxRenderTargetSampleCount(format) > 0;
}

int GrVkCaps::getRenderTargetSampleCount(int requestedCount,
                                         GrColorType, const GrBackendFormat& format) const {
    if (!format.getVkFormat()) {
        return 0;
    }
    return this->getRenderTargetSampleCount(requestedCount, *format.getVkFormat());
}

int GrVkCaps::getRenderTargetSampleCount(int requestedCount, GrPixelConfig config) const {
    // Currently we don't allow RGB_888X to be renderable because we don't have a way to handle
    // blends that reference dst alpha when the values in the dst alpha channel are uninitialized.
    if (config == kRGB_888X_GrPixelConfig) {
        return 0;
    }
    VkFormat format;
    if (!GrPixelConfigToVkFormat(config, &format)) {
        return 0;
    }
    return this->getRenderTargetSampleCount(requestedCount, format);
}

int GrVkCaps::getRenderTargetSampleCount(int requestedCount, VkFormat format) const {
    requestedCount = SkTMax(1, requestedCount);
    const FormatInfo& info = this->getFormatInfo(format);
    int count = info.fColorSampleCounts.count();
    if (!count) {
        return 0;
    }
    if (1 == requestedCount) {
        SkASSERT(info.fColorSampleCounts.count() && info.fColorSampleCounts[0] == 1);
        return 1;
    }
    for (int i = 0; i < count; ++i) {
        if (info.fColorSampleCounts[i] >= requestedCount) {
            return info.fColorSampleCounts[i];
        }
    }
    return 0;
}

int GrVkCaps::maxRenderTargetSampleCount(GrColorType, const GrBackendFormat& format) const {
    if (!format.getVkFormat()) {
        return 0;
    }
    return this->maxRenderTargetSampleCount(*format.getVkFormat());
}

int GrVkCaps::maxRenderTargetSampleCount(GrPixelConfig config) const {
    // Currently we don't allow RGB_888X to be renderable because we don't have a way to handle
    // blends that reference dst alpha when the values in the dst alpha channel are uninitialized.
    if (config == kRGB_888X_GrPixelConfig) {
        return 0;
    }
    VkFormat format;
    if (!GrPixelConfigToVkFormat(config, &format)) {
        return 0;
    }
    return this->maxRenderTargetSampleCount(format);
}

int GrVkCaps::maxRenderTargetSampleCount(VkFormat format) const {
    const FormatInfo& info = this->getFormatInfo(format);
    const auto& table = info.fColorSampleCounts;
    if (!table.count()) {
        return 0;
    }
    return table[table.count() - 1];
}

GrCaps::SurfaceReadPixelsSupport GrVkCaps::surfaceSupportsReadPixels(
        const GrSurface* surface) const {
    if (surface->isProtected()) {
        return SurfaceReadPixelsSupport::kUnsupported;
    }
    if (auto tex = static_cast<const GrVkTexture*>(surface->asTexture())) {
        // We can't directly read from a VkImage that has a ycbcr sampler.
        if (tex->ycbcrConversionInfo().isValid()) {
            return SurfaceReadPixelsSupport::kCopyToTexture2D;
        }
        // We can't directly read from a compressed format
        SkImage::CompressionType compressionType;
        if (GrVkFormatToCompressionType(tex->imageFormat(), &compressionType)) {
            return SurfaceReadPixelsSupport::kCopyToTexture2D;
        }
    }
    return SurfaceReadPixelsSupport::kSupported;
}

bool GrVkCaps::onSurfaceSupportsWritePixels(const GrSurface* surface) const {
    if (auto rt = surface->asRenderTarget()) {
        return rt->numSamples() <= 1 && SkToBool(surface->asTexture());
    }
    // We can't write to a texture that has a ycbcr sampler.
    if (auto tex = static_cast<const GrVkTexture*>(surface->asTexture())) {
        // We can't directly write to a VkImage that has a ycbcr sampler.
        if (tex->ycbcrConversionInfo().isValid()) {
            return false;
        }
    }
    return true;
}

// A near clone of format_color_type_valid_pair
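// Maps a (VkFormat, GrColorType) pair to the GrPixelConfig Ganesh uses for it, returning
// kUnknown_GrPixelConfig when the pairing isn't supported. VK_FORMAT_UNDEFINED is only accepted
// for external images that carry a valid ycbcr conversion.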
static GrPixelConfig validate_image_info(VkFormat format, GrColorType ct, bool hasYcbcrConversion) {
    if (format == VK_FORMAT_UNDEFINED) {
        // If the format is undefined then it is only valid as an external image which requires that
        // we have a valid VkYcbcrConversion.
        if (hasYcbcrConversion) {
            // We don't actually care what the color type or config are since we won't use those
            // values for external textures. However, for read pixels we will draw to a non ycbcr
            // texture of this config so we set RGBA here for that.
            return kRGBA_8888_GrPixelConfig;
        } else {
            return kUnknown_GrPixelConfig;
        }
    }
    if (hasYcbcrConversion) {
        // We only support having a ycbcr conversion for external images.
        return kUnknown_GrPixelConfig;
    }
    switch (ct) {
        case GrColorType::kUnknown:
            break;
        case GrColorType::kAlpha_8:
            if (VK_FORMAT_R8_UNORM == format) {
                return kAlpha_8_as_Red_GrPixelConfig;
            }
            break;
        case GrColorType::kBGR_565:
            if (VK_FORMAT_R5G6B5_UNORM_PACK16 == format) {
                return kRGB_565_GrPixelConfig;
            }
            break;
        case GrColorType::kABGR_4444:
            if (VK_FORMAT_B4G4R4A4_UNORM_PACK16 == format ||
                VK_FORMAT_R4G4B4A4_UNORM_PACK16 == format) {
                return kRGBA_4444_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_8888:
            if (VK_FORMAT_R8G8B8A8_UNORM == format) {
                return kRGBA_8888_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_8888_SRGB:
            if (VK_FORMAT_R8G8B8A8_SRGB == format) {
                return kSRGBA_8888_GrPixelConfig;
            }
            break;
        case GrColorType::kRGB_888x:
            if (VK_FORMAT_R8G8B8_UNORM == format) {
                return kRGB_888_GrPixelConfig;
            }
            if (VK_FORMAT_R8G8B8A8_UNORM == format) {
                return kRGB_888X_GrPixelConfig;
            }
            break;
        case GrColorType::kRG_88:
            if (VK_FORMAT_R8G8_UNORM == format) {
                return kRG_88_GrPixelConfig;
            }
            break;
        case GrColorType::kBGRA_8888:
            if (VK_FORMAT_B8G8R8A8_UNORM == format) {
                return kBGRA_8888_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_1010102:
            if (VK_FORMAT_A2B10G10R10_UNORM_PACK32 == format) {
                return kRGBA_1010102_GrPixelConfig;
            }
            break;
        case GrColorType::kGray_8:
            if (VK_FORMAT_R8_UNORM == format) {
                return kGray_8_as_Red_GrPixelConfig;
            }
            break;
        case GrColorType::kAlpha_F16:
            if (VK_FORMAT_R16_SFLOAT == format) {
                return kAlpha_half_as_Red_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_F16:
            if (VK_FORMAT_R16G16B16A16_SFLOAT == format) {
                return kRGBA_half_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_F16_Clamped:
            if (VK_FORMAT_R16G16B16A16_SFLOAT == format) {
                return kRGBA_half_Clamped_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_F32:
            if (VK_FORMAT_R32G32B32A32_SFLOAT == format) {
                return kRGBA_float_GrPixelConfig;
            }
            break;
        case GrColorType::kR_16:
            if (VK_FORMAT_R16_UNORM == format) {
                return kR_16_GrPixelConfig;
            }
            break;
        case GrColorType::kRG_1616:
            if (VK_FORMAT_R16G16_UNORM == format) {
                return kRG_1616_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_16161616:
            if (VK_FORMAT_R16G16B16A16_UNORM == format) {
                return kRGBA_16161616_GrPixelConfig;
            }
            break;
        case GrColorType::kRG_F16:
            if (VK_FORMAT_R16G16_SFLOAT == format) {
                return kRG_half_GrPixelConfig;
            }
            break;
    }
    return kUnknown_GrPixelConfig;
}

GrPixelConfig GrVkCaps::validateBackendRenderTarget(const GrBackendRenderTarget& rt,
                                                    GrColorType ct) const {
    GrVkImageInfo imageInfo;
    if (!rt.getVkImageInfo(&imageInfo)) {
        return kUnknown_GrPixelConfig;
    }
    return validate_image_info(imageInfo.fFormat, ct, imageInfo.fYcbcrConversionInfo.isValid());
}

bool GrVkCaps::onAreColorTypeAndFormatCompatible(GrColorType ct,
                                                 const GrBackendFormat& format) const {
    const VkFormat* vkFormat = format.getVkFormat();
    const GrVkYcbcrConversionInfo* ycbcrInfo = format.getVkYcbcrConversionInfo();
    if (!vkFormat || !ycbcrInfo) {
        return false;
    }
    return kUnknown_GrPixelConfig != validate_image_info(*vkFormat, ct, ycbcrInfo->isValid());
}

GrPixelConfig GrVkCaps::onGetConfigFromBackendFormat(const GrBackendFormat& format,
                                                     GrColorType ct) const {
    const VkFormat* vkFormat = format.getVkFormat();
    const GrVkYcbcrConversionInfo* ycbcrInfo = format.getVkYcbcrConversionInfo();
    if (!vkFormat || !ycbcrInfo) {
        return kUnknown_GrPixelConfig;
    }
    return validate_image_info(*vkFormat, ct, ycbcrInfo->isValid());
}

static GrPixelConfig get_yuva_config(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8_UNORM:
            return kAlpha_8_as_Red_GrPixelConfig;
        case VK_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_GrPixelConfig;
        case VK_FORMAT_R8G8B8_UNORM:
            return kRGB_888_GrPixelConfig;
        case VK_FORMAT_R8G8_UNORM:
            return kRG_88_GrPixelConfig;
        case VK_FORMAT_B8G8R8A8_UNORM:
            return kBGRA_8888_GrPixelConfig;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
            return kRGBA_1010102_GrPixelConfig;
        case VK_FORMAT_R16_UNORM:
            return kR_16_GrPixelConfig;
        case VK_FORMAT_R16G16_UNORM:
            return kRG_1616_GrPixelConfig;
        // Experimental (for Y416 and mutant P016/P010)
        case VK_FORMAT_R16G16B16A16_UNORM:
            return kRGBA_16161616_GrPixelConfig;
        case VK_FORMAT_R16G16_SFLOAT:
            return kRG_half_GrPixelConfig;
        default:
            return kUnknown_GrPixelConfig;
    }
}

GrPixelConfig GrVkCaps::getYUVAConfigFromBackendFormat(const GrBackendFormat& format) const {
    const VkFormat* vkFormat = format.getVkFormat();
    if (!vkFormat) {
        return kUnknown_GrPixelConfig;
    }
    return get_yuva_config(*vkFormat);
}

GrColorType GrVkCaps::getYUVAColorTypeFromBackendFormat(const GrBackendFormat& format) const {
    const VkFormat* vkFormat = format.getVkFormat();
    if (!vkFormat) {
        return GrColorType::kUnknown;
    }
    switch (*vkFormat) {
        case VK_FORMAT_R8_UNORM:                 return GrColorType::kAlpha_8;
        case VK_FORMAT_R8G8B8A8_UNORM:           return GrColorType::kRGBA_8888;
        case VK_FORMAT_R8G8B8_UNORM:             return GrColorType::kRGB_888x;
        case VK_FORMAT_R8G8_UNORM:               return GrColorType::kRG_88;
        case VK_FORMAT_B8G8R8A8_UNORM:           return GrColorType::kBGRA_8888;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return GrColorType::kRGBA_1010102;
        case VK_FORMAT_R16_UNORM:                return GrColorType::kR_16;
        case VK_FORMAT_R16G16_UNORM:             return GrColorType::kRG_1616;
        // Experimental (for Y416 and mutant P016/P010)
        case VK_FORMAT_R16G16B16A16_UNORM:       return GrColorType::kRGBA_16161616;
        case VK_FORMAT_R16G16_SFLOAT:            return GrColorType::kRG_F16;
        default:                                 return GrColorType::kUnknown;
    }
    SkUNREACHABLE;
}

GrBackendFormat GrVkCaps::getBackendFormatFromColorType(GrColorType ct) const {
    GrPixelConfig config = GrColorTypeToPixelConfig(ct);
    if (config == kUnknown_GrPixelConfig) {
        return GrBackendFormat();
    }
    VkFormat format;
    if (!GrPixelConfigToVkFormat(config, &format)) {
        return GrBackendFormat();
    }
    return GrBackendFormat::MakeVk(format);
}

GrBackendFormat GrVkCaps::getBackendFormatFromCompressionType(
        SkImage::CompressionType compressionType) const {
    switch (compressionType) {
        case SkImage::kETC1_CompressionType:
            return GrBackendFormat::MakeVk(VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK);
    }
    SK_ABORT("Invalid compression type");
    return {};
}

bool GrVkCaps::canClearTextureOnCreation() const { return true; }

#ifdef SK_DEBUG
static bool format_color_type_valid_pair(VkFormat vkFormat, GrColorType colorType) {
    switch (colorType) {
        case GrColorType::kUnknown:
            return false;
        case GrColorType::kAlpha_8:
            return VK_FORMAT_R8_UNORM == vkFormat;
        case GrColorType::kBGR_565:
            return VK_FORMAT_R5G6B5_UNORM_PACK16 == vkFormat;
        case GrColorType::kABGR_4444:
            return VK_FORMAT_B4G4R4A4_UNORM_PACK16 == vkFormat ||
                   VK_FORMAT_R4G4B4A4_UNORM_PACK16 == vkFormat;
        case GrColorType::kRGBA_8888:
            return VK_FORMAT_R8G8B8A8_UNORM == vkFormat;
        case GrColorType::kRGBA_8888_SRGB:
            return VK_FORMAT_R8G8B8A8_SRGB == vkFormat;
        case GrColorType::kRGB_888x:
            GR_STATIC_ASSERT(GrCompressionTypeClosestColorType(SkImage::kETC1_CompressionType) ==
                             GrColorType::kRGB_888x);
            return VK_FORMAT_R8G8B8_UNORM == vkFormat || VK_FORMAT_R8G8B8A8_UNORM == vkFormat ||
                   VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK == vkFormat;
        case GrColorType::kRG_88:
            return VK_FORMAT_R8G8_UNORM == vkFormat;
        case GrColorType::kBGRA_8888:
            return VK_FORMAT_B8G8R8A8_UNORM == vkFormat;
        case GrColorType::kRGBA_1010102:
            return VK_FORMAT_A2B10G10R10_UNORM_PACK32 == vkFormat;
        case GrColorType::kGray_8:
            return VK_FORMAT_R8_UNORM == vkFormat;
        case GrColorType::kAlpha_F16:
            return VK_FORMAT_R16_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F16:
            return VK_FORMAT_R16G16B16A16_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F16_Clamped:
            return VK_FORMAT_R16G16B16A16_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F32:
            return VK_FORMAT_R32G32B32A32_SFLOAT == vkFormat;
        case GrColorType::kR_16:
            return VK_FORMAT_R16_UNORM == vkFormat;
        case GrColorType::kRG_1616:
            return VK_FORMAT_R16G16_UNORM == vkFormat;
        // Experimental (for Y416 and mutant P016/P010)
        case GrColorType::kRGBA_16161616:
            return VK_FORMAT_R16G16B16A16_UNORM == vkFormat;
        case GrColorType::kRG_F16:
            return VK_FORMAT_R16G16_SFLOAT == vkFormat;
    }
    SK_ABORT("Unknown color type");
    return false;
}
#endif

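// Returns the swizzle Ganesh applies when sampling (forOutput = false) or writing shader output
// (forOutput = true) so the shader sees the channels the GrColorType expects for the given
// backing format, e.g. alpha-only color types stored in a red-only format read from the R channel.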
static GrSwizzle get_swizzle(const GrBackendFormat& format, GrColorType colorType,
                             bool forOutput) {
    SkASSERT(format.getVkFormat());
    VkFormat vkFormat = *format.getVkFormat();
    SkASSERT(format_color_type_valid_pair(vkFormat, colorType));
    switch (colorType) {
        case GrColorType::kAlpha_8: // fall through
        case GrColorType::kAlpha_F16:
            if (forOutput) {
                return GrSwizzle::AAAA();
            } else {
                return GrSwizzle::RRRR();
            }
        case GrColorType::kGray_8:
            if (!forOutput) {
                return GrSwizzle::RRRA();
            }
            break;
        case GrColorType::kABGR_4444:
            if (VK_FORMAT_B4G4R4A4_UNORM_PACK16 == vkFormat) {
                return GrSwizzle::BGRA();
            }
            break;
        case GrColorType::kRGB_888x:
            if (!forOutput) {
                return GrSwizzle::RGB1();
            }
        default:
            return GrSwizzle::RGBA();
    }
    return GrSwizzle::RGBA();
}

GrSwizzle GrVkCaps::getTextureSwizzle(const GrBackendFormat& format, GrColorType colorType) const {
    return get_swizzle(format, colorType, false);
}

GrSwizzle GrVkCaps::getOutputSwizzle(const GrBackendFormat& format, GrColorType colorType) const {
    return get_swizzle(format, colorType, true);
}

size_t GrVkCaps::onTransferFromOffsetAlignment(GrColorType bufferColorType) const {
    // This GrColorType has 32 bpp but the Vulkan pixel format we use for it may have 24 bpp
    // (VK_FORMAT_R8G8B8_...) or may be 32 bpp. We don't support post transforming the pixel data
    // for transfer-from currently and don't want to have to pass info about the src surface here.
    if (bufferColorType == GrColorType::kRGB_888x) {
        return 0;
    }
    size_t bpp = GrColorTypeBytesPerPixel(bufferColorType);
    // The VkBufferImageCopy bufferOffset field must be both a multiple of 4 and of a single texel.
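    // For example: bpp 4 -> alignment 4, bpp 2 -> alignment 4 (2 * 2), and bpp 3 -> alignment 12,
    // the smallest value that is a multiple of both 4 and the 3 byte texel size.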
    switch (bpp & 0b11) {
        // bpp is already a multiple of 4.
        case 0: return bpp;
        // bpp is a multiple of 2 but not 4.
        case 2: return 2 * bpp;
        // bpp is not a multiple of 2.
        default: return 4 * bpp;
    }
}

GrCaps::SupportedRead GrVkCaps::supportedReadPixelsColorType(
        GrColorType srcColorType, const GrBackendFormat& srcBackendFormat,
        GrColorType dstColorType) const {
    const VkFormat* vkFormat = srcBackendFormat.getVkFormat();
    if (!vkFormat) {
        return {GrSwizzle(), GrColorType::kUnknown};
    }
    switch (*vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:
            return {GrSwizzle::RGBA(), GrColorType::kRGBA_8888};
        case VK_FORMAT_R8_UNORM:
            if (srcColorType == GrColorType::kAlpha_8) {
                return {GrSwizzle::RGBA(), GrColorType::kAlpha_8};
            } else if (srcColorType == GrColorType::kGray_8) {
                return {GrSwizzle::RGBA(), GrColorType::kGray_8};
            }
        case VK_FORMAT_B8G8R8A8_UNORM:
            return {GrSwizzle::RGBA(), GrColorType::kBGRA_8888};
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
            return {GrSwizzle::RGBA(), GrColorType::kBGR_565};
        case VK_FORMAT_R16G16B16A16_SFLOAT:
            if (srcColorType == GrColorType::kRGBA_F16) {
                return {GrSwizzle::RGBA(), GrColorType::kRGBA_F16};
            } else if (srcColorType == GrColorType::kRGBA_F16_Clamped) {
                return {GrSwizzle::RGBA(), GrColorType::kRGBA_F16_Clamped};
            }
        case VK_FORMAT_R16_SFLOAT:
            return {GrSwizzle::RGBA(), GrColorType::kAlpha_F16};
        case VK_FORMAT_R8G8B8_UNORM:
            return {GrSwizzle::RGBA(), GrColorType::kRGB_888x};
        case VK_FORMAT_R8G8_UNORM:
            return {GrSwizzle::RGBA(), GrColorType::kRG_88};
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
            return {GrSwizzle::RGBA(), GrColorType::kRGBA_1010102};
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
            return {GrSwizzle::RGBA(), GrColorType::kABGR_4444};
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
            return {GrSwizzle::RGBA(), GrColorType::kABGR_4444};
        case VK_FORMAT_R32G32B32A32_SFLOAT:
            return {GrSwizzle::RGBA(), GrColorType::kRGBA_F32};
        case VK_FORMAT_R8G8B8A8_SRGB:
            return {GrSwizzle::RGBA(), GrColorType::kRGBA_8888_SRGB};
        case VK_FORMAT_R16_UNORM:
            return {GrSwizzle::RGBA(), GrColorType::kR_16};
        case VK_FORMAT_R16G16_UNORM:
            return {GrSwizzle::RGBA(), GrColorType::kRG_1616};
        // Experimental (for Y416 and mutant P016/P010)
        case VK_FORMAT_R16G16B16A16_UNORM:
            return {GrSwizzle::RGBA(), GrColorType::kRGBA_16161616};
        case VK_FORMAT_R16G16_SFLOAT:
            return {GrSwizzle::RGBA(), GrColorType::kRG_F16};
        default:
            return {GrSwizzle(), GrColorType::kUnknown};
    }
}