GrGLGpu.cpp

/*
 * Copyright 2011 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkPixmap.h"
#include "include/core/SkStrokeRec.h"
#include "include/core/SkTypes.h"
#include "include/gpu/GrBackendSemaphore.h"
#include "include/gpu/GrBackendSurface.h"
#include "include/gpu/GrTypes.h"
#include "include/private/SkHalf.h"
#include "include/private/SkTemplates.h"
#include "include/private/SkTo.h"
#include "src/core/SkAutoMalloc.h"
#include "src/core/SkConvertPixels.h"
#include "src/core/SkMakeUnique.h"
#include "src/core/SkMipMap.h"
#include "src/core/SkTraceEvent.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrCpuBuffer.h"
#include "src/gpu/GrDataUtils.h"
#include "src/gpu/GrFixedClip.h"
#include "src/gpu/GrGpuResourcePriv.h"
#include "src/gpu/GrMesh.h"
#include "src/gpu/GrPipeline.h"
#include "src/gpu/GrRenderTargetPriv.h"
#include "src/gpu/GrShaderCaps.h"
#include "src/gpu/GrSurfaceProxyPriv.h"
#include "src/gpu/GrTexturePriv.h"
#include "src/gpu/gl/GrGLBuffer.h"
#include "src/gpu/gl/GrGLGpu.h"
#include "src/gpu/gl/GrGLGpuCommandBuffer.h"
#include "src/gpu/gl/GrGLSemaphore.h"
#include "src/gpu/gl/GrGLStencilAttachment.h"
#include "src/gpu/gl/GrGLTextureRenderTarget.h"
#include "src/gpu/gl/builders/GrGLShaderStringBuilder.h"
#include "src/sksl/SkSLCompiler.h"

#include <cmath>

#define GL_CALL(X) GR_GL_CALL(this->glInterface(), X)
#define GL_CALL_RET(RET, X) GR_GL_CALL_RET(this->glInterface(), RET, X)

#if GR_GL_CHECK_ALLOC_WITH_GET_ERROR
#define CLEAR_ERROR_BEFORE_ALLOC(iface) GrGLClearErr(iface)
#define GL_ALLOC_CALL(iface, call) GR_GL_CALL_NOERRCHECK(iface, call)
#define CHECK_ALLOC_ERROR(iface) GR_GL_GET_ERROR(iface)
#else
#define CLEAR_ERROR_BEFORE_ALLOC(iface)
#define GL_ALLOC_CALL(iface, call) GR_GL_CALL(iface, call)
#define CHECK_ALLOC_ERROR(iface) GR_GL_NO_ERROR
#endif

//#define USE_NSIGHT

///////////////////////////////////////////////////////////////////////////////

static const GrGLenum gXfermodeEquation2Blend[] = {
    // Basic OpenGL blend equations.
    GR_GL_FUNC_ADD,
    GR_GL_FUNC_SUBTRACT,
    GR_GL_FUNC_REVERSE_SUBTRACT,
    // GL_KHR_blend_equation_advanced.
    GR_GL_SCREEN,
    GR_GL_OVERLAY,
    GR_GL_DARKEN,
    GR_GL_LIGHTEN,
    GR_GL_COLORDODGE,
    GR_GL_COLORBURN,
    GR_GL_HARDLIGHT,
    GR_GL_SOFTLIGHT,
    GR_GL_DIFFERENCE,
    GR_GL_EXCLUSION,
    GR_GL_MULTIPLY,
    GR_GL_HSL_HUE,
    GR_GL_HSL_SATURATION,
    GR_GL_HSL_COLOR,
    GR_GL_HSL_LUMINOSITY,
    // Illegal... needs to map to something.
    GR_GL_FUNC_ADD,
};
GR_STATIC_ASSERT(0 == kAdd_GrBlendEquation);
GR_STATIC_ASSERT(1 == kSubtract_GrBlendEquation);
GR_STATIC_ASSERT(2 == kReverseSubtract_GrBlendEquation);
GR_STATIC_ASSERT(3 == kScreen_GrBlendEquation);
GR_STATIC_ASSERT(4 == kOverlay_GrBlendEquation);
GR_STATIC_ASSERT(5 == kDarken_GrBlendEquation);
GR_STATIC_ASSERT(6 == kLighten_GrBlendEquation);
GR_STATIC_ASSERT(7 == kColorDodge_GrBlendEquation);
GR_STATIC_ASSERT(8 == kColorBurn_GrBlendEquation);
GR_STATIC_ASSERT(9 == kHardLight_GrBlendEquation);
GR_STATIC_ASSERT(10 == kSoftLight_GrBlendEquation);
GR_STATIC_ASSERT(11 == kDifference_GrBlendEquation);
GR_STATIC_ASSERT(12 == kExclusion_GrBlendEquation);
GR_STATIC_ASSERT(13 == kMultiply_GrBlendEquation);
GR_STATIC_ASSERT(14 == kHSLHue_GrBlendEquation);
GR_STATIC_ASSERT(15 == kHSLSaturation_GrBlendEquation);
GR_STATIC_ASSERT(16 == kHSLColor_GrBlendEquation);
GR_STATIC_ASSERT(17 == kHSLLuminosity_GrBlendEquation);
GR_STATIC_ASSERT(SK_ARRAY_COUNT(gXfermodeEquation2Blend) == kGrBlendEquationCnt);

static const GrGLenum gXfermodeCoeff2Blend[] = {
    GR_GL_ZERO,
    GR_GL_ONE,
    GR_GL_SRC_COLOR,
    GR_GL_ONE_MINUS_SRC_COLOR,
    GR_GL_DST_COLOR,
    GR_GL_ONE_MINUS_DST_COLOR,
    GR_GL_SRC_ALPHA,
    GR_GL_ONE_MINUS_SRC_ALPHA,
    GR_GL_DST_ALPHA,
    GR_GL_ONE_MINUS_DST_ALPHA,
    GR_GL_CONSTANT_COLOR,
    GR_GL_ONE_MINUS_CONSTANT_COLOR,
    GR_GL_CONSTANT_ALPHA,
    GR_GL_ONE_MINUS_CONSTANT_ALPHA,
    // extended blend coeffs
    GR_GL_SRC1_COLOR,
    GR_GL_ONE_MINUS_SRC1_COLOR,
    GR_GL_SRC1_ALPHA,
    GR_GL_ONE_MINUS_SRC1_ALPHA,
    // Illegal... needs to map to something.
    GR_GL_ZERO,
};

bool GrGLGpu::BlendCoeffReferencesConstant(GrBlendCoeff coeff) {
    static const bool gCoeffReferencesBlendConst[] = {
        false,
        false,
        false,
        false,
        false,
        false,
        false,
        false,
        false,
        false,
        true,
        true,
        true,
        true,
        // extended blend coeffs
        false,
        false,
        false,
        false,
        // Illegal.
        false,
    };
    return gCoeffReferencesBlendConst[coeff];

    GR_STATIC_ASSERT(kGrBlendCoeffCnt == SK_ARRAY_COUNT(gCoeffReferencesBlendConst));
    GR_STATIC_ASSERT(0 == kZero_GrBlendCoeff);
    GR_STATIC_ASSERT(1 == kOne_GrBlendCoeff);
    GR_STATIC_ASSERT(2 == kSC_GrBlendCoeff);
    GR_STATIC_ASSERT(3 == kISC_GrBlendCoeff);
    GR_STATIC_ASSERT(4 == kDC_GrBlendCoeff);
    GR_STATIC_ASSERT(5 == kIDC_GrBlendCoeff);
    GR_STATIC_ASSERT(6 == kSA_GrBlendCoeff);
    GR_STATIC_ASSERT(7 == kISA_GrBlendCoeff);
    GR_STATIC_ASSERT(8 == kDA_GrBlendCoeff);
    GR_STATIC_ASSERT(9 == kIDA_GrBlendCoeff);
    GR_STATIC_ASSERT(10 == kConstC_GrBlendCoeff);
    GR_STATIC_ASSERT(11 == kIConstC_GrBlendCoeff);
    GR_STATIC_ASSERT(12 == kConstA_GrBlendCoeff);
    GR_STATIC_ASSERT(13 == kIConstA_GrBlendCoeff);
    GR_STATIC_ASSERT(14 == kS2C_GrBlendCoeff);
    GR_STATIC_ASSERT(15 == kIS2C_GrBlendCoeff);
    GR_STATIC_ASSERT(16 == kS2A_GrBlendCoeff);
    GR_STATIC_ASSERT(17 == kIS2A_GrBlendCoeff);
    // Assertions for gXfermodeCoeff2Blend have to be in GrGpu scope.
    GR_STATIC_ASSERT(kGrBlendCoeffCnt == SK_ARRAY_COUNT(gXfermodeCoeff2Blend));
}
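// Note (editorial): the GR_STATIC_ASSERTs above sit after the return statement, which is
// harmless; they are compile-time checks that the GrBlendCoeff enum values line up with the two
// lookup tables, so their placement in unreachable code has no runtime effect.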
//////////////////////////////////////////////////////////////////////////////

static int gl_target_to_binding_index(GrGLenum target) {
    switch (target) {
        case GR_GL_TEXTURE_2D:
            return 0;
        case GR_GL_TEXTURE_RECTANGLE:
            return 1;
        case GR_GL_TEXTURE_EXTERNAL:
            return 2;
    }
    SK_ABORT("Unexpected GL texture target.");
    return 0;
}

GrGpuResource::UniqueID GrGLGpu::TextureUnitBindings::boundID(GrGLenum target) const {
    return fTargetBindings[gl_target_to_binding_index(target)].fBoundResourceID;
}

bool GrGLGpu::TextureUnitBindings::hasBeenModified(GrGLenum target) const {
    return fTargetBindings[gl_target_to_binding_index(target)].fHasBeenModified;
}

void GrGLGpu::TextureUnitBindings::setBoundID(GrGLenum target, GrGpuResource::UniqueID resourceID) {
    int targetIndex = gl_target_to_binding_index(target);
    fTargetBindings[targetIndex].fBoundResourceID = resourceID;
    fTargetBindings[targetIndex].fHasBeenModified = true;
}

void GrGLGpu::TextureUnitBindings::invalidateForScratchUse(GrGLenum target) {
    this->setBoundID(target, GrGpuResource::UniqueID());
}

void GrGLGpu::TextureUnitBindings::invalidateAllTargets(bool markUnmodified) {
    for (auto& targetBinding : fTargetBindings) {
        targetBinding.fBoundResourceID.makeInvalid();
        if (markUnmodified) {
            targetBinding.fHasBeenModified = false;
        }
    }
}

//////////////////////////////////////////////////////////////////////////////

static GrGLenum filter_to_gl_mag_filter(GrSamplerState::Filter filter) {
    switch (filter) {
        case GrSamplerState::Filter::kNearest: return GR_GL_NEAREST;
        case GrSamplerState::Filter::kBilerp:  return GR_GL_LINEAR;
        case GrSamplerState::Filter::kMipMap:  return GR_GL_LINEAR;
    }
    SK_ABORT("Unknown filter");
    return 0;
}

static GrGLenum filter_to_gl_min_filter(GrSamplerState::Filter filter) {
    switch (filter) {
        case GrSamplerState::Filter::kNearest: return GR_GL_NEAREST;
        case GrSamplerState::Filter::kBilerp:  return GR_GL_LINEAR;
        case GrSamplerState::Filter::kMipMap:  return GR_GL_LINEAR_MIPMAP_LINEAR;
    }
    SK_ABORT("Unknown filter");
    return 0;
}

static inline GrGLenum wrap_mode_to_gl_wrap(GrSamplerState::WrapMode wrapMode,
                                            const GrCaps& caps) {
    switch (wrapMode) {
        case GrSamplerState::WrapMode::kClamp:        return GR_GL_CLAMP_TO_EDGE;
        case GrSamplerState::WrapMode::kRepeat:       return GR_GL_REPEAT;
        case GrSamplerState::WrapMode::kMirrorRepeat: return GR_GL_MIRRORED_REPEAT;
        case GrSamplerState::WrapMode::kClampToBorder:
            // May not be supported but should have been caught earlier
            SkASSERT(caps.clampToBorderSupport());
            return GR_GL_CLAMP_TO_BORDER;
    }
    SK_ABORT("Unknown wrap mode");
    return 0;
}

///////////////////////////////////////////////////////////////////////////////

class GrGLGpu::SamplerObjectCache {
public:
    SamplerObjectCache(GrGLGpu* gpu) : fGpu(gpu) {
        fNumTextureUnits = fGpu->glCaps().shaderCaps()->maxFragmentSamplers();
        fHWBoundSamplers.reset(new GrGLuint[fNumTextureUnits]);
        std::fill_n(fHWBoundSamplers.get(), fNumTextureUnits, 0);
        std::fill_n(fSamplers, kNumSamplers, 0);
    }

    ~SamplerObjectCache() {
        if (!fNumTextureUnits) {
            // We've already been abandoned.
            return;
        }
        for (GrGLuint sampler : fSamplers) {
            // The spec states that "zero" values should be silently ignored, however they still
            // trigger GL errors on some NVIDIA platforms.
            if (sampler) {
                GR_GL_CALL(fGpu->glInterface(), DeleteSamplers(1, &sampler));
            }
        }
    }

    void bindSampler(int unitIdx, const GrSamplerState& state) {
        int index = StateToIndex(state);
        if (!fSamplers[index]) {
            GrGLuint s;
            GR_GL_CALL(fGpu->glInterface(), GenSamplers(1, &s));
            if (!s) {
                return;
            }
            fSamplers[index] = s;
            auto minFilter = filter_to_gl_min_filter(state.filter());
            auto magFilter = filter_to_gl_mag_filter(state.filter());
            auto wrapX = wrap_mode_to_gl_wrap(state.wrapModeX(), fGpu->glCaps());
            auto wrapY = wrap_mode_to_gl_wrap(state.wrapModeY(), fGpu->glCaps());
            GR_GL_CALL(fGpu->glInterface(),
                       SamplerParameteri(s, GR_GL_TEXTURE_MIN_FILTER, minFilter));
            GR_GL_CALL(fGpu->glInterface(),
                       SamplerParameteri(s, GR_GL_TEXTURE_MAG_FILTER, magFilter));
            GR_GL_CALL(fGpu->glInterface(), SamplerParameteri(s, GR_GL_TEXTURE_WRAP_S, wrapX));
            GR_GL_CALL(fGpu->glInterface(), SamplerParameteri(s, GR_GL_TEXTURE_WRAP_T, wrapY));
        }
        if (fHWBoundSamplers[unitIdx] != fSamplers[index]) {
            GR_GL_CALL(fGpu->glInterface(), BindSampler(unitIdx, fSamplers[index]));
            fHWBoundSamplers[unitIdx] = fSamplers[index];
        }
    }

    void invalidateBindings() {
        // When we have sampler support we always use samplers. So setting these to zero will cause
        // a rebind on next usage.
        std::fill_n(fHWBoundSamplers.get(), fNumTextureUnits, 0);
    }

    void abandon() {
        fHWBoundSamplers.reset();
        fNumTextureUnits = 0;
    }

    void release() {
        if (!fNumTextureUnits) {
            // We've already been abandoned.
            return;
        }
        GR_GL_CALL(fGpu->glInterface(), DeleteSamplers(kNumSamplers, fSamplers));
        std::fill_n(fSamplers, kNumSamplers, 0);
        // Deleting a bound sampler implicitly binds sampler 0.
        std::fill_n(fHWBoundSamplers.get(), fNumTextureUnits, 0);
    }

private:
    static int StateToIndex(const GrSamplerState& state) {
        int filter = static_cast<int>(state.filter());
        SkASSERT(filter >= 0 && filter < 3);
        int wrapX = static_cast<int>(state.wrapModeX());
        SkASSERT(wrapX >= 0 && wrapX < 4);
        int wrapY = static_cast<int>(state.wrapModeY());
        SkASSERT(wrapY >= 0 && wrapY < 4);
        int idx = 16 * filter + 4 * wrapX + wrapY;
        SkASSERT(idx < kNumSamplers);
        return idx;
    }
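    // Note (editorial): StateToIndex() packs (filter, wrapX, wrapY) into a single table slot as
    // 16 * filter + 4 * wrapX + wrapY. With 3 filters and 4 wrap modes per axis that gives
    // 3 * 4 * 4 = 48 distinct states, which is why kNumSamplers below is 48.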
    GrGLGpu* fGpu;
    static constexpr int kNumSamplers = 48;
    std::unique_ptr<GrGLuint[]> fHWBoundSamplers;
    GrGLuint fSamplers[kNumSamplers];
    int fNumTextureUnits;
};
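// Note (editorial, summarizing the class above): GL sampler objects are created lazily, one per
// distinct GrSamplerState, the first time bindSampler() sees that state, and are reused for later
// binds. invalidateBindings() only forgets which unit has which sampler bound; release() deletes
// the GL objects, while abandon() drops the tracking without GL calls (the context is assumed gone).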
///////////////////////////////////////////////////////////////////////////////

sk_sp<GrGpu> GrGLGpu::Make(sk_sp<const GrGLInterface> interface, const GrContextOptions& options,
                           GrContext* context) {
    if (!interface) {
        interface = GrGLMakeNativeInterface();
        // For clients that have written their own GrGLCreateNativeInterface and haven't yet
        // updated to GrGLMakeNativeInterface.
        if (!interface) {
            interface = sk_ref_sp(GrGLCreateNativeInterface());
        }
        if (!interface) {
            return nullptr;
        }
    }
#ifdef USE_NSIGHT
    const_cast<GrContextOptions&>(options).fSuppressPathRendering = true;
#endif
    auto glContext = GrGLContext::Make(std::move(interface), options);
    if (!glContext) {
        return nullptr;
    }
    return sk_sp<GrGpu>(new GrGLGpu(std::move(glContext), context));
}

GrGLGpu::GrGLGpu(std::unique_ptr<GrGLContext> ctx, GrContext* context)
        : GrGpu(context)
        , fGLContext(std::move(ctx))
        , fProgramCache(new ProgramCache(this))
        , fHWProgramID(0)
        , fTempSrcFBOID(0)
        , fTempDstFBOID(0)
        , fStencilClearFBOID(0) {
    SkASSERT(fGLContext);
    GrGLClearErr(this->glInterface());
    fCaps = sk_ref_sp(fGLContext->caps());

    fHWTextureUnitBindings.reset(this->numTextureUnits());

    this->hwBufferState(GrGpuBufferType::kVertex)->fGLTarget = GR_GL_ARRAY_BUFFER;
    this->hwBufferState(GrGpuBufferType::kIndex)->fGLTarget = GR_GL_ELEMENT_ARRAY_BUFFER;
    if (GrGLCaps::kChromium_TransferBufferType == this->glCaps().transferBufferType()) {
        this->hwBufferState(GrGpuBufferType::kXferCpuToGpu)->fGLTarget =
                GR_GL_PIXEL_UNPACK_TRANSFER_BUFFER_CHROMIUM;
        this->hwBufferState(GrGpuBufferType::kXferGpuToCpu)->fGLTarget =
                GR_GL_PIXEL_PACK_TRANSFER_BUFFER_CHROMIUM;
    } else {
        this->hwBufferState(GrGpuBufferType::kXferCpuToGpu)->fGLTarget = GR_GL_PIXEL_UNPACK_BUFFER;
        this->hwBufferState(GrGpuBufferType::kXferGpuToCpu)->fGLTarget = GR_GL_PIXEL_PACK_BUFFER;
    }
    for (int i = 0; i < kGrGpuBufferTypeCount; ++i) {
        fHWBufferState[i].invalidate();
    }
    GR_STATIC_ASSERT(4 == SK_ARRAY_COUNT(fHWBufferState));

    if (this->glCaps().shaderCaps()->pathRenderingSupport()) {
        fPathRendering.reset(new GrGLPathRendering(this));
    }

    if (this->glCaps().samplerObjectSupport()) {
        fSamplerObjectCache.reset(new SamplerObjectCache(this));
    }
}

GrGLGpu::~GrGLGpu() {
    // Ensure any GrGpuResource objects get deleted first, since they may require a working GrGLGpu
    // to release the resources held by the objects themselves.
    fPathRendering.reset();
    fCopyProgramArrayBuffer.reset();
    fMipmapProgramArrayBuffer.reset();

    fHWProgram.reset();
    if (fHWProgramID) {
        // detach the current program so there is no confusion on OpenGL's part
        // that we want it to be deleted
        GL_CALL(UseProgram(0));
    }

    if (fTempSrcFBOID) {
        this->deleteFramebuffer(fTempSrcFBOID);
    }
    if (fTempDstFBOID) {
        this->deleteFramebuffer(fTempDstFBOID);
    }
    if (fStencilClearFBOID) {
        this->deleteFramebuffer(fStencilClearFBOID);
    }

    for (size_t i = 0; i < SK_ARRAY_COUNT(fCopyPrograms); ++i) {
        if (0 != fCopyPrograms[i].fProgram) {
            GL_CALL(DeleteProgram(fCopyPrograms[i].fProgram));
        }
    }

    for (size_t i = 0; i < SK_ARRAY_COUNT(fMipmapPrograms); ++i) {
        if (0 != fMipmapPrograms[i].fProgram) {
            GL_CALL(DeleteProgram(fMipmapPrograms[i].fProgram));
        }
    }

    delete fProgramCache;
    fSamplerObjectCache.reset();
}

void GrGLGpu::disconnect(DisconnectType type) {
    INHERITED::disconnect(type);
    if (DisconnectType::kCleanup == type) {
        if (fHWProgramID) {
            GL_CALL(UseProgram(0));
        }
        if (fTempSrcFBOID) {
            this->deleteFramebuffer(fTempSrcFBOID);
        }
        if (fTempDstFBOID) {
            this->deleteFramebuffer(fTempDstFBOID);
        }
        if (fStencilClearFBOID) {
            this->deleteFramebuffer(fStencilClearFBOID);
        }
        for (size_t i = 0; i < SK_ARRAY_COUNT(fCopyPrograms); ++i) {
            if (fCopyPrograms[i].fProgram) {
                GL_CALL(DeleteProgram(fCopyPrograms[i].fProgram));
            }
        }
        for (size_t i = 0; i < SK_ARRAY_COUNT(fMipmapPrograms); ++i) {
            if (fMipmapPrograms[i].fProgram) {
                GL_CALL(DeleteProgram(fMipmapPrograms[i].fProgram));
            }
        }
        if (fSamplerObjectCache) {
            fSamplerObjectCache->release();
        }
    } else {
        if (fProgramCache) {
            fProgramCache->abandon();
        }
        if (fSamplerObjectCache) {
            fSamplerObjectCache->abandon();
        }
    }

    fHWProgram.reset();
    delete fProgramCache;
    fProgramCache = nullptr;

    fHWProgramID = 0;
    fTempSrcFBOID = 0;
    fTempDstFBOID = 0;
    fStencilClearFBOID = 0;
    fCopyProgramArrayBuffer.reset();
    for (size_t i = 0; i < SK_ARRAY_COUNT(fCopyPrograms); ++i) {
        fCopyPrograms[i].fProgram = 0;
    }
    fMipmapProgramArrayBuffer.reset();
    for (size_t i = 0; i < SK_ARRAY_COUNT(fMipmapPrograms); ++i) {
        fMipmapPrograms[i].fProgram = 0;
    }

    if (this->glCaps().shaderCaps()->pathRenderingSupport()) {
        this->glPathRendering()->disconnect(type);
    }
}
///////////////////////////////////////////////////////////////////////////////

void GrGLGpu::onResetContext(uint32_t resetBits) {
    if (resetBits & kMisc_GrGLBackendState) {
        // we don't use the zb at all
        GL_CALL(Disable(GR_GL_DEPTH_TEST));
        GL_CALL(DepthMask(GR_GL_FALSE));

        // We don't use face culling.
        GL_CALL(Disable(GR_GL_CULL_FACE));
        // We do use separate stencil. Our algorithms don't care which face is front vs. back so
        // just set this to the default for self-consistency.
        GL_CALL(FrontFace(GR_GL_CCW));

        this->hwBufferState(GrGpuBufferType::kXferCpuToGpu)->invalidate();
        this->hwBufferState(GrGpuBufferType::kXferGpuToCpu)->invalidate();

        if (GR_IS_GR_GL(this->glStandard())) {
#ifndef USE_NSIGHT
            // Desktop-only state that we never change
            if (!this->glCaps().isCoreProfile()) {
                GL_CALL(Disable(GR_GL_POINT_SMOOTH));
                GL_CALL(Disable(GR_GL_LINE_SMOOTH));
                GL_CALL(Disable(GR_GL_POLYGON_SMOOTH));
                GL_CALL(Disable(GR_GL_POLYGON_STIPPLE));
                GL_CALL(Disable(GR_GL_COLOR_LOGIC_OP));
                GL_CALL(Disable(GR_GL_INDEX_LOGIC_OP));
            }
            // The windows NVIDIA driver has GL_ARB_imaging in the extension string when using a
            // core profile. This seems like a bug since the core spec removes any mention of
            // GL_ARB_imaging.
            if (this->glCaps().imagingSupport() && !this->glCaps().isCoreProfile()) {
                GL_CALL(Disable(GR_GL_COLOR_TABLE));
            }
            GL_CALL(Disable(GR_GL_POLYGON_OFFSET_FILL));

            if (this->caps()->wireframeMode()) {
                GL_CALL(PolygonMode(GR_GL_FRONT_AND_BACK, GR_GL_LINE));
            } else {
                GL_CALL(PolygonMode(GR_GL_FRONT_AND_BACK, GR_GL_FILL));
            }
#endif
            // Since ES doesn't support glPointSize at all we always use the VS to
            // set the point size
            GL_CALL(Enable(GR_GL_VERTEX_PROGRAM_POINT_SIZE));
        }

        if (GR_IS_GR_GL_ES(this->glStandard()) &&
            this->glCaps().fbFetchRequiresEnablePerSample()) {
            // The arm extension requires specifically enabling MSAA fetching per sample.
            // On some devices this may have a perf hit. Also multiple render targets are disabled
            GL_CALL(Enable(GR_GL_FETCH_PER_SAMPLE));
        }
        fHWWriteToColor = kUnknown_TriState;
        // we only ever use lines in hairline mode
        GL_CALL(LineWidth(1));
        GL_CALL(Disable(GR_GL_DITHER));

        fHWClearColor[0] = fHWClearColor[1] = fHWClearColor[2] = fHWClearColor[3] = SK_FloatNaN;
    }

    if (resetBits & kMSAAEnable_GrGLBackendState) {
        fMSAAEnabled = kUnknown_TriState;

        if (this->caps()->mixedSamplesSupport()) {
            // The skia blend modes all use premultiplied alpha and therefore expect RGBA coverage
            // modulation. This state has no effect when not rendering to a mixed sampled target.
            GL_CALL(CoverageModulation(GR_GL_RGBA));
        }
    }

    fHWActiveTextureUnitIdx = -1; // invalid
    fLastPrimitiveType = static_cast<GrPrimitiveType>(-1);

    if (resetBits & kTextureBinding_GrGLBackendState) {
        for (int s = 0; s < this->numTextureUnits(); ++s) {
            fHWTextureUnitBindings[s].invalidateAllTargets(false);
        }
        if (fSamplerObjectCache) {
            fSamplerObjectCache->invalidateBindings();
        }
    }

    if (resetBits & kBlend_GrGLBackendState) {
        fHWBlendState.invalidate();
    }

    if (resetBits & kView_GrGLBackendState) {
        fHWScissorSettings.invalidate();
        fHWWindowRectsState.invalidate();
        fHWViewport.invalidate();
    }

    if (resetBits & kStencil_GrGLBackendState) {
        fHWStencilSettings.invalidate();
        fHWStencilTestEnabled = kUnknown_TriState;
    }

    // Vertex
    if (resetBits & kVertex_GrGLBackendState) {
        fHWVertexArrayState.invalidate();
        this->hwBufferState(GrGpuBufferType::kVertex)->invalidate();
        this->hwBufferState(GrGpuBufferType::kIndex)->invalidate();
    }

    if (resetBits & kRenderTarget_GrGLBackendState) {
        fHWBoundRenderTargetUniqueID.makeInvalid();
        fHWSRGBFramebuffer = kUnknown_TriState;
    }

    if (resetBits & kPathRendering_GrGLBackendState) {
        if (this->caps()->shaderCaps()->pathRenderingSupport()) {
            this->glPathRendering()->resetContext();
        }
    }

    // we assume these values
    if (resetBits & kPixelStore_GrGLBackendState) {
        if (this->caps()->writePixelsRowBytesSupport()) {
            GL_CALL(PixelStorei(GR_GL_UNPACK_ROW_LENGTH, 0));
        }
        if (this->glCaps().readPixelsRowBytesSupport()) {
            GL_CALL(PixelStorei(GR_GL_PACK_ROW_LENGTH, 0));
        }
        if (this->glCaps().packFlipYSupport()) {
            GL_CALL(PixelStorei(GR_GL_PACK_REVERSE_ROW_ORDER, GR_GL_FALSE));
        }
    }

    if (resetBits & kProgram_GrGLBackendState) {
        fHWProgramID = 0;
        fHWProgram.reset();
    }

    ++fResetTimestampForTextureParameters;
}

static bool check_backend_texture(const GrBackendTexture& backendTex, const GrGLCaps& caps,
                                  GrGLTexture::IDDesc* idDesc) {
    GrGLTextureInfo info;
    if (!backendTex.getGLTextureInfo(&info) || !info.fID || !info.fFormat) {
        return false;
    }

    idDesc->fInfo = info;

    if (GR_GL_TEXTURE_EXTERNAL == idDesc->fInfo.fTarget) {
        if (!caps.shaderCaps()->externalTextureSupport()) {
            return false;
        }
    } else if (GR_GL_TEXTURE_RECTANGLE == idDesc->fInfo.fTarget) {
        if (!caps.rectangleTextureSupport()) {
            return false;
        }
    } else if (GR_GL_TEXTURE_2D != idDesc->fInfo.fTarget) {
        return false;
    }
    if (backendTex.isProtected()) {
        // Not supported in GL backend at this time.
        return false;
    }
    return true;
}
sk_sp<GrTexture> GrGLGpu::onWrapBackendTexture(const GrBackendTexture& backendTex,
                                               GrColorType grColorType, GrWrapOwnership ownership,
                                               GrWrapCacheable cacheable, GrIOType ioType) {
    GrGLTexture::IDDesc idDesc;
    if (!check_backend_texture(backendTex, this->glCaps(), &idDesc)) {
        return nullptr;
    }
    if (kBorrow_GrWrapOwnership == ownership) {
        idDesc.fOwnership = GrBackendObjectOwnership::kBorrowed;
    } else {
        idDesc.fOwnership = GrBackendObjectOwnership::kOwned;
    }

    GrPixelConfig config = this->caps()->getConfigFromBackendFormat(backendTex.getBackendFormat(),
                                                                    grColorType);
    SkASSERT(kUnknown_GrPixelConfig != config);

    GrSurfaceDesc surfDesc;
    surfDesc.fWidth = backendTex.width();
    surfDesc.fHeight = backendTex.height();
    surfDesc.fConfig = config;

    GrMipMapsStatus mipMapsStatus = backendTex.hasMipMaps() ? GrMipMapsStatus::kValid
                                                            : GrMipMapsStatus::kNotAllocated;

    auto texture = GrGLTexture::MakeWrapped(this, surfDesc, mipMapsStatus, idDesc,
                                            backendTex.getGLTextureParams(), cacheable, ioType);
    // We don't know what parameters are already set on wrapped textures.
    texture->textureParamsModified();
    return std::move(texture);
}

sk_sp<GrTexture> GrGLGpu::onWrapRenderableBackendTexture(const GrBackendTexture& backendTex,
                                                         int sampleCnt,
                                                         GrColorType colorType,
                                                         GrWrapOwnership ownership,
                                                         GrWrapCacheable cacheable) {
    GrGLTexture::IDDesc idDesc;
    if (!check_backend_texture(backendTex, this->glCaps(), &idDesc)) {
        return nullptr;
    }

    // We don't support rendering to a EXTERNAL texture.
    if (GR_GL_TEXTURE_EXTERNAL == idDesc.fInfo.fTarget) {
        return nullptr;
    }

    if (kBorrow_GrWrapOwnership == ownership) {
        idDesc.fOwnership = GrBackendObjectOwnership::kBorrowed;
    } else {
        idDesc.fOwnership = GrBackendObjectOwnership::kOwned;
    }

    const GrCaps* caps = this->caps();

    GrPixelConfig config = caps->getConfigFromBackendFormat(backendTex.getBackendFormat(),
                                                            colorType);
    SkASSERT(kUnknown_GrPixelConfig != config);

    GrSurfaceDesc surfDesc;
    surfDesc.fWidth = backendTex.width();
    surfDesc.fHeight = backendTex.height();
    surfDesc.fConfig = config;

    sampleCnt =
            caps->getRenderTargetSampleCount(sampleCnt, colorType, backendTex.getBackendFormat());
    if (sampleCnt < 1) {
        return nullptr;
    }

    GrGLRenderTarget::IDDesc rtIDDesc;
    if (!this->createRenderTargetObjects(surfDesc, sampleCnt, idDesc.fInfo, &rtIDDesc)) {
        return nullptr;
    }

    GrMipMapsStatus mipMapsStatus = backendTex.hasMipMaps() ? GrMipMapsStatus::kDirty
                                                            : GrMipMapsStatus::kNotAllocated;

    sk_sp<GrGLTextureRenderTarget> texRT(GrGLTextureRenderTarget::MakeWrapped(
            this, surfDesc, sampleCnt, idDesc, backendTex.getGLTextureParams(), rtIDDesc, cacheable,
            mipMapsStatus));
    texRT->baseLevelWasBoundToFBO();
    // We don't know what parameters are already set on wrapped textures.
    texRT->textureParamsModified();
    return std::move(texRT);
}

sk_sp<GrRenderTarget> GrGLGpu::onWrapBackendRenderTarget(const GrBackendRenderTarget& backendRT,
                                                         GrColorType grColorType) {
    GrGLFramebufferInfo info;
    if (!backendRT.getGLFramebufferInfo(&info)) {
        return nullptr;
    }

    if (backendRT.isProtected()) {
        // Not supported in GL at this time.
        return nullptr;
    }

    GrGLRenderTarget::IDDesc idDesc;
    idDesc.fRTFBOID = info.fFBOID;
    idDesc.fMSColorRenderbufferID = 0;
    idDesc.fTexFBOID = GrGLRenderTarget::kUnresolvableFBOID;
    idDesc.fRTFBOOwnership = GrBackendObjectOwnership::kBorrowed;

    GrPixelConfig config = this->caps()->getConfigFromBackendFormat(backendRT.getBackendFormat(),
                                                                    grColorType);
    SkASSERT(kUnknown_GrPixelConfig != config);

    GrSurfaceDesc desc;
    desc.fWidth = backendRT.width();
    desc.fHeight = backendRT.height();
    desc.fConfig = config;

    int sampleCount =
            this->caps()->getRenderTargetSampleCount(backendRT.sampleCnt(), grColorType,
                                                     backendRT.getBackendFormat());

    return GrGLRenderTarget::MakeWrapped(this, desc, sampleCount, info.fFormat, idDesc,
                                         backendRT.stencilBits());
}

sk_sp<GrRenderTarget> GrGLGpu::onWrapBackendTextureAsRenderTarget(const GrBackendTexture& tex,
                                                                  int sampleCnt,
                                                                  GrColorType grColorType) {
    GrGLTextureInfo info;
    if (!tex.getGLTextureInfo(&info) || !info.fID) {
        return nullptr;
    }

    if (GR_GL_TEXTURE_RECTANGLE != info.fTarget &&
        GR_GL_TEXTURE_2D != info.fTarget) {
        // Only texture rectangle and texture 2d are supported. We do not check whether texture
        // rectangle is supported by Skia - if the caller provided us with a texture rectangle,
        // we assume the necessary support exists.
        return nullptr;
    }

    GrPixelConfig config = this->caps()->getConfigFromBackendFormat(tex.getBackendFormat(),
                                                                    grColorType);
    SkASSERT(kUnknown_GrPixelConfig != config);

    GrSurfaceDesc surfDesc;
    surfDesc.fWidth = tex.width();
    surfDesc.fHeight = tex.height();
    surfDesc.fConfig = config;

    int sampleCount = this->caps()->getRenderTargetSampleCount(sampleCnt, grColorType,
                                                               tex.getBackendFormat());

    GrGLRenderTarget::IDDesc rtIDDesc;
    if (!this->createRenderTargetObjects(surfDesc, sampleCount, info, &rtIDDesc)) {
        return nullptr;
    }
    return GrGLRenderTarget::MakeWrapped(this, surfDesc, sampleCount, info.fFormat, rtIDDesc, 0);
}
static bool check_write_and_transfer_input(GrGLTexture* glTex) {
    if (!glTex) {
        return false;
    }

    // Write or transfer of pixels is not implemented for TEXTURE_EXTERNAL textures
    if (GR_GL_TEXTURE_EXTERNAL == glTex->target()) {
        return false;
    }

    return true;
}

bool GrGLGpu::onWritePixels(GrSurface* surface, int left, int top, int width, int height,
                            GrColorType srcColorType, const GrMipLevel texels[],
                            int mipLevelCount) {
    auto glTex = static_cast<GrGLTexture*>(surface->asTexture());

    if (!check_write_and_transfer_input(glTex)) {
        return false;
    }

    this->bindTextureToScratchUnit(glTex->target(), glTex->textureID());

    // No sRGB transformation occurs in uploadTexData. We choose to make the src config match the
    // srgb-ness of the surface to avoid issues in ES2 where internal/external formats must match.
    // When we're on ES2 and the dst is GL_SRGB_ALPHA by making the config be kSRGB_8888 we know
    // that our caps will choose GL_SRGB_ALPHA as the external format, too. On ES3 or regular GL our
    // caps knows to make the external format be GL_RGBA.
    auto srcAsConfig = GrColorTypeToPixelConfig(srcColorType);

    SkASSERT(!GrPixelConfigIsCompressed(glTex->config()));
    return this->uploadTexData(glTex->config(), glTex->width(), glTex->height(), glTex->target(),
                               kWrite_UploadType, left, top, width, height, srcAsConfig, texels,
                               mipLevelCount);
}

// For GL_[UN]PACK_ALIGNMENT. TODO: This really wants to be GrColorType.
static inline GrGLint config_alignment(GrPixelConfig config) {
    SkASSERT(!GrPixelConfigIsCompressed(config));
    switch (config) {
        case kAlpha_8_GrPixelConfig:
        case kAlpha_8_as_Alpha_GrPixelConfig:
        case kAlpha_8_as_Red_GrPixelConfig:
        case kGray_8_GrPixelConfig:
        case kGray_8_as_Lum_GrPixelConfig:
        case kGray_8_as_Red_GrPixelConfig:
            return 1;
        case kRGB_565_GrPixelConfig:
        case kRGBA_4444_GrPixelConfig:
        case kRG_88_GrPixelConfig:
        case kAlpha_half_GrPixelConfig:
        case kAlpha_half_as_Lum_GrPixelConfig:
        case kAlpha_half_as_Red_GrPixelConfig:
        case kRGBA_half_GrPixelConfig:
        case kRGBA_half_Clamped_GrPixelConfig:
        case kR_16_GrPixelConfig:
            return 2;
        case kRGBA_8888_GrPixelConfig:
        case kRGB_888_GrPixelConfig:  // We're really talking about GrColorType::kRGB_888x here.
        case kRGB_888X_GrPixelConfig:
        case kBGRA_8888_GrPixelConfig:
        case kSRGBA_8888_GrPixelConfig:
        case kRGBA_1010102_GrPixelConfig:
        case kRGBA_float_GrPixelConfig:
        case kRG_1616_GrPixelConfig:
            return 4;
        case kRGB_ETC1_GrPixelConfig:
        case kUnknown_GrPixelConfig:
            return 0;
        // Experimental (for Y416 and mutant P016/P010)
        case kRGBA_16161616_GrPixelConfig:
            return 8;
        case kRG_half_GrPixelConfig:
            return 4;
    }
    SK_ABORT("Invalid pixel config");
    return 0;
}
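// Note (editorial): the value returned by config_alignment() is fed to
// PixelStorei(GR_GL_UNPACK_ALIGNMENT, ...) before TexSubImage2D uploads (see onTransferPixelsTo
// below), so it must match the row-start alignment of tightly packed client data for each config.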
bool GrGLGpu::onTransferPixelsTo(GrTexture* texture, int left, int top, int width, int height,
                                 GrColorType bufferColorType, GrGpuBuffer* transferBuffer,
                                 size_t offset, size_t rowBytes) {
    GrGLTexture* glTex = static_cast<GrGLTexture*>(texture);
    GrPixelConfig texConfig = glTex->config();
    SkASSERT(this->caps()->isConfigTexturable(texConfig));

    // Can't transfer compressed data
    SkASSERT(!GrPixelConfigIsCompressed(glTex->config()));

    if (!check_write_and_transfer_input(glTex)) {
        return false;
    }

    static_assert(sizeof(int) == sizeof(int32_t), "");
    if (width <= 0 || height <= 0) {
        return false;
    }

    this->bindTextureToScratchUnit(glTex->target(), glTex->textureID());

    SkASSERT(!transferBuffer->isMapped());
    SkASSERT(!transferBuffer->isCpuBuffer());
    const GrGLBuffer* glBuffer = static_cast<const GrGLBuffer*>(transferBuffer);
    this->bindBuffer(GrGpuBufferType::kXferCpuToGpu, glBuffer);

    SkDEBUGCODE(
        SkIRect subRect = SkIRect::MakeXYWH(left, top, width, height);
        SkIRect bounds = SkIRect::MakeWH(texture->width(), texture->height());
        SkASSERT(bounds.contains(subRect));
    )

    int bpp = GrColorTypeBytesPerPixel(bufferColorType);
    const size_t trimRowBytes = width * bpp;
    const void* pixels = (void*)offset;
    if (width < 0 || height < 0) {
        return false;
    }

    bool restoreGLRowLength = false;
    if (trimRowBytes != rowBytes) {
        // we should have checked for this support already
        SkASSERT(this->glCaps().writePixelsRowBytesSupport());
        GL_CALL(PixelStorei(GR_GL_UNPACK_ROW_LENGTH, rowBytes / bpp));
        restoreGLRowLength = true;
    }

    // Internal format comes from the texture desc.
    GrGLenum internalFormat;
    // External format and type come from the upload data.
    GrGLenum externalFormat;
    GrGLenum externalType;
    auto bufferAsConfig = GrColorTypeToPixelConfig(bufferColorType);
    if (!this->glCaps().getTexImageFormats(texConfig, bufferAsConfig, &internalFormat,
                                           &externalFormat, &externalType)) {
        return false;
    }

    GL_CALL(PixelStorei(GR_GL_UNPACK_ALIGNMENT, config_alignment(texConfig)));
    GL_CALL(TexSubImage2D(glTex->target(),
                          0,
                          left, top,
                          width,
                          height,
                          externalFormat, externalType,
                          pixels));

    if (restoreGLRowLength) {
        GL_CALL(PixelStorei(GR_GL_UNPACK_ROW_LENGTH, 0));
    }

    return true;
}
  866. bool GrGLGpu::onTransferPixelsFrom(GrSurface* surface, int left, int top, int width, int height,
  867. GrColorType dstColorType, GrGpuBuffer* transferBuffer,
  868. size_t offset) {
  869. auto* glBuffer = static_cast<GrGLBuffer*>(transferBuffer);
  870. this->bindBuffer(GrGpuBufferType::kXferGpuToCpu, glBuffer);
  871. auto offsetAsPtr = reinterpret_cast<void*>(offset);
  872. return this->readOrTransferPixelsFrom(surface, left, top, width, height, dstColorType,
  873. offsetAsPtr, width);
  874. }
  875. /**
  876. * Creates storage space for the texture and fills it with texels.
  877. *
  878. * @param config Pixel config of the texture.
  879. * @param interface The GL interface in use.
  880. * @param caps The capabilities of the GL device.
  881. * @param target Which bound texture to target (GR_GL_TEXTURE_2D, e.g.)
  882. * @param internalFormat The data format used for the internal storage of the texture. May be sized.
  883. * @param internalFormatForTexStorage The data format used for the TexStorage API. Must be sized.
  884. * @param externalFormat The data format used for the external storage of the texture.
  885. * @param externalType The type of the data used for the external storage of the texture.
  886. * @param texels The texel data of the texture being created.
  887. * @param mipLevelCount Number of mipmap levels
  888. * @param baseWidth The width of the texture's base mipmap level
  889. * @param baseHeight The height of the texture's base mipmap level
  890. */
  891. static bool allocate_and_populate_texture(GrPixelConfig config,
  892. const GrGLInterface& interface,
  893. const GrGLCaps& caps,
  894. GrGLenum target,
  895. GrGLenum internalFormat,
  896. GrGLenum internalFormatForTexStorage,
  897. GrGLenum externalFormat,
  898. GrGLenum externalType,
  899. const GrMipLevel texels[],
  900. int mipLevelCount,
  901. int baseWidth,
  902. int baseHeight,
  903. bool* changedUnpackRowLength) {
  904. CLEAR_ERROR_BEFORE_ALLOC(&interface);
  905. if (caps.configSupportsTexStorage(config)) {
  906. // We never resize or change formats of textures.
  907. GL_ALLOC_CALL(&interface,
  908. TexStorage2D(target, SkTMax(mipLevelCount, 1), internalFormatForTexStorage,
  909. baseWidth, baseHeight));
  910. GrGLenum error = CHECK_ALLOC_ERROR(&interface);
  911. if (error != GR_GL_NO_ERROR) {
  912. return false;
  913. } else {
  914. size_t bpp = GrBytesPerPixel(config);
  915. for (int currentMipLevel = 0; currentMipLevel < mipLevelCount; currentMipLevel++) {
  916. const void* currentMipData = texels[currentMipLevel].fPixels;
  917. if (currentMipData == nullptr) {
  918. continue;
  919. }
  920. int twoToTheMipLevel = 1 << currentMipLevel;
  921. const int currentWidth = SkTMax(1, baseWidth / twoToTheMipLevel);
  922. const int currentHeight = SkTMax(1, baseHeight / twoToTheMipLevel);
  923. if (texels[currentMipLevel].fPixels) {
  924. const size_t trimRowBytes = currentWidth * bpp;
  925. const size_t rowBytes = texels[currentMipLevel].fRowBytes;
  926. if (rowBytes != trimRowBytes) {
  927. SkASSERT(caps.writePixelsRowBytesSupport());
  928. GrGLint rowLength = static_cast<GrGLint>(rowBytes / bpp);
  929. GR_GL_CALL(&interface, PixelStorei(GR_GL_UNPACK_ROW_LENGTH, rowLength));
  930. *changedUnpackRowLength = true;
  931. } else if (*changedUnpackRowLength) {
  932. SkASSERT(caps.writePixelsRowBytesSupport());
  933. GR_GL_CALL(&interface, PixelStorei(GR_GL_UNPACK_ROW_LENGTH, 0));
  934. *changedUnpackRowLength = false;
  935. }
  936. }
  937. GR_GL_CALL(&interface,
  938. TexSubImage2D(target,
  939. currentMipLevel,
  940. 0, // left
  941. 0, // top
  942. currentWidth,
  943. currentHeight,
  944. externalFormat, externalType,
  945. currentMipData));
  946. }
  947. return true;
  948. }
  949. } else {
  950. if (!mipLevelCount) {
  951. GL_ALLOC_CALL(&interface,
  952. TexImage2D(target,
  953. 0,
  954. internalFormat,
  955. baseWidth,
  956. baseHeight,
  957. 0, // border
  958. externalFormat, externalType,
  959. nullptr));
  960. GrGLenum error = CHECK_ALLOC_ERROR(&interface);
  961. if (error != GR_GL_NO_ERROR) {
  962. return false;
  963. }
  964. } else {
  965. size_t bpp = GrBytesPerPixel(config);
  966. for (int currentMipLevel = 0; currentMipLevel < mipLevelCount; currentMipLevel++) {
  967. int twoToTheMipLevel = 1 << currentMipLevel;
  968. const int currentWidth = SkTMax(1, baseWidth / twoToTheMipLevel);
  969. const int currentHeight = SkTMax(1, baseHeight / twoToTheMipLevel);
  970. if (texels[currentMipLevel].fPixels) {
  971. const size_t trimRowBytes = currentWidth * bpp;
  972. const size_t rowBytes = texels[currentMipLevel].fRowBytes;
  973. if (rowBytes != trimRowBytes) {
  974. SkASSERT(caps.writePixelsRowBytesSupport());
  975. GrGLint rowLength = static_cast<GrGLint>(rowBytes / bpp);
  976. GR_GL_CALL(&interface, PixelStorei(GR_GL_UNPACK_ROW_LENGTH, rowLength));
  977. *changedUnpackRowLength = true;
  978. } else if (*changedUnpackRowLength) {
  979. SkASSERT(caps.writePixelsRowBytesSupport());
  980. GR_GL_CALL(&interface, PixelStorei(GR_GL_UNPACK_ROW_LENGTH, 0));
  981. *changedUnpackRowLength = false;
  982. }
  983. }
  984. const void* currentMipData = texels[currentMipLevel].fPixels;
985. // Even if currentMipData is nullptr, continue to call TexImage2D.
  986. // This will allocate texture memory which we can later populate.
  987. GL_ALLOC_CALL(&interface,
  988. TexImage2D(target,
  989. currentMipLevel,
  990. internalFormat,
  991. currentWidth,
  992. currentHeight,
  993. 0, // border
  994. externalFormat, externalType,
  995. currentMipData));
  996. GrGLenum error = CHECK_ALLOC_ERROR(&interface);
  997. if (error != GR_GL_NO_ERROR) {
  998. return false;
  999. }
  1000. }
  1001. }
  1002. }
  1003. return true;
  1004. }
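// Illustrative sketch (not part of this file): the per-level arithmetic used above. Each mip
// level halves the base dimensions, clamping at 1, and when a level's fRowBytes exceeds the
// tightly packed row size the unpack row length is expressed in *pixels* (rowBytes / bytes
// per pixel), not bytes. Guarded out of the build; assumes only the standard library.
#if 0
#include <algorithm>
#include <cassert>
#include <cstddef>

static void mip_upload_arithmetic_example() {
    const int baseWidth = 100, baseHeight = 60;
    const size_t bpp = 4;  // e.g. a 4-byte-per-pixel config
    for (int level = 0; level < 3; ++level) {
        const int w = std::max(1, baseWidth  >> level);
        const int h = std::max(1, baseHeight >> level);
        const size_t trimRowBytes = w * bpp;           // tightly packed row
        const size_t rowBytes     = trimRowBytes + 32; // caller-provided pitch (example value)
        const int unpackRowLength = static_cast<int>(rowBytes / bpp);
        assert(unpackRowLength >= w);  // GL skips (unpackRowLength - w) pixels per source row
        (void)h;
    }
}
#endif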
  1005. /**
  1006. * After a texture is created, any state which was altered during its creation
  1007. * needs to be restored.
  1008. *
  1009. * @param interface The GL interface to use.
  1010. * @param caps The capabilities of the GL device.
  1011. * @param restoreGLRowLength Should the row length unpacking be restored?
  1013. */
  1014. static void restore_pixelstore_state(const GrGLInterface& interface, const GrGLCaps& caps,
  1015. bool restoreGLRowLength) {
  1016. if (restoreGLRowLength) {
  1017. SkASSERT(caps.writePixelsRowBytesSupport());
  1018. GR_GL_CALL(&interface, PixelStorei(GR_GL_UNPACK_ROW_LENGTH, 0));
  1019. }
  1020. }
  1021. void GrGLGpu::unbindCpuToGpuXferBuffer() {
  1022. auto* xferBufferState = this->hwBufferState(GrGpuBufferType::kXferCpuToGpu);
  1023. if (!xferBufferState->fBoundBufferUniqueID.isInvalid()) {
  1024. GL_CALL(BindBuffer(xferBufferState->fGLTarget, 0));
  1025. xferBufferState->invalidate();
  1026. }
  1027. }
  1028. // TODO: Make this take a GrColorType instead of dataConfig. This requires updating GrGLCaps to
  1029. // convert from GrColorType to externalFormat/externalType GLenum values.
  1030. bool GrGLGpu::uploadTexData(GrPixelConfig texConfig, int texWidth, int texHeight, GrGLenum target,
  1031. UploadType uploadType, int left, int top, int width, int height,
  1032. GrPixelConfig dataConfig, const GrMipLevel texels[], int mipLevelCount,
  1033. GrMipMapsStatus* mipMapsStatus) {
  1034. // If we're uploading compressed data then we should be using uploadCompressedTexData
  1035. SkASSERT(!GrPixelConfigIsCompressed(dataConfig));
  1036. SkASSERT(this->caps()->isConfigTexturable(texConfig));
  1037. SkDEBUGCODE(
  1038. SkIRect subRect = SkIRect::MakeXYWH(left, top, width, height);
  1039. SkIRect bounds = SkIRect::MakeWH(texWidth, texHeight);
  1040. SkASSERT(bounds.contains(subRect));
  1041. )
  1042. SkASSERT(1 == mipLevelCount ||
  1043. (0 == left && 0 == top && width == texWidth && height == texHeight));
  1044. this->unbindCpuToGpuXferBuffer();
  1045. const GrGLInterface* interface = this->glInterface();
  1046. const GrGLCaps& caps = this->glCaps();
  1047. size_t bpp = GrBytesPerPixel(dataConfig);
  1048. if (width == 0 || height == 0) {
  1049. return false;
  1050. }
  1051. // Internal format comes from the texture desc.
  1052. GrGLenum internalFormat;
  1053. // External format and type come from the upload data.
  1054. GrGLenum externalFormat;
  1055. GrGLenum externalType;
  1056. if (!this->glCaps().getTexImageFormats(texConfig, dataConfig, &internalFormat, &externalFormat,
  1057. &externalType)) {
  1058. return false;
  1059. }
1060. // TexStorage requires a sized format, and internalFormat may or may not be sized.
  1061. GrGLenum internalFormatForTexStorage = this->glCaps().configSizedInternalFormat(texConfig);
  1062. /*
  1063. * Check whether to allocate a temporary buffer for flipping y or
  1064. * because our srcData has extra bytes past each row. If so, we need
  1065. * to trim those off here, since GL ES may not let us specify
  1066. * GL_UNPACK_ROW_LENGTH.
  1067. */
  1068. bool restoreGLRowLength = false;
  1069. // in case we need a temporary, trimmed copy of the src pixels
  1070. SkAutoSMalloc<128 * 128> tempStorage;
  1071. if (mipMapsStatus) {
  1072. *mipMapsStatus = GrMipMapsStatus::kValid;
  1073. }
  1074. if (mipMapsStatus && mipLevelCount <= 1) {
  1075. *mipMapsStatus = GrMipMapsStatus::kNotAllocated;
  1076. }
  1077. if (mipLevelCount) {
  1078. GR_GL_CALL(interface, PixelStorei(GR_GL_UNPACK_ALIGNMENT, config_alignment(texConfig)));
  1079. }
  1080. bool succeeded = true;
  1081. if (kNewTexture_UploadType == uploadType) {
  1082. if (0 == left && 0 == top && texWidth == width && texHeight == height) {
  1083. succeeded = allocate_and_populate_texture(
  1084. texConfig, *interface, caps, target, internalFormat,
  1085. internalFormatForTexStorage, externalFormat, externalType, texels,
  1086. mipLevelCount, width, height, &restoreGLRowLength);
  1087. } else {
  1088. succeeded = false;
  1089. }
  1090. } else {
  1091. for (int currentMipLevel = 0; currentMipLevel < mipLevelCount; currentMipLevel++) {
  1092. if (!texels[currentMipLevel].fPixels) {
  1093. continue;
  1094. }
  1095. int twoToTheMipLevel = 1 << currentMipLevel;
  1096. const int currentWidth = SkTMax(1, width / twoToTheMipLevel);
  1097. const int currentHeight = SkTMax(1, height / twoToTheMipLevel);
  1098. const size_t trimRowBytes = currentWidth * bpp;
  1099. const size_t rowBytes = texels[currentMipLevel].fRowBytes;
  1100. if (caps.writePixelsRowBytesSupport() && rowBytes != trimRowBytes) {
  1101. GrGLint rowLength = static_cast<GrGLint>(rowBytes / bpp);
  1102. GR_GL_CALL(interface, PixelStorei(GR_GL_UNPACK_ROW_LENGTH, rowLength));
  1103. restoreGLRowLength = true;
  1104. }
  1105. GL_CALL(TexSubImage2D(target,
  1106. currentMipLevel,
  1107. left, top,
  1108. currentWidth,
  1109. currentHeight,
  1110. externalFormat, externalType,
  1111. texels[currentMipLevel].fPixels));
  1112. }
  1113. }
  1114. restore_pixelstore_state(*interface, caps, restoreGLRowLength);
  1115. return succeeded;
  1116. }
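// Illustrative sketch (not part of this file): why an unpack alignment is flushed before the
// uploads above. GL assumes each source row starts at an address that is a multiple of
// GL_UNPACK_ALIGNMENT, so the value chosen must divide the row pitch in bytes. The mapping
// below is a plausible choice shown for illustration only, not necessarily what
// config_alignment() returns.
#if 0
#include <cassert>
#include <cstddef>

static int illustrative_row_alignment(size_t bytesPerPixel) {
    switch (bytesPerPixel) {
        case 1:  return 1;
        case 2:  return 2;
        default: return 4;  // 4-, 8-, and 16-byte pixels are all 4-byte aligned
    }
}

static void unpack_alignment_example() {
    const size_t bpp = 2;              // e.g. a 16-bit-per-pixel config
    const size_t rowBytes = 2 * 37;    // 37 pixels per row, tightly packed
    const int alignment = illustrative_row_alignment(bpp);
    assert(rowBytes % alignment == 0); // every row start stays addressable at this alignment
}
#endif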
  1117. GrGLenum GrGLGpu::uploadCompressedTexData(SkImage::CompressionType compressionType, int width,
  1118. int height, GrGLenum target, const void* data) {
  1119. const GrGLCaps& caps = this->glCaps();
  1120. GrPixelConfig config = GrCompressionTypePixelConfig(compressionType);
  1121. // We only need the internal format for compressed 2D textures.
  1122. GrGLenum internalFormat;
  1123. if (!caps.getCompressedTexImageFormats(config, &internalFormat)) {
  1124. return 0;
  1125. }
  1126. bool useTexStorage = caps.configSupportsTexStorage(config);
  1127. static constexpr int kMipLevelCount = 1;
  1128. // Make sure that the width and height that we pass to OpenGL
1129. // are multiples of the block size.
  1130. size_t dataSize = GrCompressedDataSize(compressionType, width, height);
  1131. if (useTexStorage) {
  1132. // We never resize or change formats of textures.
  1133. GL_ALLOC_CALL(this->glInterface(),
  1134. TexStorage2D(target, kMipLevelCount, internalFormat, width, height));
  1135. GrGLenum error = CHECK_ALLOC_ERROR(this->glInterface());
  1136. if (error != GR_GL_NO_ERROR) {
  1137. return 0;
  1138. }
  1139. GL_CALL(CompressedTexSubImage2D(target,
  1140. 0, // level
  1141. 0, // left
  1142. 0, // top
  1143. width,
  1144. height,
  1145. internalFormat,
  1146. SkToInt(dataSize),
  1147. data));
  1148. } else {
  1149. GL_ALLOC_CALL(this->glInterface(), CompressedTexImage2D(target,
  1150. 0, // level
  1151. internalFormat,
  1152. width,
  1153. height,
  1154. 0, // border
  1155. SkToInt(dataSize),
  1156. data));
  1157. GrGLenum error = CHECK_ALLOC_ERROR(this->glInterface());
  1158. if (error != GR_GL_NO_ERROR) {
  1159. return 0;
  1160. }
  1161. }
  1162. return internalFormat;
  1163. }
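// Illustrative sketch (not part of this file): the kind of size computation GrCompressedDataSize()
// performs. For an ETC1-style format that packs 4x4 pixel blocks into 8 bytes, the image is
// rounded up to whole blocks in each dimension. Illustration only; the real helper handles each
// SkImage::CompressionType itself.
#if 0
#include <cstddef>

static size_t etc1_style_data_size(int width, int height) {
    const int blockDim = 4;        // 4x4 pixel blocks
    const size_t blockBytes = 8;   // 8 bytes per block
    const int blocksX = (width  + blockDim - 1) / blockDim;
    const int blocksY = (height + blockDim - 1) / blockDim;
    return static_cast<size_t>(blocksX) * blocksY * blockBytes;
    // e.g. etc1_style_data_size(100, 60) == 25 * 15 * 8 == 3000 bytes
}
#endif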
  1164. static bool renderbuffer_storage_msaa(const GrGLContext& ctx,
  1165. int sampleCount,
  1166. GrGLenum format,
  1167. int width, int height) {
  1168. CLEAR_ERROR_BEFORE_ALLOC(ctx.interface());
  1169. SkASSERT(GrGLCaps::kNone_MSFBOType != ctx.caps()->msFBOType());
  1170. switch (ctx.caps()->msFBOType()) {
  1171. case GrGLCaps::kStandard_MSFBOType:
  1172. GL_ALLOC_CALL(ctx.interface(),
  1173. RenderbufferStorageMultisample(GR_GL_RENDERBUFFER,
  1174. sampleCount,
  1175. format,
  1176. width, height));
  1177. break;
  1178. case GrGLCaps::kES_Apple_MSFBOType:
  1179. GL_ALLOC_CALL(ctx.interface(),
  1180. RenderbufferStorageMultisampleES2APPLE(GR_GL_RENDERBUFFER,
  1181. sampleCount,
  1182. format,
  1183. width, height));
  1184. break;
  1185. case GrGLCaps::kES_EXT_MsToTexture_MSFBOType:
  1186. case GrGLCaps::kES_IMG_MsToTexture_MSFBOType:
  1187. GL_ALLOC_CALL(ctx.interface(),
  1188. RenderbufferStorageMultisampleES2EXT(GR_GL_RENDERBUFFER,
  1189. sampleCount,
  1190. format,
  1191. width, height));
  1192. break;
  1193. case GrGLCaps::kNone_MSFBOType:
  1194. SK_ABORT("Shouldn't be here if we don't support multisampled renderbuffers.");
  1195. break;
  1196. }
  1197. return (GR_GL_NO_ERROR == CHECK_ALLOC_ERROR(ctx.interface()));
  1198. }
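// Illustrative sketch (not part of this file): roughly the error-checking pattern the
// CLEAR_ERROR_BEFORE_ALLOC / GL_ALLOC_CALL / CHECK_ALLOC_ERROR macros implement. Pending errors
// are drained first so that a later glGetError() reflects only the allocation call (which may
// report GL_OUT_OF_MEMORY). Assumes a GL or GLES3 header is available; illustration only.
#if 0
#include <GLES3/gl3.h>

static bool alloc_renderbuffer_storage_checked(int samples, GLenum format, int w, int h) {
    while (glGetError() != GL_NO_ERROR) {}  // clear any stale errors
    glRenderbufferStorageMultisample(GL_RENDERBUFFER, samples, format, w, h);
    return glGetError() == GL_NO_ERROR;     // false on GL_OUT_OF_MEMORY, etc.
}
#endif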
  1199. bool GrGLGpu::createRenderTargetObjects(const GrSurfaceDesc& desc,
  1200. int sampleCount,
  1201. const GrGLTextureInfo& texInfo,
  1202. GrGLRenderTarget::IDDesc* idDesc) {
  1203. idDesc->fMSColorRenderbufferID = 0;
  1204. idDesc->fRTFBOID = 0;
  1205. idDesc->fRTFBOOwnership = GrBackendObjectOwnership::kOwned;
  1206. idDesc->fTexFBOID = 0;
  1207. GrGLenum status;
  1208. GrGLenum colorRenderbufferFormat = 0; // suppress warning
  1209. GrGLFormat format = GrGLFormatFromGLEnum(texInfo.fFormat);
  1210. if (format == GrGLFormat::kUnknown) {
  1211. goto FAILED;
  1212. }
  1213. if (sampleCount > 1 && GrGLCaps::kNone_MSFBOType == this->glCaps().msFBOType()) {
  1214. goto FAILED;
  1215. }
  1216. GL_CALL(GenFramebuffers(1, &idDesc->fTexFBOID));
  1217. if (!idDesc->fTexFBOID) {
  1218. goto FAILED;
  1219. }
1220. // If we are using multisampling we will create two FBOs. We render to one and then resolve to
1221. // the texture bound to the other. The exception is the IMG multisample extension. With this
1222. // extension the texture is multisampled when rendered to and then auto-resolves when it is
1223. // rendered from.
  1224. if (sampleCount > 1 && this->glCaps().usesMSAARenderBuffers()) {
  1225. GL_CALL(GenFramebuffers(1, &idDesc->fRTFBOID));
  1226. GL_CALL(GenRenderbuffers(1, &idDesc->fMSColorRenderbufferID));
  1227. if (!idDesc->fRTFBOID ||
  1228. !idDesc->fMSColorRenderbufferID) {
  1229. goto FAILED;
  1230. }
  1231. colorRenderbufferFormat = this->glCaps().getRenderbufferInternalFormat(format);
  1232. } else {
  1233. idDesc->fRTFBOID = idDesc->fTexFBOID;
  1234. }
  1235. // below here we may bind the FBO
  1236. fHWBoundRenderTargetUniqueID.makeInvalid();
  1237. if (idDesc->fRTFBOID != idDesc->fTexFBOID) {
  1238. SkASSERT(sampleCount > 1);
  1239. GL_CALL(BindRenderbuffer(GR_GL_RENDERBUFFER, idDesc->fMSColorRenderbufferID));
  1240. if (!renderbuffer_storage_msaa(*fGLContext, sampleCount, colorRenderbufferFormat,
  1241. desc.fWidth, desc.fHeight)) {
  1242. goto FAILED;
  1243. }
  1244. this->bindFramebuffer(GR_GL_FRAMEBUFFER, idDesc->fRTFBOID);
  1245. GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER,
  1246. GR_GL_COLOR_ATTACHMENT0,
  1247. GR_GL_RENDERBUFFER,
  1248. idDesc->fMSColorRenderbufferID));
  1249. if (!this->glCaps().isFormatVerifiedColorAttachment(format)) {
  1250. GL_CALL_RET(status, CheckFramebufferStatus(GR_GL_FRAMEBUFFER));
  1251. if (status != GR_GL_FRAMEBUFFER_COMPLETE) {
  1252. goto FAILED;
  1253. }
  1254. fGLContext->caps()->markFormatAsValidColorAttachment(format);
  1255. }
  1256. }
  1257. this->bindFramebuffer(GR_GL_FRAMEBUFFER, idDesc->fTexFBOID);
  1258. if (this->glCaps().usesImplicitMSAAResolve() && sampleCount > 1) {
  1259. GL_CALL(FramebufferTexture2DMultisample(GR_GL_FRAMEBUFFER, GR_GL_COLOR_ATTACHMENT0,
  1260. texInfo.fTarget, texInfo.fID, 0, sampleCount));
  1261. } else {
  1262. GL_CALL(FramebufferTexture2D(GR_GL_FRAMEBUFFER,
  1263. GR_GL_COLOR_ATTACHMENT0,
  1264. texInfo.fTarget,
  1265. texInfo.fID, 0));
  1266. }
  1267. if (!this->glCaps().isFormatVerifiedColorAttachment(format)) {
  1268. GL_CALL_RET(status, CheckFramebufferStatus(GR_GL_FRAMEBUFFER));
  1269. if (status != GR_GL_FRAMEBUFFER_COMPLETE) {
  1270. goto FAILED;
  1271. }
  1272. fGLContext->caps()->markFormatAsValidColorAttachment(format);
  1273. }
  1274. return true;
  1275. FAILED:
  1276. if (idDesc->fMSColorRenderbufferID) {
  1277. GL_CALL(DeleteRenderbuffers(1, &idDesc->fMSColorRenderbufferID));
  1278. }
  1279. if (idDesc->fRTFBOID != idDesc->fTexFBOID) {
  1280. this->deleteFramebuffer(idDesc->fRTFBOID);
  1281. }
  1282. if (idDesc->fTexFBOID) {
  1283. this->deleteFramebuffer(idDesc->fTexFBOID);
  1284. }
  1285. return false;
  1286. }
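// Illustrative sketch (not part of this file): the render/resolve FBO pair set up above when
// MSAA renderbuffers are in use. One FBO carries a multisampled color renderbuffer, the other
// carries the texture that the resolve later blits into. Uses raw GL names for brevity and
// assumes a GL/GLES3 header; error handling trimmed.
#if 0
#include <GLES3/gl3.h>

static void msaa_fbo_pair_example(GLuint texID, GLenum rbFormat, int w, int h, int samples) {
    GLuint fbos[2];   // [0] = multisampled render target, [1] = resolve (texture) target
    GLuint msColorRB;
    glGenFramebuffers(2, fbos);
    glGenRenderbuffers(1, &msColorRB);

    glBindRenderbuffer(GL_RENDERBUFFER, msColorRB);
    glRenderbufferStorageMultisample(GL_RENDERBUFFER, samples, rbFormat, w, h);
    glBindFramebuffer(GL_FRAMEBUFFER, fbos[0]);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                              GL_RENDERBUFFER, msColorRB);

    glBindFramebuffer(GL_FRAMEBUFFER, fbos[1]);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                           GL_TEXTURE_2D, texID, 0);

    // Each FBO must report completeness before it is rendered to.
    bool complete = glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE;
    (void)complete;
}
#endif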
  1287. // good to set a break-point here to know when createTexture fails
  1288. static sk_sp<GrTexture> return_null_texture() {
  1289. // SkDEBUGFAIL("null texture");
  1290. return nullptr;
  1291. }
  1292. static GrGLTextureParameters::SamplerOverriddenState set_initial_texture_params(
  1293. const GrGLInterface* interface, const GrGLTextureInfo& info) {
  1294. // Some drivers like to know filter/wrap before seeing glTexImage2D. Some
  1295. // drivers have a bug where an FBO won't be complete if it includes a
  1296. // texture that is not mipmap complete (considering the filter in use).
  1297. GrGLTextureParameters::SamplerOverriddenState state;
  1298. state.fMinFilter = GR_GL_NEAREST;
  1299. state.fMagFilter = GR_GL_NEAREST;
  1300. state.fWrapS = GR_GL_CLAMP_TO_EDGE;
  1301. state.fWrapT = GR_GL_CLAMP_TO_EDGE;
  1302. GR_GL_CALL(interface, TexParameteri(info.fTarget, GR_GL_TEXTURE_MAG_FILTER, state.fMagFilter));
  1303. GR_GL_CALL(interface, TexParameteri(info.fTarget, GR_GL_TEXTURE_MIN_FILTER, state.fMinFilter));
  1304. GR_GL_CALL(interface, TexParameteri(info.fTarget, GR_GL_TEXTURE_WRAP_S, state.fWrapS));
  1305. GR_GL_CALL(interface, TexParameteri(info.fTarget, GR_GL_TEXTURE_WRAP_T, state.fWrapT));
  1306. return state;
  1307. }
  1308. sk_sp<GrTexture> GrGLGpu::onCreateTexture(const GrSurfaceDesc& desc,
  1309. GrRenderable renderable,
  1310. int renderTargetSampleCnt,
  1311. SkBudgeted budgeted,
  1312. GrProtected isProtected,
  1313. const GrMipLevel texels[],
  1314. int mipLevelCount) {
  1315. // We don't support protected textures in GL.
  1316. if (isProtected == GrProtected::kYes) {
  1317. return nullptr;
  1318. }
  1319. SkASSERT(GrGLCaps::kNone_MSFBOType != this->glCaps().msFBOType() || renderTargetSampleCnt == 1);
  1320. GrGLTexture::IDDesc idDesc;
  1321. idDesc.fOwnership = GrBackendObjectOwnership::kOwned;
  1322. GrMipMapsStatus mipMapsStatus;
  1323. GrGLTextureParameters::SamplerOverriddenState initialState;
  1324. if (!this->createTextureImpl(desc, &idDesc.fInfo, renderable, &initialState, texels,
  1325. mipLevelCount, &mipMapsStatus)) {
  1326. return return_null_texture();
  1327. }
  1328. sk_sp<GrGLTexture> tex;
  1329. if (renderable == GrRenderable::kYes) {
  1330. // unbind the texture from the texture unit before binding it to the frame buffer
  1331. GL_CALL(BindTexture(idDesc.fInfo.fTarget, 0));
  1332. GrGLRenderTarget::IDDesc rtIDDesc;
  1333. if (!this->createRenderTargetObjects(desc, renderTargetSampleCnt, idDesc.fInfo,
  1334. &rtIDDesc)) {
  1335. GL_CALL(DeleteTextures(1, &idDesc.fInfo.fID));
  1336. return return_null_texture();
  1337. }
  1338. tex = sk_make_sp<GrGLTextureRenderTarget>(this, budgeted, desc, renderTargetSampleCnt,
  1339. idDesc, rtIDDesc, mipMapsStatus);
  1340. tex->baseLevelWasBoundToFBO();
  1341. } else {
  1342. tex = sk_make_sp<GrGLTexture>(this, budgeted, desc, idDesc, mipMapsStatus);
  1343. }
  1344. // The non-sampler params are still at their default values.
  1345. tex->parameters()->set(&initialState, GrGLTextureParameters::NonsamplerState(),
  1346. fResetTimestampForTextureParameters);
  1347. bool clearLevelsWithoutData =
  1348. this->caps()->shouldInitializeTextures() && this->glCaps().clearTextureSupport();
  1349. if (clearLevelsWithoutData) {
  1350. static constexpr uint32_t kZero = 0;
  1351. int levelCnt = SkTMax(1, tex->texturePriv().maxMipMapLevel());
  1352. for (int i = 0; i < levelCnt; ++i) {
  1353. if (i >= mipLevelCount || !texels[i].fPixels) {
  1354. GL_CALL(ClearTexImage(tex->textureID(), i, GR_GL_RGBA, GR_GL_UNSIGNED_BYTE,
  1355. &kZero));
  1356. }
  1357. }
  1358. }
  1359. return std::move(tex);
  1360. }
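// Illustrative sketch (not part of this file), for orientation when reading the clear loop
// above: a full mip chain for a WxH base level has floor(log2(max(W, H))) + 1 levels in total,
// i.e. it halves until both dimensions reach 1. Illustration only.
#if 0
#include <algorithm>

static int full_mip_chain_level_count(int width, int height) {
    int levels = 1;
    int largest = std::max(width, height);
    while (largest > 1) {
        largest >>= 1;
        ++levels;
    }
    return levels;  // e.g. 256x64 -> 9 levels (256, 128, ..., 1)
}
#endif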
  1361. sk_sp<GrTexture> GrGLGpu::onCreateCompressedTexture(int width, int height,
  1362. SkImage::CompressionType compression,
  1363. SkBudgeted budgeted, const void* data) {
  1364. GrGLTexture::IDDesc idDesc;
  1365. GrGLTextureParameters::SamplerOverriddenState initialState;
  1366. if (!this->createCompressedTextureImpl(&idDesc.fInfo, width, height, compression, &initialState,
  1367. data)) {
  1368. return nullptr;
  1369. }
  1370. idDesc.fOwnership = GrBackendObjectOwnership::kOwned;
  1371. GrSurfaceDesc desc;
  1372. desc.fConfig = GrCompressionTypePixelConfig(compression);
  1373. desc.fWidth = width;
  1374. desc.fHeight = height;
  1375. auto tex =
  1376. sk_make_sp<GrGLTexture>(this, budgeted, desc, idDesc, GrMipMapsStatus::kNotAllocated);
  1377. // The non-sampler params are still at their default values.
  1378. tex->parameters()->set(&initialState, GrGLTextureParameters::NonsamplerState(),
  1379. fResetTimestampForTextureParameters);
  1380. return std::move(tex);
  1381. }
  1382. namespace {
  1383. const GrGLuint kUnknownBitCount = GrGLStencilAttachment::kUnknownBitCount;
  1384. void inline get_stencil_rb_sizes(const GrGLInterface* gl,
  1385. GrGLStencilAttachment::Format* format) {
  1386. // we shouldn't ever know one size and not the other
  1387. SkASSERT((kUnknownBitCount == format->fStencilBits) ==
  1388. (kUnknownBitCount == format->fTotalBits));
  1389. if (kUnknownBitCount == format->fStencilBits) {
  1390. GR_GL_GetRenderbufferParameteriv(gl, GR_GL_RENDERBUFFER,
  1391. GR_GL_RENDERBUFFER_STENCIL_SIZE,
  1392. (GrGLint*)&format->fStencilBits);
  1393. if (format->fPacked) {
  1394. GR_GL_GetRenderbufferParameteriv(gl, GR_GL_RENDERBUFFER,
  1395. GR_GL_RENDERBUFFER_DEPTH_SIZE,
  1396. (GrGLint*)&format->fTotalBits);
  1397. format->fTotalBits += format->fStencilBits;
  1398. } else {
  1399. format->fTotalBits = format->fStencilBits;
  1400. }
  1401. }
  1402. }
  1403. }
  1404. int GrGLGpu::getCompatibleStencilIndex(GrGLFormat format) {
  1405. static const int kSize = 16;
  1406. SkASSERT(this->glCaps().canFormatBeFBOColorAttachment(format));
  1407. if (!this->glCaps().hasStencilFormatBeenDeterminedForFormat(format)) {
1408. // Default to unsupported; set this if we find a stencil format that works.
  1409. int firstWorkingStencilFormatIndex = -1;
  1410. // Create color texture
  1411. GrGLuint colorID = 0;
  1412. GL_CALL(GenTextures(1, &colorID));
  1413. this->bindTextureToScratchUnit(GR_GL_TEXTURE_2D, colorID);
  1414. GL_CALL(TexParameteri(GR_GL_TEXTURE_2D,
  1415. GR_GL_TEXTURE_MAG_FILTER,
  1416. GR_GL_NEAREST));
  1417. GL_CALL(TexParameteri(GR_GL_TEXTURE_2D,
  1418. GR_GL_TEXTURE_MIN_FILTER,
  1419. GR_GL_NEAREST));
  1420. GL_CALL(TexParameteri(GR_GL_TEXTURE_2D,
  1421. GR_GL_TEXTURE_WRAP_S,
  1422. GR_GL_CLAMP_TO_EDGE));
  1423. GL_CALL(TexParameteri(GR_GL_TEXTURE_2D,
  1424. GR_GL_TEXTURE_WRAP_T,
  1425. GR_GL_CLAMP_TO_EDGE));
  1426. GrGLenum internalFormat = this->glCaps().getTexImageInternalFormat(format);
  1427. GrGLenum externalFormat = this->glCaps().getBaseInternalFormat(format);
  1428. GrGLenum externalType = this->glCaps().getFormatDefaultExternalType(format);
  1429. if (!internalFormat || !externalFormat || !externalType) {
  1430. return -1;
  1431. }
  1432. this->unbindCpuToGpuXferBuffer();
  1433. CLEAR_ERROR_BEFORE_ALLOC(this->glInterface());
  1434. GL_ALLOC_CALL(this->glInterface(), TexImage2D(GR_GL_TEXTURE_2D,
  1435. 0,
  1436. internalFormat,
  1437. kSize,
  1438. kSize,
  1439. 0,
  1440. externalFormat,
  1441. externalType,
  1442. nullptr));
  1443. if (GR_GL_NO_ERROR != CHECK_ALLOC_ERROR(this->glInterface())) {
  1444. GL_CALL(DeleteTextures(1, &colorID));
  1445. return -1;
  1446. }
  1447. // unbind the texture from the texture unit before binding it to the frame buffer
  1448. GL_CALL(BindTexture(GR_GL_TEXTURE_2D, 0));
  1449. // Create Framebuffer
  1450. GrGLuint fb = 0;
  1451. GL_CALL(GenFramebuffers(1, &fb));
  1452. this->bindFramebuffer(GR_GL_FRAMEBUFFER, fb);
  1453. fHWBoundRenderTargetUniqueID.makeInvalid();
  1454. GL_CALL(FramebufferTexture2D(GR_GL_FRAMEBUFFER,
  1455. GR_GL_COLOR_ATTACHMENT0,
  1456. GR_GL_TEXTURE_2D,
  1457. colorID,
  1458. 0));
  1459. GrGLuint sbRBID = 0;
  1460. GL_CALL(GenRenderbuffers(1, &sbRBID));
1461. // look over formats until we find a compatible one
  1462. int stencilFmtCnt = this->glCaps().stencilFormats().count();
  1463. if (sbRBID) {
  1464. GL_CALL(BindRenderbuffer(GR_GL_RENDERBUFFER, sbRBID));
  1465. for (int i = 0; i < stencilFmtCnt && sbRBID; ++i) {
  1466. const GrGLCaps::StencilFormat& sFmt = this->glCaps().stencilFormats()[i];
  1467. CLEAR_ERROR_BEFORE_ALLOC(this->glInterface());
  1468. GL_ALLOC_CALL(this->glInterface(), RenderbufferStorage(GR_GL_RENDERBUFFER,
  1469. sFmt.fInternalFormat,
  1470. kSize, kSize));
  1471. if (GR_GL_NO_ERROR == CHECK_ALLOC_ERROR(this->glInterface())) {
  1472. GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER,
  1473. GR_GL_STENCIL_ATTACHMENT,
  1474. GR_GL_RENDERBUFFER, sbRBID));
  1475. if (sFmt.fPacked) {
  1476. GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER,
  1477. GR_GL_DEPTH_ATTACHMENT,
  1478. GR_GL_RENDERBUFFER, sbRBID));
  1479. } else {
  1480. GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER,
  1481. GR_GL_DEPTH_ATTACHMENT,
  1482. GR_GL_RENDERBUFFER, 0));
  1483. }
  1484. GrGLenum status;
  1485. GL_CALL_RET(status, CheckFramebufferStatus(GR_GL_FRAMEBUFFER));
  1486. if (status == GR_GL_FRAMEBUFFER_COMPLETE) {
  1487. firstWorkingStencilFormatIndex = i;
  1488. break;
  1489. }
  1490. GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER,
  1491. GR_GL_STENCIL_ATTACHMENT,
  1492. GR_GL_RENDERBUFFER, 0));
  1493. if (sFmt.fPacked) {
  1494. GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER,
  1495. GR_GL_DEPTH_ATTACHMENT,
  1496. GR_GL_RENDERBUFFER, 0));
  1497. }
  1498. }
  1499. }
  1500. GL_CALL(DeleteRenderbuffers(1, &sbRBID));
  1501. }
  1502. GL_CALL(DeleteTextures(1, &colorID));
  1503. this->bindFramebuffer(GR_GL_FRAMEBUFFER, 0);
  1504. this->deleteFramebuffer(fb);
  1505. fGLContext->caps()->setStencilFormatIndexForFormat(format, firstWorkingStencilFormatIndex);
  1506. }
  1507. return this->glCaps().getStencilFormatIndexForFormat(format);
  1508. }
  1509. bool GrGLGpu::createTextureImpl(const GrSurfaceDesc& desc, GrGLTextureInfo* info,
  1510. GrRenderable renderable,
  1511. GrGLTextureParameters::SamplerOverriddenState* initialState,
  1512. const GrMipLevel texels[], int mipLevelCount,
  1513. GrMipMapsStatus* mipMapsStatus) {
  1514. SkASSERT(!GrPixelConfigIsCompressed(desc.fConfig));
  1515. info->fID = 0;
  1516. info->fTarget = GR_GL_TEXTURE_2D;
  1517. GL_CALL(GenTextures(1, &(info->fID)));
  1518. if (!info->fID) {
  1519. return false;
  1520. }
  1521. info->fFormat = this->glCaps().configSizedInternalFormat(desc.fConfig);
  1522. this->bindTextureToScratchUnit(info->fTarget, info->fID);
  1523. if (GrRenderable::kYes == renderable && this->glCaps().textureUsageSupport()) {
  1524. // provides a hint about how this texture will be used
  1525. GL_CALL(TexParameteri(info->fTarget,
  1526. GR_GL_TEXTURE_USAGE,
  1527. GR_GL_FRAMEBUFFER_ATTACHMENT));
  1528. }
  1529. *initialState = set_initial_texture_params(this->glInterface(), *info);
  1530. if (!this->uploadTexData(desc.fConfig, desc.fWidth, desc.fHeight, info->fTarget,
  1531. kNewTexture_UploadType, 0, 0, desc.fWidth, desc.fHeight, desc.fConfig,
  1532. texels, mipLevelCount, mipMapsStatus)) {
  1533. GL_CALL(DeleteTextures(1, &(info->fID)));
  1534. return false;
  1535. }
  1536. return true;
  1537. }
  1538. bool GrGLGpu::createCompressedTextureImpl(
  1539. GrGLTextureInfo* info, int width, int height, SkImage::CompressionType compression,
  1540. GrGLTextureParameters::SamplerOverriddenState* initialState, const void* data) {
  1541. info->fID = 0;
  1542. GL_CALL(GenTextures(1, &info->fID));
  1543. if (!info->fID) {
  1544. return false;
  1545. }
  1546. info->fTarget = GR_GL_TEXTURE_2D;
  1547. this->bindTextureToScratchUnit(info->fTarget, info->fID);
  1548. *initialState = set_initial_texture_params(this->glInterface(), *info);
  1549. info->fFormat = this->uploadCompressedTexData(compression, width, height, info->fTarget, data);
  1550. if (!info->fFormat) {
  1551. GL_CALL(DeleteTextures(1, &info->fID));
  1552. return false;
  1553. }
  1554. return true;
  1555. }
  1556. GrStencilAttachment* GrGLGpu::createStencilAttachmentForRenderTarget(
  1557. const GrRenderTarget* rt, int width, int height, int numStencilSamples) {
  1558. SkASSERT(width >= rt->width());
  1559. SkASSERT(height >= rt->height());
  1560. GrGLStencilAttachment::IDDesc sbDesc;
  1561. auto rtFormat = GrGLBackendFormatToGLFormat(rt->backendFormat());
  1562. int sIdx = this->getCompatibleStencilIndex(rtFormat);
  1563. if (sIdx < 0) {
  1564. return nullptr;
  1565. }
  1566. if (!sbDesc.fRenderbufferID) {
  1567. GL_CALL(GenRenderbuffers(1, &sbDesc.fRenderbufferID));
  1568. }
  1569. if (!sbDesc.fRenderbufferID) {
  1570. return nullptr;
  1571. }
  1572. GL_CALL(BindRenderbuffer(GR_GL_RENDERBUFFER, sbDesc.fRenderbufferID));
  1573. const GrGLCaps::StencilFormat& sFmt = this->glCaps().stencilFormats()[sIdx];
  1574. CLEAR_ERROR_BEFORE_ALLOC(this->glInterface());
  1575. // we do this "if" so that we don't call the multisample
  1576. // version on a GL that doesn't have an MSAA extension.
  1577. if (numStencilSamples > 1) {
  1578. SkAssertResult(renderbuffer_storage_msaa(*fGLContext,
  1579. numStencilSamples,
  1580. sFmt.fInternalFormat,
  1581. width, height));
  1582. } else {
  1583. GL_ALLOC_CALL(this->glInterface(), RenderbufferStorage(GR_GL_RENDERBUFFER,
  1584. sFmt.fInternalFormat,
  1585. width, height));
  1586. SkASSERT(GR_GL_NO_ERROR == CHECK_ALLOC_ERROR(this->glInterface()));
  1587. }
  1588. fStats.incStencilAttachmentCreates();
  1589. // After sized formats we attempt an unsized format and take
  1590. // whatever sizes GL gives us. In that case we query for the size.
  1591. GrGLStencilAttachment::Format format = sFmt;
  1592. get_stencil_rb_sizes(this->glInterface(), &format);
  1593. GrGLStencilAttachment* stencil = new GrGLStencilAttachment(this,
  1594. sbDesc,
  1595. width,
  1596. height,
  1597. numStencilSamples,
  1598. format);
  1599. return stencil;
  1600. }
  1601. ////////////////////////////////////////////////////////////////////////////////
  1602. sk_sp<GrGpuBuffer> GrGLGpu::onCreateBuffer(size_t size, GrGpuBufferType intendedType,
  1603. GrAccessPattern accessPattern, const void* data) {
  1604. return GrGLBuffer::Make(this, size, intendedType, accessPattern, data);
  1605. }
  1606. void GrGLGpu::flushScissor(const GrScissorState& scissorState, int rtWidth, int rtHeight,
  1607. GrSurfaceOrigin rtOrigin) {
  1608. if (scissorState.enabled()) {
  1609. GrGLIRect scissor;
  1610. scissor.setRelativeTo(rtHeight, scissorState.rect(), rtOrigin);
  1611. // if the scissor fully contains the viewport then we fall through and
  1612. // disable the scissor test.
  1613. if (!scissor.contains(rtWidth, rtHeight)) {
  1614. if (fHWScissorSettings.fRect != scissor) {
  1615. scissor.pushToGLScissor(this->glInterface());
  1616. fHWScissorSettings.fRect = scissor;
  1617. }
  1618. if (kYes_TriState != fHWScissorSettings.fEnabled) {
  1619. GL_CALL(Enable(GR_GL_SCISSOR_TEST));
  1620. fHWScissorSettings.fEnabled = kYes_TriState;
  1621. }
  1622. return;
  1623. }
  1624. }
  1625. // See fall through note above
  1626. this->disableScissor();
  1627. }
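// Illustrative sketch (not part of this file): the coordinate flip GrGLIRect::setRelativeTo()
// performs before the rect is handed to glScissor. Skia rects for a kTopLeft_GrSurfaceOrigin
// target are top-left origin, while GL scissor boxes are bottom-left origin, so the top edge is
// re-expressed as a distance from the bottom (a bottom-left-origin rect passes through as-is).
// Illustration only; only the top-left case is shown.
#if 0
struct GLScissorBox { int fLeft, fBottom, fWidth, fHeight; };

static GLScissorBox flip_to_gl_scissor(int rtHeight, int left, int top, int width, int height) {
    GLScissorBox box;
    box.fLeft   = left;
    box.fBottom = rtHeight - (top + height);  // flip y for GL's bottom-left origin
    box.fWidth  = width;
    box.fHeight = height;
    return box;  // e.g. rtHeight=100, top=10, height=20 -> fBottom=70
}
#endif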
  1628. void GrGLGpu::flushWindowRectangles(const GrWindowRectsState& windowState,
  1629. const GrGLRenderTarget* rt, GrSurfaceOrigin origin) {
  1630. #ifndef USE_NSIGHT
  1631. typedef GrWindowRectsState::Mode Mode;
  1632. SkASSERT(!windowState.enabled() || rt->renderFBOID()); // Window rects can't be used on-screen.
  1633. SkASSERT(windowState.numWindows() <= this->caps()->maxWindowRectangles());
  1634. if (!this->caps()->maxWindowRectangles() ||
  1635. fHWWindowRectsState.knownEqualTo(origin, rt->width(), rt->height(), windowState)) {
  1636. return;
  1637. }
  1638. // This is purely a workaround for a spurious warning generated by gcc. Otherwise the above
  1639. // assert would be sufficient. https://gcc.gnu.org/bugzilla/show_bug.cgi?id=5912
  1640. int numWindows = SkTMin(windowState.numWindows(), int(GrWindowRectangles::kMaxWindows));
  1641. SkASSERT(windowState.numWindows() == numWindows);
  1642. GrGLIRect glwindows[GrWindowRectangles::kMaxWindows];
  1643. const SkIRect* skwindows = windowState.windows().data();
  1644. for (int i = 0; i < numWindows; ++i) {
  1645. glwindows[i].setRelativeTo(rt->height(), skwindows[i], origin);
  1646. }
  1647. GrGLenum glmode = (Mode::kExclusive == windowState.mode()) ? GR_GL_EXCLUSIVE : GR_GL_INCLUSIVE;
  1648. GL_CALL(WindowRectangles(glmode, numWindows, glwindows->asInts()));
  1649. fHWWindowRectsState.set(origin, rt->width(), rt->height(), windowState);
  1650. #endif
  1651. }
  1652. void GrGLGpu::disableWindowRectangles() {
  1653. #ifndef USE_NSIGHT
  1654. if (!this->caps()->maxWindowRectangles() || fHWWindowRectsState.knownDisabled()) {
  1655. return;
  1656. }
  1657. GL_CALL(WindowRectangles(GR_GL_EXCLUSIVE, 0, nullptr));
  1658. fHWWindowRectsState.setDisabled();
  1659. #endif
  1660. }
  1661. void GrGLGpu::resolveAndGenerateMipMapsForProcessorTextures(
  1662. const GrPrimitiveProcessor& primProc,
  1663. const GrPipeline& pipeline,
  1664. const GrTextureProxy* const primProcTextures[],
  1665. int numPrimitiveProcessorTextureSets) {
  1666. auto genLevelsIfNeeded = [this](GrTexture* tex, const GrSamplerState& sampler) {
  1667. SkASSERT(tex);
  1668. if (sampler.filter() == GrSamplerState::Filter::kMipMap &&
  1669. tex->texturePriv().mipMapped() == GrMipMapped::kYes &&
  1670. tex->texturePriv().mipMapsAreDirty()) {
  1671. SkASSERT(this->caps()->mipMapSupport());
  1672. this->regenerateMipMapLevels(static_cast<GrGLTexture*>(tex));
  1673. SkASSERT(!tex->asRenderTarget() || !tex->asRenderTarget()->needsResolve());
  1674. } else if (auto* rt = tex->asRenderTarget()) {
  1675. if (rt->needsResolve()) {
  1676. this->resolveRenderTarget(rt);
  1677. }
  1678. }
  1679. };
  1680. for (int set = 0, tex = 0; set < numPrimitiveProcessorTextureSets; ++set) {
  1681. for (int sampler = 0; sampler < primProc.numTextureSamplers(); ++sampler, ++tex) {
  1682. GrTexture* texture = primProcTextures[tex]->peekTexture();
  1683. genLevelsIfNeeded(texture, primProc.textureSampler(sampler).samplerState());
  1684. }
  1685. }
  1686. GrFragmentProcessor::Iter iter(pipeline);
  1687. while (const GrFragmentProcessor* fp = iter.next()) {
  1688. for (int i = 0; i < fp->numTextureSamplers(); ++i) {
  1689. const auto& textureSampler = fp->textureSampler(i);
  1690. genLevelsIfNeeded(textureSampler.peekTexture(), textureSampler.samplerState());
  1691. }
  1692. }
  1693. }
  1694. bool GrGLGpu::flushGLState(GrRenderTarget* renderTarget,
  1695. GrSurfaceOrigin origin,
  1696. const GrPrimitiveProcessor& primProc,
  1697. const GrPipeline& pipeline,
  1698. const GrPipeline::FixedDynamicState* fixedDynamicState,
  1699. const GrPipeline::DynamicStateArrays* dynamicStateArrays,
  1700. int dynamicStateArraysLength,
  1701. bool willDrawPoints) {
  1702. const GrTextureProxy* const* primProcProxiesForMipRegen = nullptr;
  1703. const GrTextureProxy* const* primProcProxiesToBind = nullptr;
1704. int numPrimProcTextureSets = 1; // number of textures per prim proc sampler.
  1705. if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
  1706. primProcProxiesForMipRegen = dynamicStateArrays->fPrimitiveProcessorTextures;
  1707. numPrimProcTextureSets = dynamicStateArraysLength;
  1708. } else if (fixedDynamicState && fixedDynamicState->fPrimitiveProcessorTextures) {
  1709. primProcProxiesForMipRegen = fixedDynamicState->fPrimitiveProcessorTextures;
  1710. primProcProxiesToBind = fixedDynamicState->fPrimitiveProcessorTextures;
  1711. }
  1712. SkASSERT(SkToBool(primProcProxiesForMipRegen) == SkToBool(primProc.numTextureSamplers()));
  1713. sk_sp<GrGLProgram> program(fProgramCache->refProgram(this, renderTarget, origin, primProc,
  1714. primProcProxiesForMipRegen,
  1715. pipeline, willDrawPoints));
  1716. if (!program) {
  1717. GrCapsDebugf(this->caps(), "Failed to create program!\n");
  1718. return false;
  1719. }
  1720. this->resolveAndGenerateMipMapsForProcessorTextures(
  1721. primProc, pipeline, primProcProxiesForMipRegen, numPrimProcTextureSets);
  1722. this->flushProgram(std::move(program));
  1723. // Swizzle the blend to match what the shader will output.
  1724. this->flushBlendAndColorWrite(
  1725. pipeline.getXferProcessor().getBlendInfo(), pipeline.outputSwizzle());
  1726. fHWProgram->updateUniformsAndTextureBindings(renderTarget, origin,
  1727. primProc, pipeline, primProcProxiesToBind);
  1728. GrGLRenderTarget* glRT = static_cast<GrGLRenderTarget*>(renderTarget);
  1729. GrStencilSettings stencil;
  1730. if (pipeline.isStencilEnabled()) {
  1731. // TODO: attach stencil and create settings during render target flush.
  1732. SkASSERT(glRT->renderTargetPriv().getStencilAttachment());
  1733. stencil.reset(*pipeline.getUserStencil(), pipeline.hasStencilClip(),
  1734. glRT->renderTargetPriv().numStencilBits());
  1735. }
  1736. this->flushStencil(stencil, origin);
  1737. if (pipeline.isScissorEnabled()) {
  1738. static constexpr SkIRect kBogusScissor{0, 0, 1, 1};
  1739. GrScissorState state(fixedDynamicState ? fixedDynamicState->fScissorRect : kBogusScissor);
  1740. this->flushScissor(state, glRT->width(), glRT->height(), origin);
  1741. } else {
  1742. this->disableScissor();
  1743. }
  1744. this->flushWindowRectangles(pipeline.getWindowRectsState(), glRT, origin);
  1745. this->flushHWAAState(glRT, pipeline.isHWAntialiasState());
  1746. // This must come after textures are flushed because a texture may need
  1747. // to be msaa-resolved (which will modify bound FBO state).
  1748. this->flushRenderTarget(glRT);
  1749. return true;
  1750. }
  1751. void GrGLGpu::flushProgram(sk_sp<GrGLProgram> program) {
  1752. if (!program) {
  1753. fHWProgram.reset();
  1754. fHWProgramID = 0;
  1755. return;
  1756. }
  1757. SkASSERT((program == fHWProgram) == (fHWProgramID == program->programID()));
  1758. if (program == fHWProgram) {
  1759. return;
  1760. }
  1761. auto id = program->programID();
  1762. SkASSERT(id);
  1763. GL_CALL(UseProgram(id));
  1764. fHWProgram = std::move(program);
  1765. fHWProgramID = id;
  1766. }
  1767. void GrGLGpu::flushProgram(GrGLuint id) {
  1768. SkASSERT(id);
  1769. if (fHWProgramID == id) {
  1770. SkASSERT(!fHWProgram);
  1771. return;
  1772. }
  1773. fHWProgram.reset();
  1774. GL_CALL(UseProgram(id));
  1775. fHWProgramID = id;
  1776. }
  1777. void GrGLGpu::setupGeometry(const GrBuffer* indexBuffer,
  1778. const GrBuffer* vertexBuffer,
  1779. int baseVertex,
  1780. const GrBuffer* instanceBuffer,
  1781. int baseInstance,
  1782. GrPrimitiveRestart enablePrimitiveRestart) {
  1783. SkASSERT((enablePrimitiveRestart == GrPrimitiveRestart::kNo) || indexBuffer);
  1784. GrGLAttribArrayState* attribState;
  1785. if (indexBuffer) {
  1786. SkASSERT(indexBuffer->isCpuBuffer() ||
  1787. !static_cast<const GrGpuBuffer*>(indexBuffer)->isMapped());
  1788. attribState = fHWVertexArrayState.bindInternalVertexArray(this, indexBuffer);
  1789. } else {
  1790. attribState = fHWVertexArrayState.bindInternalVertexArray(this);
  1791. }
  1792. int numAttribs = fHWProgram->numVertexAttributes() + fHWProgram->numInstanceAttributes();
  1793. attribState->enableVertexArrays(this, numAttribs, enablePrimitiveRestart);
  1794. if (int vertexStride = fHWProgram->vertexStride()) {
  1795. SkASSERT(vertexBuffer);
  1796. SkASSERT(vertexBuffer->isCpuBuffer() ||
  1797. !static_cast<const GrGpuBuffer*>(vertexBuffer)->isMapped());
  1798. size_t bufferOffset = baseVertex * static_cast<size_t>(vertexStride);
  1799. for (int i = 0; i < fHWProgram->numVertexAttributes(); ++i) {
  1800. const auto& attrib = fHWProgram->vertexAttribute(i);
  1801. static constexpr int kDivisor = 0;
  1802. attribState->set(this, attrib.fLocation, vertexBuffer, attrib.fCPUType, attrib.fGPUType,
  1803. vertexStride, bufferOffset + attrib.fOffset, kDivisor);
  1804. }
  1805. }
  1806. if (int instanceStride = fHWProgram->instanceStride()) {
  1807. SkASSERT(instanceBuffer);
  1808. SkASSERT(instanceBuffer->isCpuBuffer() ||
  1809. !static_cast<const GrGpuBuffer*>(instanceBuffer)->isMapped());
  1810. size_t bufferOffset = baseInstance * static_cast<size_t>(instanceStride);
  1811. int attribIdx = fHWProgram->numVertexAttributes();
  1812. for (int i = 0; i < fHWProgram->numInstanceAttributes(); ++i, ++attribIdx) {
  1813. const auto& attrib = fHWProgram->instanceAttribute(i);
  1814. static constexpr int kDivisor = 1;
  1815. attribState->set(this, attrib.fLocation, instanceBuffer, attrib.fCPUType,
  1816. attrib.fGPUType, instanceStride, bufferOffset + attrib.fOffset,
  1817. kDivisor);
  1818. }
  1819. }
  1820. }
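// Illustrative sketch (not part of this file): the byte-offset and divisor choices made above.
// Vertex attributes advance once per vertex (divisor 0) starting at baseVertex * vertexStride;
// instance attributes advance once per instance (divisor 1) starting at
// baseInstance * instanceStride. Uses raw GL names for brevity and assumes a GL/GLES3 header;
// the attribute format (4 floats) is an arbitrary example.
#if 0
#include <GLES3/gl3.h>
#include <cstddef>

static void attrib_pointer_example(GLuint loc, int stride, int baseVertex,
                                   size_t attribOffsetInVertex, bool perInstance) {
    const size_t bufferOffset = static_cast<size_t>(baseVertex) * stride
                              + attribOffsetInVertex;
    glVertexAttribPointer(loc, 4, GL_FLOAT, GL_FALSE, stride,
                          reinterpret_cast<const void*>(bufferOffset));
    glVertexAttribDivisor(loc, perInstance ? 1 : 0);
}
#endif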
  1821. GrGLenum GrGLGpu::bindBuffer(GrGpuBufferType type, const GrBuffer* buffer) {
  1822. this->handleDirtyContext();
  1823. // Index buffer state is tied to the vertex array.
  1824. if (GrGpuBufferType::kIndex == type) {
  1825. this->bindVertexArray(0);
  1826. }
  1827. auto* bufferState = this->hwBufferState(type);
  1828. if (buffer->isCpuBuffer()) {
  1829. if (!bufferState->fBufferZeroKnownBound) {
  1830. GL_CALL(BindBuffer(bufferState->fGLTarget, 0));
  1831. bufferState->fBufferZeroKnownBound = true;
  1832. bufferState->fBoundBufferUniqueID.makeInvalid();
  1833. }
  1834. } else if (static_cast<const GrGpuBuffer*>(buffer)->uniqueID() !=
  1835. bufferState->fBoundBufferUniqueID) {
  1836. const GrGLBuffer* glBuffer = static_cast<const GrGLBuffer*>(buffer);
  1837. GL_CALL(BindBuffer(bufferState->fGLTarget, glBuffer->bufferID()));
  1838. bufferState->fBufferZeroKnownBound = false;
  1839. bufferState->fBoundBufferUniqueID = glBuffer->uniqueID();
  1840. }
  1841. return bufferState->fGLTarget;
  1842. }
  1843. void GrGLGpu::disableScissor() {
  1844. if (kNo_TriState != fHWScissorSettings.fEnabled) {
  1845. GL_CALL(Disable(GR_GL_SCISSOR_TEST));
  1846. fHWScissorSettings.fEnabled = kNo_TriState;
  1847. return;
  1848. }
  1849. }
  1850. void GrGLGpu::clear(const GrFixedClip& clip, const SkPMColor4f& color,
  1851. GrRenderTarget* target, GrSurfaceOrigin origin) {
  1852. // parent class should never let us get here with no RT
  1853. SkASSERT(target);
  1854. SkASSERT(!this->caps()->performColorClearsAsDraws());
  1855. SkASSERT(!clip.scissorEnabled() || !this->caps()->performPartialClearsAsDraws());
  1856. this->handleDirtyContext();
  1857. GrGLRenderTarget* glRT = static_cast<GrGLRenderTarget*>(target);
  1858. if (clip.scissorEnabled()) {
  1859. this->flushRenderTarget(glRT, origin, clip.scissorRect());
  1860. } else {
  1861. this->flushRenderTarget(glRT);
  1862. }
  1863. this->flushScissor(clip.scissorState(), glRT->width(), glRT->height(), origin);
  1864. this->flushWindowRectangles(clip.windowRectsState(), glRT, origin);
  1865. this->flushColorWrite(true);
  1866. GrGLfloat r = color.fR, g = color.fG, b = color.fB, a = color.fA;
  1867. if (this->glCaps().clearToBoundaryValuesIsBroken() &&
  1868. (1 == r || 0 == r) && (1 == g || 0 == g) && (1 == b || 0 == b) && (1 == a || 0 == a)) {
  1869. static const GrGLfloat safeAlpha1 = nextafter(1.f, 2.f);
  1870. static const GrGLfloat safeAlpha0 = nextafter(0.f, -1.f);
  1871. a = (1 == a) ? safeAlpha1 : safeAlpha0;
  1872. }
  1873. this->flushClearColor(r, g, b, a);
  1874. GL_CALL(Clear(GR_GL_COLOR_BUFFER_BIT));
  1875. }
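// Illustrative sketch (not part of this file): what the nextafter() nudge above produces.
// std::nextafter(1.f, 2.f) is the smallest float strictly greater than 1, and
// std::nextafter(0.f, -1.f) is the largest value strictly less than 0, so exact boundary clear
// values get pushed just past the boundary on drivers where clearing to exactly 0 or 1 misbehaves.
#if 0
#include <cassert>
#include <cmath>

static void safe_clear_alpha_example() {
    const float safeAlpha1 = std::nextafter(1.f, 2.f);   // ~1.0000001
    const float safeAlpha0 = std::nextafter(0.f, -1.f);  // just below 0
    assert(safeAlpha1 > 1.f && safeAlpha0 < 0.f);
}
#endif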
  1876. void GrGLGpu::clearStencil(GrRenderTarget* target, int clearValue) {
  1877. SkASSERT(!this->caps()->performStencilClearsAsDraws());
  1878. if (!target) {
  1879. return;
  1880. }
  1881. GrStencilAttachment* sb = target->renderTargetPriv().getStencilAttachment();
  1882. // this should only be called internally when we know we have a
  1883. // stencil buffer.
  1884. SkASSERT(sb);
  1885. GrGLRenderTarget* glRT = static_cast<GrGLRenderTarget*>(target);
  1886. this->flushRenderTargetNoColorWrites(glRT);
  1887. this->disableScissor();
  1888. this->disableWindowRectangles();
  1889. GL_CALL(StencilMask(0xffffffff));
  1890. GL_CALL(ClearStencil(clearValue));
  1891. GL_CALL(Clear(GR_GL_STENCIL_BUFFER_BIT));
  1892. fHWStencilSettings.invalidate();
  1893. if (!clearValue) {
  1894. sb->cleared();
  1895. }
  1896. }
  1897. void GrGLGpu::clearStencilClip(const GrFixedClip& clip,
  1898. bool insideStencilMask,
  1899. GrRenderTarget* target, GrSurfaceOrigin origin) {
  1900. SkASSERT(target);
  1901. SkASSERT(!this->caps()->performStencilClearsAsDraws());
  1902. this->handleDirtyContext();
  1903. GrStencilAttachment* sb = target->renderTargetPriv().getStencilAttachment();
  1904. // this should only be called internally when we know we have a
  1905. // stencil buffer.
  1906. SkASSERT(sb);
  1907. GrGLint stencilBitCount = sb->bits();
  1908. #if 0
  1909. SkASSERT(stencilBitCount > 0);
  1910. GrGLint clipStencilMask = (1 << (stencilBitCount - 1));
  1911. #else
  1912. // we could just clear the clip bit but when we go through
  1913. // ANGLE a partial stencil mask will cause clears to be
  1914. // turned into draws. Our contract on GrOpList says that
  1915. // changing the clip between stencil passes may or may not
  1916. // zero the client's clip bits. So we just clear the whole thing.
  1917. static const GrGLint clipStencilMask = ~0;
  1918. #endif
  1919. GrGLint value;
  1920. if (insideStencilMask) {
  1921. value = (1 << (stencilBitCount - 1));
  1922. } else {
  1923. value = 0;
  1924. }
  1925. GrGLRenderTarget* glRT = static_cast<GrGLRenderTarget*>(target);
  1926. this->flushRenderTargetNoColorWrites(glRT);
  1927. this->flushScissor(clip.scissorState(), glRT->width(), glRT->height(), origin);
  1928. this->flushWindowRectangles(clip.windowRectsState(), glRT, origin);
  1929. GL_CALL(StencilMask((uint32_t) clipStencilMask));
  1930. GL_CALL(ClearStencil(value));
  1931. GL_CALL(Clear(GR_GL_STENCIL_BUFFER_BIT));
  1932. fHWStencilSettings.invalidate();
  1933. }
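// Illustrative sketch (not part of this file): the clip-bit arithmetic above. The clip mask
// occupies the most significant stencil bit, so with an 8-bit stencil buffer
// (1 << (8 - 1)) == 0x80 is written when clearing "inside" the stencil clip and 0 when
// clearing outside it.
#if 0
#include <cassert>

static void clip_bit_example() {
    const int stencilBitCount = 8;
    const int clipBit = 1 << (stencilBitCount - 1);
    assert(clipBit == 0x80);
}
#endif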
  1934. bool GrGLGpu::readOrTransferPixelsFrom(GrSurface* surface, int left, int top, int width, int height,
  1935. GrColorType dstColorType, void* offsetOrPtr,
  1936. int rowWidthInPixels) {
  1937. SkASSERT(surface);
  1938. GrGLRenderTarget* renderTarget = static_cast<GrGLRenderTarget*>(surface->asRenderTarget());
  1939. if (!renderTarget && !this->glCaps().canConfigBeFBOColorAttachment(surface->config())) {
  1940. return false;
  1941. }
  1942. // TODO: Avoid this conversion by making GrGLCaps work with color types.
  1943. auto dstAsConfig = GrColorTypeToPixelConfig(dstColorType);
  1944. GrGLenum externalFormat;
  1945. GrGLenum externalType;
  1946. if (!this->glCaps().getReadPixelsFormat(surface->config(), dstAsConfig, &externalFormat,
  1947. &externalType)) {
  1948. return false;
  1949. }
  1950. if (renderTarget) {
  1951. // resolve the render target if necessary
  1952. switch (renderTarget->getResolveType()) {
  1953. case GrGLRenderTarget::kCantResolve_ResolveType:
  1954. return false;
  1955. case GrGLRenderTarget::kAutoResolves_ResolveType:
  1956. this->flushRenderTargetNoColorWrites(renderTarget);
  1957. break;
  1958. case GrGLRenderTarget::kCanResolve_ResolveType:
  1959. this->onResolveRenderTarget(renderTarget);
  1960. // we don't track the state of the READ FBO ID.
  1961. this->bindFramebuffer(GR_GL_READ_FRAMEBUFFER, renderTarget->textureFBOID());
  1962. break;
  1963. default:
  1964. SK_ABORT("Unknown resolve type");
  1965. }
  1966. } else {
  1967. // Use a temporary FBO.
  1968. this->bindSurfaceFBOForPixelOps(surface, GR_GL_FRAMEBUFFER, kSrc_TempFBOTarget);
  1969. fHWBoundRenderTargetUniqueID.makeInvalid();
  1970. }
  1971. // the read rect is viewport-relative
  1972. GrGLIRect readRect;
  1973. readRect.setRelativeTo(surface->height(), left, top, width, height, kTopLeft_GrSurfaceOrigin);
  1974. // determine if GL can read using the passed rowBytes or if we need a scratch buffer.
  1975. if (rowWidthInPixels != width) {
  1976. SkASSERT(this->glCaps().readPixelsRowBytesSupport());
  1977. GL_CALL(PixelStorei(GR_GL_PACK_ROW_LENGTH, rowWidthInPixels));
  1978. }
  1979. GL_CALL(PixelStorei(GR_GL_PACK_ALIGNMENT, config_alignment(dstAsConfig)));
  1980. bool reattachStencil = false;
  1981. if (this->glCaps().detachStencilFromMSAABuffersBeforeReadPixels() &&
  1982. renderTarget &&
  1983. renderTarget->renderTargetPriv().getStencilAttachment() &&
  1984. renderTarget->numSamples() > 1) {
  1985. // Fix Adreno devices that won't read from MSAA framebuffers with stencil attached
  1986. reattachStencil = true;
  1987. GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER, GR_GL_STENCIL_ATTACHMENT,
  1988. GR_GL_RENDERBUFFER, 0));
  1989. }
  1990. GL_CALL(ReadPixels(readRect.fLeft, readRect.fBottom, readRect.fWidth, readRect.fHeight,
  1991. externalFormat, externalType, offsetOrPtr));
  1992. if (reattachStencil) {
  1993. GrGLStencilAttachment* stencilAttachment = static_cast<GrGLStencilAttachment*>(
  1994. renderTarget->renderTargetPriv().getStencilAttachment());
  1995. GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER, GR_GL_STENCIL_ATTACHMENT,
  1996. GR_GL_RENDERBUFFER, stencilAttachment->renderbufferID()));
  1997. }
  1998. if (rowWidthInPixels != width) {
  1999. SkASSERT(this->glCaps().readPixelsRowBytesSupport());
  2000. GL_CALL(PixelStorei(GR_GL_PACK_ROW_LENGTH, 0));
  2001. }
  2002. if (!renderTarget) {
  2003. this->unbindTextureFBOForPixelOps(GR_GL_FRAMEBUFFER, surface);
  2004. }
  2005. return true;
  2006. }
  2007. bool GrGLGpu::onReadPixels(GrSurface* surface, int left, int top, int width, int height,
  2008. GrColorType dstColorType, void* buffer, size_t rowBytes) {
  2009. SkASSERT(surface);
  2010. int bytesPerPixel = GrColorTypeBytesPerPixel(dstColorType);
  2011. // GL_PACK_ROW_LENGTH is in terms of pixels not bytes.
  2012. int rowPixelWidth;
  2013. if (rowBytes == SkToSizeT(width * bytesPerPixel)) {
  2014. rowPixelWidth = width;
  2015. } else {
  2016. SkASSERT(!(rowBytes % bytesPerPixel));
  2017. rowPixelWidth = rowBytes / bytesPerPixel;
  2018. }
  2019. return this->readOrTransferPixelsFrom(surface, left, top, width, height, dstColorType, buffer,
  2020. rowPixelWidth);
  2021. }
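// Illustrative sketch (not part of this file): the rowBytes -> row-length conversion above.
// GL_PACK_ROW_LENGTH counts pixels, so a destination pitch given in bytes is converted by
// dividing by the pixel size, and it must divide evenly for the readback to be expressible
// this way.
#if 0
#include <cassert>
#include <cstddef>

static int pack_row_length_example() {
    const size_t rowBytes = 1024;   // destination pitch in bytes
    const int bytesPerPixel = 4;    // e.g. a 4-byte color type
    assert(rowBytes % bytesPerPixel == 0);
    return static_cast<int>(rowBytes / bytesPerPixel);  // 256 pixels per row
}
#endif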
  2022. GrGpuRTCommandBuffer* GrGLGpu::getCommandBuffer(
  2023. GrRenderTarget* rt, GrSurfaceOrigin origin, const SkRect& bounds,
  2024. const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
  2025. const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
  2026. if (!fCachedRTCommandBuffer) {
  2027. fCachedRTCommandBuffer.reset(new GrGLGpuRTCommandBuffer(this));
  2028. }
  2029. fCachedRTCommandBuffer->set(rt, origin, colorInfo, stencilInfo);
  2030. return fCachedRTCommandBuffer.get();
  2031. }
  2032. GrGpuTextureCommandBuffer* GrGLGpu::getCommandBuffer(GrTexture* texture, GrSurfaceOrigin origin) {
  2033. if (!fCachedTexCommandBuffer) {
  2034. fCachedTexCommandBuffer.reset(new GrGLGpuTextureCommandBuffer(this));
  2035. }
  2036. fCachedTexCommandBuffer->set(texture, origin);
  2037. return fCachedTexCommandBuffer.get();
  2038. }
  2039. void GrGLGpu::flushRenderTarget(GrGLRenderTarget* target, GrSurfaceOrigin origin,
  2040. const SkIRect& bounds) {
  2041. this->flushRenderTargetNoColorWrites(target);
  2042. this->didWriteToSurface(target, origin, &bounds);
  2043. }
  2044. void GrGLGpu::flushRenderTarget(GrGLRenderTarget* target) {
  2045. this->flushRenderTargetNoColorWrites(target);
  2046. this->didWriteToSurface(target, kTopLeft_GrSurfaceOrigin, nullptr);
  2047. }
  2048. void GrGLGpu::flushRenderTargetNoColorWrites(GrGLRenderTarget* target) {
  2049. SkASSERT(target);
  2050. GrGpuResource::UniqueID rtID = target->uniqueID();
  2051. if (fHWBoundRenderTargetUniqueID != rtID) {
  2052. this->bindFramebuffer(GR_GL_FRAMEBUFFER, target->renderFBOID());
  2053. #ifdef SK_DEBUG
  2054. // don't do this check in Chromium -- this is causing
  2055. // lots of repeated command buffer flushes when the compositor is
  2056. // rendering with Ganesh, which is really slow; even too slow for
  2057. // Debug mode.
  2058. if (kChromium_GrGLDriver != this->glContext().driver()) {
  2059. GrGLenum status;
  2060. GL_CALL_RET(status, CheckFramebufferStatus(GR_GL_FRAMEBUFFER));
  2061. if (status != GR_GL_FRAMEBUFFER_COMPLETE) {
  2062. SkDebugf("GrGLGpu::flushRenderTarget glCheckFramebufferStatus %x\n", status);
  2063. }
  2064. }
  2065. #endif
  2066. fHWBoundRenderTargetUniqueID = rtID;
  2067. this->flushViewport(target->width(), target->height());
  2068. }
  2069. if (this->glCaps().srgbWriteControl()) {
  2070. this->flushFramebufferSRGB(GrPixelConfigIsSRGB(target->config()));
  2071. }
  2072. }
  2073. void GrGLGpu::flushFramebufferSRGB(bool enable) {
  2074. if (enable && kYes_TriState != fHWSRGBFramebuffer) {
  2075. GL_CALL(Enable(GR_GL_FRAMEBUFFER_SRGB));
  2076. fHWSRGBFramebuffer = kYes_TriState;
  2077. } else if (!enable && kNo_TriState != fHWSRGBFramebuffer) {
  2078. GL_CALL(Disable(GR_GL_FRAMEBUFFER_SRGB));
  2079. fHWSRGBFramebuffer = kNo_TriState;
  2080. }
  2081. }
  2082. void GrGLGpu::flushViewport(int width, int height) {
  2083. GrGLIRect viewport = {0, 0, width, height};
  2084. if (fHWViewport != viewport) {
  2085. viewport.pushToGLViewport(this->glInterface());
  2086. fHWViewport = viewport;
  2087. }
  2088. }
  2089. #define SWAP_PER_DRAW 0
  2090. #if SWAP_PER_DRAW
  2091. #if defined(SK_BUILD_FOR_MAC)
  2092. #include <AGL/agl.h>
  2093. #elif defined(SK_BUILD_FOR_WIN)
  2094. #include <gl/GL.h>
  2095. void SwapBuf() {
  2096. DWORD procID = GetCurrentProcessId();
  2097. HWND hwnd = GetTopWindow(GetDesktopWindow());
  2098. while(hwnd) {
  2099. DWORD wndProcID = 0;
  2100. GetWindowThreadProcessId(hwnd, &wndProcID);
  2101. if(wndProcID == procID) {
  2102. SwapBuffers(GetDC(hwnd));
  2103. }
  2104. hwnd = GetNextWindow(hwnd, GW_HWNDNEXT);
  2105. }
  2106. }
  2107. #endif
  2108. #endif
  2109. void GrGLGpu::draw(GrRenderTarget* renderTarget, GrSurfaceOrigin origin,
  2110. const GrPrimitiveProcessor& primProc,
  2111. const GrPipeline& pipeline,
  2112. const GrPipeline::FixedDynamicState* fixedDynamicState,
  2113. const GrPipeline::DynamicStateArrays* dynamicStateArrays,
  2114. const GrMesh meshes[],
  2115. int meshCount) {
  2116. this->handleDirtyContext();
  2117. bool hasPoints = false;
  2118. for (int i = 0; i < meshCount; ++i) {
  2119. if (meshes[i].primitiveType() == GrPrimitiveType::kPoints) {
  2120. hasPoints = true;
  2121. break;
  2122. }
  2123. }
  2124. if (!this->flushGLState(renderTarget, origin, primProc, pipeline, fixedDynamicState,
  2125. dynamicStateArrays, meshCount, hasPoints)) {
  2126. return;
  2127. }
  2128. bool dynamicScissor = false;
  2129. bool dynamicPrimProcTextures = false;
  2130. if (dynamicStateArrays) {
  2131. dynamicScissor = pipeline.isScissorEnabled() && dynamicStateArrays->fScissorRects;
  2132. dynamicPrimProcTextures = dynamicStateArrays->fPrimitiveProcessorTextures;
  2133. }
  2134. for (int m = 0; m < meshCount; ++m) {
  2135. if (GrXferBarrierType barrierType = pipeline.xferBarrierType(renderTarget->asTexture(),
  2136. *this->caps())) {
  2137. this->xferBarrier(renderTarget, barrierType);
  2138. }
  2139. if (dynamicScissor) {
  2140. GrGLRenderTarget* glRT = static_cast<GrGLRenderTarget*>(renderTarget);
  2141. this->flushScissor(GrScissorState(dynamicStateArrays->fScissorRects[m]),
  2142. glRT->width(), glRT->height(), origin);
  2143. }
  2144. if (dynamicPrimProcTextures) {
  2145. auto texProxyArray = dynamicStateArrays->fPrimitiveProcessorTextures +
  2146. m * primProc.numTextureSamplers();
  2147. fHWProgram->updatePrimitiveProcessorTextureBindings(primProc, texProxyArray);
  2148. }
  2149. if (this->glCaps().requiresCullFaceEnableDisableWhenDrawingLinesAfterNonLines() &&
  2150. GrIsPrimTypeLines(meshes[m].primitiveType()) &&
  2151. !GrIsPrimTypeLines(fLastPrimitiveType)) {
  2152. GL_CALL(Enable(GR_GL_CULL_FACE));
  2153. GL_CALL(Disable(GR_GL_CULL_FACE));
  2154. }
  2155. meshes[m].sendToGpu(this);
  2156. fLastPrimitiveType = meshes[m].primitiveType();
  2157. }
  2158. #if SWAP_PER_DRAW
  2159. glFlush();
  2160. #if defined(SK_BUILD_FOR_MAC)
  2161. aglSwapBuffers(aglGetCurrentContext());
  2162. int set_a_break_pt_here = 9;
  2163. aglSwapBuffers(aglGetCurrentContext());
  2164. #elif defined(SK_BUILD_FOR_WIN)
  2165. SwapBuf();
  2166. int set_a_break_pt_here = 9;
  2167. SwapBuf();
  2168. #endif
  2169. #endif
  2170. }
  2171. static GrGLenum gr_primitive_type_to_gl_mode(GrPrimitiveType primitiveType) {
  2172. switch (primitiveType) {
  2173. case GrPrimitiveType::kTriangles:
  2174. return GR_GL_TRIANGLES;
  2175. case GrPrimitiveType::kTriangleStrip:
  2176. return GR_GL_TRIANGLE_STRIP;
  2177. case GrPrimitiveType::kPoints:
  2178. return GR_GL_POINTS;
  2179. case GrPrimitiveType::kLines:
  2180. return GR_GL_LINES;
  2181. case GrPrimitiveType::kLineStrip:
  2182. return GR_GL_LINE_STRIP;
  2183. case GrPrimitiveType::kLinesAdjacency:
  2184. return GR_GL_LINES_ADJACENCY;
  2185. }
  2186. SK_ABORT("invalid GrPrimitiveType");
  2187. return GR_GL_TRIANGLES;
  2188. }
  2189. void GrGLGpu::sendMeshToGpu(GrPrimitiveType primitiveType, const GrBuffer* vertexBuffer,
  2190. int vertexCount, int baseVertex) {
  2191. const GrGLenum glPrimType = gr_primitive_type_to_gl_mode(primitiveType);
  2192. if (this->glCaps().drawArraysBaseVertexIsBroken()) {
  2193. this->setupGeometry(nullptr, vertexBuffer, baseVertex, nullptr, 0, GrPrimitiveRestart::kNo);
  2194. GL_CALL(DrawArrays(glPrimType, 0, vertexCount));
  2195. } else {
  2196. this->setupGeometry(nullptr, vertexBuffer, 0, nullptr, 0, GrPrimitiveRestart::kNo);
  2197. GL_CALL(DrawArrays(glPrimType, baseVertex, vertexCount));
  2198. }
  2199. fStats.incNumDraws();
  2200. }
  2201. static const GrGLvoid* element_ptr(const GrBuffer* indexBuffer, int baseIndex) {
  2202. size_t baseOffset = baseIndex * sizeof(uint16_t);
  2203. if (indexBuffer->isCpuBuffer()) {
  2204. return static_cast<const GrCpuBuffer*>(indexBuffer)->data() + baseOffset;
  2205. } else {
  2206. return reinterpret_cast<const GrGLvoid*>(baseOffset);
  2207. }
  2208. }
  2209. void GrGLGpu::sendIndexedMeshToGpu(GrPrimitiveType primitiveType, const GrBuffer* indexBuffer,
  2210. int indexCount, int baseIndex, uint16_t minIndexValue,
  2211. uint16_t maxIndexValue, const GrBuffer* vertexBuffer,
  2212. int baseVertex, GrPrimitiveRestart enablePrimitiveRestart) {
  2213. const GrGLenum glPrimType = gr_primitive_type_to_gl_mode(primitiveType);
  2214. const GrGLvoid* elementPtr = element_ptr(indexBuffer, baseIndex);
  2215. this->setupGeometry(indexBuffer, vertexBuffer, baseVertex, nullptr, 0, enablePrimitiveRestart);
  2216. if (this->glCaps().drawRangeElementsSupport()) {
  2217. GL_CALL(DrawRangeElements(glPrimType, minIndexValue, maxIndexValue, indexCount,
  2218. GR_GL_UNSIGNED_SHORT, elementPtr));
  2219. } else {
  2220. GL_CALL(DrawElements(glPrimType, indexCount, GR_GL_UNSIGNED_SHORT, elementPtr));
  2221. }
  2222. fStats.incNumDraws();
  2223. }
  2224. void GrGLGpu::sendInstancedMeshToGpu(GrPrimitiveType primitiveType, const GrBuffer* vertexBuffer,
  2225. int vertexCount, int baseVertex,
  2226. const GrBuffer* instanceBuffer, int instanceCount,
  2227. int baseInstance) {
  2228. GrGLenum glPrimType = gr_primitive_type_to_gl_mode(primitiveType);
  2229. int maxInstances = this->glCaps().maxInstancesPerDrawWithoutCrashing(instanceCount);
  2230. for (int i = 0; i < instanceCount; i += maxInstances) {
  2231. this->setupGeometry(nullptr, vertexBuffer, 0, instanceBuffer, baseInstance + i,
  2232. GrPrimitiveRestart::kNo);
  2233. GL_CALL(DrawArraysInstanced(glPrimType, baseVertex, vertexCount,
  2234. SkTMin(instanceCount - i, maxInstances)));
  2235. fStats.incNumDraws();
  2236. }
  2237. }
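// Illustrative sketch (not part of this file): the chunking loop above. When a driver limits how
// many instances a single draw may safely submit, instanceCount is split into batches of at most
// maxInstances, each starting at a shifted base instance.
#if 0
#include <algorithm>
#include <utility>
#include <vector>

static std::vector<std::pair<int, int>> instance_batches(int instanceCount, int maxInstances) {
    std::vector<std::pair<int, int>> batches;  // {baseInstance, count}
    for (int i = 0; i < instanceCount; i += maxInstances) {
        batches.push_back({i, std::min(instanceCount - i, maxInstances)});
    }
    return batches;  // e.g. (instanceCount=10, maxInstances=4) -> {0,4}, {4,4}, {8,2}
}
#endif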
void GrGLGpu::sendIndexedInstancedMeshToGpu(GrPrimitiveType primitiveType,
                                            const GrBuffer* indexBuffer, int indexCount,
                                            int baseIndex, const GrBuffer* vertexBuffer,
                                            int baseVertex, const GrBuffer* instanceBuffer,
                                            int instanceCount, int baseInstance,
                                            GrPrimitiveRestart enablePrimitiveRestart) {
    const GrGLenum glPrimType = gr_primitive_type_to_gl_mode(primitiveType);
    const GrGLvoid* elementPtr = element_ptr(indexBuffer, baseIndex);
    int maxInstances = this->glCaps().maxInstancesPerDrawWithoutCrashing(instanceCount);
    for (int i = 0; i < instanceCount; i += maxInstances) {
        this->setupGeometry(indexBuffer, vertexBuffer, baseVertex, instanceBuffer, baseInstance + i,
                            enablePrimitiveRestart);
        GL_CALL(DrawElementsInstanced(glPrimType, indexCount, GR_GL_UNSIGNED_SHORT, elementPtr,
                                      SkTMin(instanceCount - i, maxInstances)));
        fStats.incNumDraws();
    }
}
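// Resolves the MSAA renderbuffer into the render target's texture FBO when a resolve is
// pending. With the Apple ES extension the blit bounds come from the scissor and
// ResolveMultisampleFramebuffer is used; otherwise BlitFramebuffer copies either the tracked
// dirty rect or, if the caps demand it, the full surface.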
void GrGLGpu::onResolveRenderTarget(GrRenderTarget* target) {
    GrGLRenderTarget* rt = static_cast<GrGLRenderTarget*>(target);
    if (rt->needsResolve()) {
        // Some extensions automatically resolve the texture when it is read.
        if (this->glCaps().usesMSAARenderBuffers()) {
            SkASSERT(rt->textureFBOID() != rt->renderFBOID());
            SkASSERT(rt->textureFBOID() != 0 && rt->renderFBOID() != 0);
            this->bindFramebuffer(GR_GL_READ_FRAMEBUFFER, rt->renderFBOID());
            this->bindFramebuffer(GR_GL_DRAW_FRAMEBUFFER, rt->textureFBOID());
            // make sure we go through flushRenderTarget() since we've modified
            // the bound DRAW FBO ID.
            fHWBoundRenderTargetUniqueID.makeInvalid();
            const SkIRect dirtyRect = rt->getResolveRect();
            // The dirty rect tracked on the RT is always stored in the native coordinates of the
            // surface. Choose kTopLeft so no adjustments are made.
            static constexpr auto kDirtyRectOrigin = kTopLeft_GrSurfaceOrigin;
            if (GrGLCaps::kES_Apple_MSFBOType == this->glCaps().msFBOType()) {
                // Apple's extension uses the scissor as the blit bounds.
                GrScissorState scissorState;
                scissorState.set(dirtyRect);
                this->flushScissor(scissorState, rt->width(), rt->height(), kDirtyRectOrigin);
                this->disableWindowRectangles();
                GL_CALL(ResolveMultisampleFramebuffer());
            } else {
                int l, b, r, t;
                if (GrGLCaps::kResolveMustBeFull_BlitFrambufferFlag &
                    this->glCaps().blitFramebufferSupportFlags()) {
                    l = 0;
                    b = 0;
                    r = target->width();
                    t = target->height();
                } else {
                    GrGLIRect rect;
                    rect.setRelativeTo(rt->height(), dirtyRect, kDirtyRectOrigin);
                    l = rect.fLeft;
                    b = rect.fBottom;
                    r = rect.fLeft + rect.fWidth;
                    t = rect.fBottom + rect.fHeight;
                }
                // BlitFramebuffer respects the scissor, so disable it.
                this->disableScissor();
                this->disableWindowRectangles();
                GL_CALL(BlitFramebuffer(l, b, r, t, l, b, r, t,
                                        GR_GL_COLOR_BUFFER_BIT, GR_GL_NEAREST));
            }
        }
        rt->flagAsResolved();
    }
}
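// Helpers that translate GrStencilSettings into GL stencil calls. The op table below is kept
// in sync with the GrStencilOp enum ordering by the static asserts that follow it.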
namespace {
GrGLenum gr_to_gl_stencil_op(GrStencilOp op) {
    static const GrGLenum gTable[kGrStencilOpCount] = {
        GR_GL_KEEP,      // kKeep
        GR_GL_ZERO,      // kZero
        GR_GL_REPLACE,   // kReplace
        GR_GL_INVERT,    // kInvert
        GR_GL_INCR_WRAP, // kIncWrap
        GR_GL_DECR_WRAP, // kDecWrap
        GR_GL_INCR,      // kIncClamp
        GR_GL_DECR,      // kDecClamp
    };
    GR_STATIC_ASSERT(0 == (int)GrStencilOp::kKeep);
    GR_STATIC_ASSERT(1 == (int)GrStencilOp::kZero);
    GR_STATIC_ASSERT(2 == (int)GrStencilOp::kReplace);
    GR_STATIC_ASSERT(3 == (int)GrStencilOp::kInvert);
    GR_STATIC_ASSERT(4 == (int)GrStencilOp::kIncWrap);
    GR_STATIC_ASSERT(5 == (int)GrStencilOp::kDecWrap);
    GR_STATIC_ASSERT(6 == (int)GrStencilOp::kIncClamp);
    GR_STATIC_ASSERT(7 == (int)GrStencilOp::kDecClamp);
    SkASSERT(op < (GrStencilOp)kGrStencilOpCount);
    return gTable[(int)op];
}

void set_gl_stencil(const GrGLInterface* gl,
                    const GrStencilSettings::Face& face,
                    GrGLenum glFace) {
    GrGLenum glFunc = GrToGLStencilFunc(face.fTest);
    GrGLenum glFailOp = gr_to_gl_stencil_op(face.fFailOp);
    GrGLenum glPassOp = gr_to_gl_stencil_op(face.fPassOp);
    GrGLint ref = face.fRef;
    GrGLint mask = face.fTestMask;
    GrGLint writeMask = face.fWriteMask;
    if (GR_GL_FRONT_AND_BACK == glFace) {
        // we call the combined func just in case separate stencil is not
        // supported.
        GR_GL_CALL(gl, StencilFunc(glFunc, ref, mask));
        GR_GL_CALL(gl, StencilMask(writeMask));
        GR_GL_CALL(gl, StencilOp(glFailOp, GR_GL_KEEP, glPassOp));
    } else {
        GR_GL_CALL(gl, StencilFuncSeparate(glFace, glFunc, ref, mask));
        GR_GL_CALL(gl, StencilMaskSeparate(glFace, writeMask));
        GR_GL_CALL(gl, StencilOpSeparate(glFace, glFailOp, GR_GL_KEEP, glPassOp));
    }
}
}  // namespace
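// Stencil state is cached in fHWStencilSettings/fHWStencilOrigin and only re-sent when it
// changes. Two-sided stencil also keys off the surface origin because the front/back face
// settings depend on it.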
void GrGLGpu::flushStencil(const GrStencilSettings& stencilSettings, GrSurfaceOrigin origin) {
    if (stencilSettings.isDisabled()) {
        this->disableStencil();
    } else if (fHWStencilSettings != stencilSettings ||
               (stencilSettings.isTwoSided() && fHWStencilOrigin != origin)) {
        if (kYes_TriState != fHWStencilTestEnabled) {
            GL_CALL(Enable(GR_GL_STENCIL_TEST));
            fHWStencilTestEnabled = kYes_TriState;
        }
        if (stencilSettings.isTwoSided()) {
            set_gl_stencil(this->glInterface(), stencilSettings.front(origin), GR_GL_FRONT);
            set_gl_stencil(this->glInterface(), stencilSettings.back(origin), GR_GL_BACK);
        } else {
            set_gl_stencil(
                    this->glInterface(), stencilSettings.frontAndBack(), GR_GL_FRONT_AND_BACK);
        }
        fHWStencilSettings = stencilSettings;
        fHWStencilOrigin = origin;
    }
}
void GrGLGpu::disableStencil() {
    if (kNo_TriState != fHWStencilTestEnabled) {
        GL_CALL(Disable(GR_GL_STENCIL_TEST));
        fHWStencilTestEnabled = kNo_TriState;
        fHWStencilSettings.invalidate();
    }
}
void GrGLGpu::flushHWAAState(GrRenderTarget* rt, bool useHWAA) {
    // rt is only optional if useHWAA is false.
    SkASSERT(rt || !useHWAA);
#ifdef SK_DEBUG
    if (useHWAA && rt->numSamples() <= 1) {
        SkASSERT(this->caps()->mixedSamplesSupport());
        SkASSERT(0 != static_cast<GrGLRenderTarget*>(rt)->renderFBOID());
        SkASSERT(rt->renderTargetPriv().getStencilAttachment());
    }
#endif
    if (this->caps()->multisampleDisableSupport()) {
        if (useHWAA) {
            if (kYes_TriState != fMSAAEnabled) {
                GL_CALL(Enable(GR_GL_MULTISAMPLE));
                fMSAAEnabled = kYes_TriState;
            }
        } else {
            if (kNo_TriState != fMSAAEnabled) {
                GL_CALL(Disable(GR_GL_MULTISAMPLE));
                fMSAAEnabled = kNo_TriState;
            }
        }
    }
}
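// Blend state is cached in fHWBlendState and flushed only on change. A (kAdd or kSubtract,
// One, Zero) configuration, or disabled color writes, turns blending off entirely; on drivers
// that can never disable color writes, a Zero/One blend is substituted to preserve the dst.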
void GrGLGpu::flushBlendAndColorWrite(
        const GrXferProcessor::BlendInfo& blendInfo, const GrSwizzle& swizzle) {
    if (this->glCaps().neverDisableColorWrites() && !blendInfo.fWriteColor) {
        // We need to work around a driver bug by using a blend state that preserves the dst color,
        // rather than disabling color writes.
        GrXferProcessor::BlendInfo preserveDstBlend;
        preserveDstBlend.fSrcBlend = kZero_GrBlendCoeff;
        preserveDstBlend.fDstBlend = kOne_GrBlendCoeff;
        this->flushBlendAndColorWrite(preserveDstBlend, swizzle);
        return;
    }
    GrBlendEquation equation = blendInfo.fEquation;
    GrBlendCoeff srcCoeff = blendInfo.fSrcBlend;
    GrBlendCoeff dstCoeff = blendInfo.fDstBlend;
    // Any optimization to disable blending should have already been applied and
    // tweaked the equation to "add" or "subtract", and the coeffs to (1, 0).
    bool blendOff =
            ((kAdd_GrBlendEquation == equation || kSubtract_GrBlendEquation == equation) &&
             kOne_GrBlendCoeff == srcCoeff && kZero_GrBlendCoeff == dstCoeff) ||
            !blendInfo.fWriteColor;
    if (blendOff) {
        if (kNo_TriState != fHWBlendState.fEnabled) {
            GL_CALL(Disable(GR_GL_BLEND));
            // Workaround for the ARM KHR_blend_equation_advanced blacklist issue
            // https://code.google.com/p/skia/issues/detail?id=3943
            if (kARM_GrGLVendor == this->ctxInfo().vendor() &&
                GrBlendEquationIsAdvanced(fHWBlendState.fEquation)) {
                SkASSERT(this->caps()->advancedBlendEquationSupport());
                // Set to any basic blending equation.
                GrBlendEquation blend_equation = kAdd_GrBlendEquation;
                GL_CALL(BlendEquation(gXfermodeEquation2Blend[blend_equation]));
                fHWBlendState.fEquation = blend_equation;
            }
            fHWBlendState.fEnabled = kNo_TriState;
        }
    } else {
        if (kYes_TriState != fHWBlendState.fEnabled) {
            GL_CALL(Enable(GR_GL_BLEND));
            fHWBlendState.fEnabled = kYes_TriState;
        }
        if (fHWBlendState.fEquation != equation) {
            GL_CALL(BlendEquation(gXfermodeEquation2Blend[equation]));
            fHWBlendState.fEquation = equation;
        }
        if (GrBlendEquationIsAdvanced(equation)) {
            SkASSERT(this->caps()->advancedBlendEquationSupport());
            // Advanced equations have no other blend state.
            return;
        }
        if (fHWBlendState.fSrcCoeff != srcCoeff || fHWBlendState.fDstCoeff != dstCoeff) {
            GL_CALL(BlendFunc(gXfermodeCoeff2Blend[srcCoeff],
                              gXfermodeCoeff2Blend[dstCoeff]));
            fHWBlendState.fSrcCoeff = srcCoeff;
            fHWBlendState.fDstCoeff = dstCoeff;
        }
        if ((BlendCoeffReferencesConstant(srcCoeff) || BlendCoeffReferencesConstant(dstCoeff))) {
            SkPMColor4f blendConst = swizzle.applyTo(blendInfo.fBlendConstant);
            if (!fHWBlendState.fConstColorValid || fHWBlendState.fConstColor != blendConst) {
                GL_CALL(BlendColor(blendConst.fR, blendConst.fG, blendConst.fB, blendConst.fA));
                fHWBlendState.fConstColor = blendConst;
                fHWBlendState.fConstColorValid = true;
            }
        }
    }
    this->flushColorWrite(blendInfo.fWriteColor);
}
static void get_gl_swizzle_values(const GrSwizzle& swizzle, GrGLenum glValues[4]) {
    for (int i = 0; i < 4; ++i) {
        switch (swizzle[i]) {
            case 'r': glValues[i] = GR_GL_RED;   break;
            case 'g': glValues[i] = GR_GL_GREEN; break;
            case 'b': glValues[i] = GR_GL_BLUE;  break;
            case 'a': glValues[i] = GR_GL_ALPHA; break;
            case '0': glValues[i] = GR_GL_ZERO;  break;
            case '1': glValues[i] = GR_GL_ONE;   break;
            default:  SK_ABORT("Unsupported component");
        }
    }
}
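// Binds 'texture' to the given unit and flushes its sampler parameters. When sampler objects
// are available the filter/wrap state lives in fSamplerObjectCache; otherwise it is written
// with TexParameter* and mirrored in the texture's SamplerOverriddenState so redundant calls
// can be skipped. A reset timestamp older than fResetTimestampForTextureParameters forces
// every parameter to be re-sent.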
void GrGLGpu::bindTexture(int unitIdx, GrSamplerState samplerState, const GrSwizzle& swizzle,
                          GrGLTexture* texture) {
    SkASSERT(texture);
#ifdef SK_DEBUG
    if (!this->caps()->npotTextureTileSupport()) {
        if (samplerState.isRepeated()) {
            const int w = texture->width();
            const int h = texture->height();
            SkASSERT(SkIsPow2(w) && SkIsPow2(h));
        }
    }
#endif
    // If we created a rt/tex and rendered to it without using a texture and now we're texturing
    // from the rt it will still be the last bound texture, but it needs resolving. So keep this
    // out of the "last != next" check.
    GrGLRenderTarget* texRT = static_cast<GrGLRenderTarget*>(texture->asRenderTarget());
    if (texRT) {
        this->onResolveRenderTarget(texRT);
    }
    GrGpuResource::UniqueID textureID = texture->uniqueID();
    GrGLenum target = texture->target();
    if (fHWTextureUnitBindings[unitIdx].boundID(target) != textureID) {
        this->setTextureUnit(unitIdx);
        GL_CALL(BindTexture(target, texture->textureID()));
        fHWTextureUnitBindings[unitIdx].setBoundID(target, textureID);
    }
    if (samplerState.filter() == GrSamplerState::Filter::kMipMap) {
        if (!this->caps()->mipMapSupport() ||
            texture->texturePriv().mipMapped() == GrMipMapped::kNo) {
            samplerState.setFilterMode(GrSamplerState::Filter::kBilerp);
        }
    }
#ifdef SK_DEBUG
    // We were supposed to ensure MipMaps were up-to-date before getting here.
    if (samplerState.filter() == GrSamplerState::Filter::kMipMap) {
        SkASSERT(!texture->texturePriv().mipMapsAreDirty());
    }
#endif
    auto timestamp = texture->parameters()->resetTimestamp();
    bool setAll = timestamp < fResetTimestampForTextureParameters;
    const GrGLTextureParameters::SamplerOverriddenState* samplerStateToRecord = nullptr;
    GrGLTextureParameters::SamplerOverriddenState newSamplerState;
    if (fSamplerObjectCache) {
        fSamplerObjectCache->bindSampler(unitIdx, samplerState);
    } else {
        const GrGLTextureParameters::SamplerOverriddenState& oldSamplerState =
                texture->parameters()->samplerOverriddenState();
        samplerStateToRecord = &newSamplerState;
        newSamplerState.fMinFilter = filter_to_gl_min_filter(samplerState.filter());
        newSamplerState.fMagFilter = filter_to_gl_mag_filter(samplerState.filter());
        newSamplerState.fWrapS = wrap_mode_to_gl_wrap(samplerState.wrapModeX(), this->glCaps());
        newSamplerState.fWrapT = wrap_mode_to_gl_wrap(samplerState.wrapModeY(), this->glCaps());
        // These are the OpenGL default values.
        newSamplerState.fMinLOD = -1000.f;
        newSamplerState.fMaxLOD = 1000.f;
        if (setAll || newSamplerState.fMagFilter != oldSamplerState.fMagFilter) {
            this->setTextureUnit(unitIdx);
            GL_CALL(TexParameteri(target, GR_GL_TEXTURE_MAG_FILTER, newSamplerState.fMagFilter));
        }
        if (setAll || newSamplerState.fMinFilter != oldSamplerState.fMinFilter) {
            this->setTextureUnit(unitIdx);
            GL_CALL(TexParameteri(target, GR_GL_TEXTURE_MIN_FILTER, newSamplerState.fMinFilter));
        }
        if (this->glCaps().mipMapLevelAndLodControlSupport()) {
            if (setAll || newSamplerState.fMinLOD != oldSamplerState.fMinLOD) {
                this->setTextureUnit(unitIdx);
                GL_CALL(TexParameterf(target, GR_GL_TEXTURE_MIN_LOD, newSamplerState.fMinLOD));
            }
            if (setAll || newSamplerState.fMaxLOD != oldSamplerState.fMaxLOD) {
                this->setTextureUnit(unitIdx);
                GL_CALL(TexParameterf(target, GR_GL_TEXTURE_MAX_LOD, newSamplerState.fMaxLOD));
            }
        }
        if (setAll || newSamplerState.fWrapS != oldSamplerState.fWrapS) {
            this->setTextureUnit(unitIdx);
            GL_CALL(TexParameteri(target, GR_GL_TEXTURE_WRAP_S, newSamplerState.fWrapS));
        }
        if (setAll || newSamplerState.fWrapT != oldSamplerState.fWrapT) {
            this->setTextureUnit(unitIdx);
            GL_CALL(TexParameteri(target, GR_GL_TEXTURE_WRAP_T, newSamplerState.fWrapT));
        }
        if (this->glCaps().clampToBorderSupport()) {
            // Make sure the border color is transparent black (the default)
            if (setAll || oldSamplerState.fBorderColorInvalid) {
                this->setTextureUnit(unitIdx);
                static const GrGLfloat kTransparentBlack[4] = {0.f, 0.f, 0.f, 0.f};
                GL_CALL(TexParameterfv(target, GR_GL_TEXTURE_BORDER_COLOR, kTransparentBlack));
            }
        }
    }
    GrGLTextureParameters::NonsamplerState newNonsamplerState;
    newNonsamplerState.fBaseMipMapLevel = 0;
    newNonsamplerState.fMaxMipMapLevel = texture->texturePriv().maxMipMapLevel();
    const GrGLTextureParameters::NonsamplerState& oldNonsamplerState =
            texture->parameters()->nonsamplerState();
    if (!this->caps()->shaderCaps()->textureSwizzleAppliedInShader()) {
        newNonsamplerState.fSwizzleKey = swizzle.asKey();
        if (setAll || swizzle.asKey() != oldNonsamplerState.fSwizzleKey) {
            GrGLenum glValues[4];
            get_gl_swizzle_values(swizzle, glValues);
            this->setTextureUnit(unitIdx);
            if (GR_IS_GR_GL(this->glStandard())) {
                GR_STATIC_ASSERT(sizeof(glValues[0]) == sizeof(GrGLint));
                GL_CALL(TexParameteriv(target, GR_GL_TEXTURE_SWIZZLE_RGBA,
                                       reinterpret_cast<const GrGLint*>(glValues)));
            } else if (GR_IS_GR_GL_ES(this->glStandard())) {
                // ES3 added swizzle support but not GL_TEXTURE_SWIZZLE_RGBA.
                GL_CALL(TexParameteri(target, GR_GL_TEXTURE_SWIZZLE_R, glValues[0]));
                GL_CALL(TexParameteri(target, GR_GL_TEXTURE_SWIZZLE_G, glValues[1]));
                GL_CALL(TexParameteri(target, GR_GL_TEXTURE_SWIZZLE_B, glValues[2]));
                GL_CALL(TexParameteri(target, GR_GL_TEXTURE_SWIZZLE_A, glValues[3]));
            }
        }
    }
    // These are not supported in ES2 contexts
    if (this->glCaps().mipMapLevelAndLodControlSupport() &&
        (texture->texturePriv().textureType() != GrTextureType::kExternal ||
         !this->glCaps().dontSetBaseOrMaxLevelForExternalTextures())) {
        if (newNonsamplerState.fBaseMipMapLevel != oldNonsamplerState.fBaseMipMapLevel) {
            this->setTextureUnit(unitIdx);
            GL_CALL(TexParameteri(target, GR_GL_TEXTURE_BASE_LEVEL,
                                  newNonsamplerState.fBaseMipMapLevel));
        }
        if (newNonsamplerState.fMaxMipMapLevel != oldNonsamplerState.fMaxMipMapLevel) {
            this->setTextureUnit(unitIdx);
            GL_CALL(TexParameteri(target, GR_GL_TEXTURE_MAX_LEVEL,
                                  newNonsamplerState.fMaxMipMapLevel));
        }
    }
    texture->parameters()->set(samplerStateToRecord, newNonsamplerState,
                               fResetTimestampForTextureParameters);
}
void GrGLGpu::onResetTextureBindings() {
    static constexpr GrGLenum kTargets[] = {GR_GL_TEXTURE_2D, GR_GL_TEXTURE_RECTANGLE,
                                            GR_GL_TEXTURE_EXTERNAL};
    for (int i = 0; i < this->numTextureUnits(); ++i) {
        this->setTextureUnit(i);
        for (auto target : kTargets) {
            if (fHWTextureUnitBindings[i].hasBeenModified(target)) {
                GL_CALL(BindTexture(target, 0));
            }
        }
        fHWTextureUnitBindings[i].invalidateAllTargets(true);
    }
}
void GrGLGpu::flushColorWrite(bool writeColor) {
    if (!writeColor) {
        if (kNo_TriState != fHWWriteToColor) {
            GL_CALL(ColorMask(GR_GL_FALSE, GR_GL_FALSE,
                              GR_GL_FALSE, GR_GL_FALSE));
            fHWWriteToColor = kNo_TriState;
        }
    } else {
        if (kYes_TriState != fHWWriteToColor) {
            GL_CALL(ColorMask(GR_GL_TRUE, GR_GL_TRUE, GR_GL_TRUE, GR_GL_TRUE));
            fHWWriteToColor = kYes_TriState;
        }
    }
}
void GrGLGpu::flushClearColor(GrGLfloat r, GrGLfloat g, GrGLfloat b, GrGLfloat a) {
    if (r != fHWClearColor[0] || g != fHWClearColor[1] ||
        b != fHWClearColor[2] || a != fHWClearColor[3]) {
        GL_CALL(ClearColor(r, g, b, a));
        fHWClearColor[0] = r;
        fHWClearColor[1] = g;
        fHWClearColor[2] = b;
        fHWClearColor[3] = a;
    }
}
void GrGLGpu::setTextureUnit(int unit) {
    SkASSERT(unit >= 0 && unit < this->numTextureUnits());
    if (unit != fHWActiveTextureUnitIdx) {
        GL_CALL(ActiveTexture(GR_GL_TEXTURE0 + unit));
        fHWActiveTextureUnitIdx = unit;
    }
}
void GrGLGpu::bindTextureToScratchUnit(GrGLenum target, GrGLint textureID) {
    // Bind the last texture unit since it is the least likely to be used by GrGLProgram.
    int lastUnitIdx = this->numTextureUnits() - 1;
    if (lastUnitIdx != fHWActiveTextureUnitIdx) {
        GL_CALL(ActiveTexture(GR_GL_TEXTURE0 + lastUnitIdx));
        fHWActiveTextureUnitIdx = lastUnitIdx;
    }
    // Clear out this field so that if a GrGLProgram does use this unit it will rebind the
    // correct texture.
    fHWTextureUnitBindings[lastUnitIdx].invalidateForScratchUse(target);
    GL_CALL(BindTexture(target, textureID));
}
// Determines whether glBlitFramebuffer could be used between src and dst by onCopySurface.
static inline bool can_blit_framebuffer_for_copy_surface(const GrSurface* dst,
                                                         const GrSurface* src,
                                                         const SkIRect& srcRect,
                                                         const SkIPoint& dstPoint,
                                                         const GrGLCaps& caps) {
    int dstSampleCnt = 0;
    int srcSampleCnt = 0;
    if (const GrRenderTarget* rt = dst->asRenderTarget()) {
        dstSampleCnt = rt->numSamples();
    }
    if (const GrRenderTarget* rt = src->asRenderTarget()) {
        srcSampleCnt = rt->numSamples();
    }
    SkASSERT((dstSampleCnt > 0) == SkToBool(dst->asRenderTarget()));
    SkASSERT((srcSampleCnt > 0) == SkToBool(src->asRenderTarget()));
    const GrGLTexture* dstTex = static_cast<const GrGLTexture*>(dst->asTexture());
    const GrGLTexture* srcTex = static_cast<const GrGLTexture*>(src->asTexture());
    GrTextureType dstTexType;
    GrTextureType* dstTexTypePtr = nullptr;
    GrTextureType srcTexType;
    GrTextureType* srcTexTypePtr = nullptr;
    if (dstTex) {
        dstTexType = dstTex->texturePriv().textureType();
        dstTexTypePtr = &dstTexType;
    }
    if (srcTex) {
        srcTexType = srcTex->texturePriv().textureType();
        srcTexTypePtr = &srcTexType;
    }
    return caps.canCopyAsBlit(dst->config(), dstSampleCnt, dstTexTypePtr,
                              src->config(), srcSampleCnt, srcTexTypePtr,
                              src->getBoundsRect(), true, srcRect, dstPoint);
}
static bool rt_has_msaa_render_buffer(const GrGLRenderTarget* rt, const GrGLCaps& glCaps) {
    // A RT has a separate MSAA renderbuffer if:
    // 1) It's multisampled
    // 2) We're using an extension with separate MSAA renderbuffers
    // 3) It's not FBO 0, which is special and always auto-resolves
    return rt->numSamples() > 1 && glCaps.usesMSAARenderBuffers() && rt->renderFBOID() != 0;
}
static inline bool can_copy_texsubimage(const GrSurface* dst, const GrSurface* src,
                                        const GrGLCaps& caps) {
    const GrGLRenderTarget* dstRT = static_cast<const GrGLRenderTarget*>(dst->asRenderTarget());
    const GrGLRenderTarget* srcRT = static_cast<const GrGLRenderTarget*>(src->asRenderTarget());
    const GrGLTexture* dstTex = static_cast<const GrGLTexture*>(dst->asTexture());
    const GrGLTexture* srcTex = static_cast<const GrGLTexture*>(src->asTexture());
    bool dstHasMSAARenderBuffer = dstRT ? rt_has_msaa_render_buffer(dstRT, caps) : false;
    bool srcHasMSAARenderBuffer = srcRT ? rt_has_msaa_render_buffer(srcRT, caps) : false;
    GrTextureType dstTexType;
    GrTextureType* dstTexTypePtr = nullptr;
    GrTextureType srcTexType;
    GrTextureType* srcTexTypePtr = nullptr;
    if (dstTex) {
        dstTexType = dstTex->texturePriv().textureType();
        dstTexTypePtr = &dstTexType;
    }
    if (srcTex) {
        srcTexType = srcTex->texturePriv().textureType();
        srcTexTypePtr = &srcTexType;
    }
    return caps.canCopyTexSubImage(dst->config(), dstHasMSAARenderBuffer, dstTexTypePtr,
                                   src->config(), srcHasMSAARenderBuffer, srcTexTypePtr);
}
// Binds 'surface' to the given FBO target. If the surface is not a render target, a temporary
// FBO is created (and cached in fTempSrcFBOID/fTempDstFBOID) and the surface's texture is
// attached to it.
void GrGLGpu::bindSurfaceFBOForPixelOps(GrSurface* surface, GrGLenum fboTarget,
                                        TempFBOTarget tempFBOTarget) {
    GrGLRenderTarget* rt = static_cast<GrGLRenderTarget*>(surface->asRenderTarget());
    if (!rt) {
        SkASSERT(surface->asTexture());
        GrGLTexture* texture = static_cast<GrGLTexture*>(surface->asTexture());
        GrGLuint texID = texture->textureID();
        GrGLenum target = texture->target();
        GrGLuint* tempFBOID;
        tempFBOID = kSrc_TempFBOTarget == tempFBOTarget ? &fTempSrcFBOID : &fTempDstFBOID;
        if (0 == *tempFBOID) {
            GR_GL_CALL(this->glInterface(), GenFramebuffers(1, tempFBOID));
        }
        this->bindFramebuffer(fboTarget, *tempFBOID);
        GR_GL_CALL(this->glInterface(), FramebufferTexture2D(fboTarget,
                                                             GR_GL_COLOR_ATTACHMENT0,
                                                             target,
                                                             texID,
                                                             0));
        texture->baseLevelWasBoundToFBO();
    } else {
        this->bindFramebuffer(fboTarget, rt->renderFBOID());
    }
}
void GrGLGpu::unbindTextureFBOForPixelOps(GrGLenum fboTarget, GrSurface* surface) {
    // bindSurfaceFBOForPixelOps temporarily binds textures that are not render targets to an
    // FBO; detach them here.
    if (!surface->asRenderTarget()) {
        SkASSERT(surface->asTexture());
        GrGLenum textureTarget = static_cast<GrGLTexture*>(surface->asTexture())->target();
        GR_GL_CALL(this->glInterface(), FramebufferTexture2D(fboTarget,
                                                             GR_GL_COLOR_ATTACHMENT0,
                                                             textureTarget,
                                                             0,
                                                             0));
    }
}
void GrGLGpu::onFBOChanged() {
    if (this->caps()->workarounds().flush_on_framebuffer_change ||
        this->caps()->workarounds().restore_scissor_on_fbo_change) {
        GL_CALL(Flush());
    }
}
void GrGLGpu::bindFramebuffer(GrGLenum target, GrGLuint fboid) {
    fStats.incRenderTargetBinds();
    GL_CALL(BindFramebuffer(target, fboid));
    if (target == GR_GL_FRAMEBUFFER || target == GR_GL_DRAW_FRAMEBUFFER) {
        fBoundDrawFramebuffer = fboid;
    }
    if (this->caps()->workarounds().restore_scissor_on_fbo_change) {
        // The driver forgets the correct scissor when modifying the FBO binding.
        if (!fHWScissorSettings.fRect.isInvalid()) {
            fHWScissorSettings.fRect.pushToGLScissor(this->glInterface());
        }
    }
    this->onFBOChanged();
}
void GrGLGpu::deleteFramebuffer(GrGLuint fboid) {
    if (fboid == fBoundDrawFramebuffer &&
        this->caps()->workarounds().unbind_attachments_on_bound_render_fbo_delete) {
        // This workaround only applies to deleting currently bound framebuffers
        // on Adreno 420. Because this is a somewhat rare case, instead of
        // tracking all the attachments of every framebuffer we just always
        // unbind all attachments.
        GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER, GR_GL_COLOR_ATTACHMENT0,
                                        GR_GL_RENDERBUFFER, 0));
        GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER, GR_GL_STENCIL_ATTACHMENT,
                                        GR_GL_RENDERBUFFER, 0));
        GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER, GR_GL_DEPTH_ATTACHMENT,
                                        GR_GL_RENDERBUFFER, 0));
    }
    GL_CALL(DeleteFramebuffers(1, &fboid));
    // Deleting the currently bound framebuffer rebinds to 0.
    if (fboid == fBoundDrawFramebuffer) {
        this->onFBOChanged();
    }
}
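// Surface-to-surface copies try the cheapest applicable mechanism in order: copy-as-draw when
// the destination already has an FBO, then CopyTexSubImage2D, then BlitFramebuffer, and
// finally copy-as-draw as a last resort when the destination did not start with an FBO.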
bool GrGLGpu::onCopySurface(GrSurface* dst, GrSurface* src, const SkIRect& srcRect,
                            const SkIPoint& dstPoint, bool canDiscardOutsideDstRect) {
    // Don't prefer copying as a draw if the dst doesn't already have an FBO object.
    // This implicitly handles this->glCaps().useDrawInsteadOfAllRenderTargetWrites().
    bool preferCopy = SkToBool(dst->asRenderTarget());
    if (preferCopy && this->glCaps().canCopyAsDraw(dst->config(), SkToBool(src->asTexture()))) {
        if (this->copySurfaceAsDraw(dst, src, srcRect, dstPoint)) {
            return true;
        }
    }
    if (can_copy_texsubimage(dst, src, this->glCaps())) {
        this->copySurfaceAsCopyTexSubImage(dst, src, srcRect, dstPoint);
        return true;
    }
    if (can_blit_framebuffer_for_copy_surface(dst, src, srcRect, dstPoint, this->glCaps())) {
        return this->copySurfaceAsBlitFramebuffer(dst, src, srcRect, dstPoint);
    }
    if (!preferCopy && this->glCaps().canCopyAsDraw(dst->config(), SkToBool(src->asTexture()))) {
        if (this->copySurfaceAsDraw(dst, src, srcRect, dstPoint)) {
            return true;
        }
    }
    return false;
}
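// Lazily builds the program used by copySurfaceAsDraw(): a unit-quad vertex shader that remaps
// positions and texture coordinates through the u_posXform/u_texCoordXform scale-and-offset
// uniforms, and a fragment shader that samples u_texture. The program index is keyed off the
// source texture's sampler type.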
bool GrGLGpu::createCopyProgram(GrTexture* srcTex) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    int progIdx = TextureToCopyProgramIdx(srcTex);
    const GrShaderCaps* shaderCaps = this->caps()->shaderCaps();
    GrSLType samplerType =
            GrSLCombinedSamplerTypeForTextureType(srcTex->texturePriv().textureType());
    if (!fCopyProgramArrayBuffer) {
        static const GrGLfloat vdata[] = {
            0, 0,
            0, 1,
            1, 0,
            1, 1
        };
        fCopyProgramArrayBuffer = GrGLBuffer::Make(this, sizeof(vdata), GrGpuBufferType::kVertex,
                                                   kStatic_GrAccessPattern, vdata);
    }
    if (!fCopyProgramArrayBuffer) {
        return false;
    }
    SkASSERT(!fCopyPrograms[progIdx].fProgram);
    GL_CALL_RET(fCopyPrograms[progIdx].fProgram, CreateProgram());
    if (!fCopyPrograms[progIdx].fProgram) {
        return false;
    }
    GrShaderVar aVertex("a_vertex", kHalf2_GrSLType, GrShaderVar::kIn_TypeModifier);
    GrShaderVar uTexCoordXform("u_texCoordXform", kHalf4_GrSLType,
                               GrShaderVar::kUniform_TypeModifier);
    GrShaderVar uPosXform("u_posXform", kHalf4_GrSLType, GrShaderVar::kUniform_TypeModifier);
    GrShaderVar uTexture("u_texture", samplerType, GrShaderVar::kUniform_TypeModifier);
    GrShaderVar vTexCoord("v_texCoord", kHalf2_GrSLType, GrShaderVar::kOut_TypeModifier);
    GrShaderVar oFragColor("o_FragColor", kHalf4_GrSLType, GrShaderVar::kOut_TypeModifier);
    SkString vshaderTxt;
    if (shaderCaps->noperspectiveInterpolationSupport()) {
        if (const char* extension = shaderCaps->noperspectiveInterpolationExtensionString()) {
            vshaderTxt.appendf("#extension %s : require\n", extension);
        }
        vTexCoord.addModifier("noperspective");
    }
    aVertex.appendDecl(shaderCaps, &vshaderTxt);
    vshaderTxt.append(";");
    uTexCoordXform.appendDecl(shaderCaps, &vshaderTxt);
    vshaderTxt.append(";");
    uPosXform.appendDecl(shaderCaps, &vshaderTxt);
    vshaderTxt.append(";");
    vTexCoord.appendDecl(shaderCaps, &vshaderTxt);
    vshaderTxt.append(";");
    vshaderTxt.append(
            "// Copy Program VS\n"
            "void main() {"
            " v_texCoord = half2(a_vertex.xy * u_texCoordXform.xy + u_texCoordXform.zw);"
            " sk_Position.xy = a_vertex * u_posXform.xy + u_posXform.zw;"
            " sk_Position.zw = half2(0, 1);"
            "}"
    );
    SkString fshaderTxt;
    if (shaderCaps->noperspectiveInterpolationSupport()) {
        if (const char* extension = shaderCaps->noperspectiveInterpolationExtensionString()) {
            fshaderTxt.appendf("#extension %s : require\n", extension);
        }
    }
    vTexCoord.setTypeModifier(GrShaderVar::kIn_TypeModifier);
    vTexCoord.appendDecl(shaderCaps, &fshaderTxt);
    fshaderTxt.append(";");
    uTexture.appendDecl(shaderCaps, &fshaderTxt);
    fshaderTxt.append(";");
    fshaderTxt.appendf(
            "// Copy Program FS\n"
            "void main() {"
            " sk_FragColor = texture(u_texture, v_texCoord);"
            "}"
    );
    auto errorHandler = this->getContext()->priv().getShaderErrorHandler();
    SkSL::String sksl(vshaderTxt.c_str(), vshaderTxt.size());
    SkSL::Program::Settings settings;
    settings.fCaps = shaderCaps;
    SkSL::String glsl;
    std::unique_ptr<SkSL::Program> program = GrSkSLtoGLSL(*fGLContext, SkSL::Program::kVertex_Kind,
                                                          sksl, settings, &glsl, errorHandler);
    GrGLuint vshader = GrGLCompileAndAttachShader(*fGLContext, fCopyPrograms[progIdx].fProgram,
                                                  GR_GL_VERTEX_SHADER, glsl, &fStats, errorHandler);
    SkASSERT(program->fInputs.isEmpty());
    sksl.assign(fshaderTxt.c_str(), fshaderTxt.size());
    program = GrSkSLtoGLSL(*fGLContext, SkSL::Program::kFragment_Kind, sksl, settings, &glsl,
                           errorHandler);
    GrGLuint fshader = GrGLCompileAndAttachShader(*fGLContext, fCopyPrograms[progIdx].fProgram,
                                                  GR_GL_FRAGMENT_SHADER, glsl, &fStats,
                                                  errorHandler);
    SkASSERT(program->fInputs.isEmpty());
    GL_CALL(LinkProgram(fCopyPrograms[progIdx].fProgram));
    GL_CALL_RET(fCopyPrograms[progIdx].fTextureUniform,
                GetUniformLocation(fCopyPrograms[progIdx].fProgram, "u_texture"));
    GL_CALL_RET(fCopyPrograms[progIdx].fPosXformUniform,
                GetUniformLocation(fCopyPrograms[progIdx].fProgram, "u_posXform"));
    GL_CALL_RET(fCopyPrograms[progIdx].fTexCoordXformUniform,
                GetUniformLocation(fCopyPrograms[progIdx].fProgram, "u_texCoordXform"));
    GL_CALL(BindAttribLocation(fCopyPrograms[progIdx].fProgram, 0, "a_vertex"));
    GL_CALL(DeleteShader(vshader));
    GL_CALL(DeleteShader(fshader));
    return true;
}
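// Builds one of four downsample programs selected by the parity of the source level's
// dimensions (progIdx bit 0x2 = odd width, 0x1 = odd height). Odd dimensions need extra taps
// so every source texel contributes; 1, 2, or 4 samples are averaged per output pixel.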
bool GrGLGpu::createMipmapProgram(int progIdx) {
    const bool oddWidth = SkToBool(progIdx & 0x2);
    const bool oddHeight = SkToBool(progIdx & 0x1);
    const int numTaps = (oddWidth ? 2 : 1) * (oddHeight ? 2 : 1);
    const GrShaderCaps* shaderCaps = this->caps()->shaderCaps();
    SkASSERT(!fMipmapPrograms[progIdx].fProgram);
    GL_CALL_RET(fMipmapPrograms[progIdx].fProgram, CreateProgram());
    if (!fMipmapPrograms[progIdx].fProgram) {
        return false;
    }
    GrShaderVar aVertex("a_vertex", kHalf2_GrSLType, GrShaderVar::kIn_TypeModifier);
    GrShaderVar uTexCoordXform("u_texCoordXform", kHalf4_GrSLType,
                               GrShaderVar::kUniform_TypeModifier);
    GrShaderVar uTexture("u_texture", kTexture2DSampler_GrSLType,
                         GrShaderVar::kUniform_TypeModifier);
    // We need 1, 2, or 4 texture coordinates (depending on parity of each dimension):
    GrShaderVar vTexCoords[] = {
        GrShaderVar("v_texCoord0", kHalf2_GrSLType, GrShaderVar::kOut_TypeModifier),
        GrShaderVar("v_texCoord1", kHalf2_GrSLType, GrShaderVar::kOut_TypeModifier),
        GrShaderVar("v_texCoord2", kHalf2_GrSLType, GrShaderVar::kOut_TypeModifier),
        GrShaderVar("v_texCoord3", kHalf2_GrSLType, GrShaderVar::kOut_TypeModifier),
    };
    GrShaderVar oFragColor("o_FragColor", kHalf4_GrSLType, GrShaderVar::kOut_TypeModifier);
    SkString vshaderTxt;
    if (shaderCaps->noperspectiveInterpolationSupport()) {
        if (const char* extension = shaderCaps->noperspectiveInterpolationExtensionString()) {
            vshaderTxt.appendf("#extension %s : require\n", extension);
        }
        vTexCoords[0].addModifier("noperspective");
        vTexCoords[1].addModifier("noperspective");
        vTexCoords[2].addModifier("noperspective");
        vTexCoords[3].addModifier("noperspective");
    }
    aVertex.appendDecl(shaderCaps, &vshaderTxt);
    vshaderTxt.append(";");
    uTexCoordXform.appendDecl(shaderCaps, &vshaderTxt);
    vshaderTxt.append(";");
    for (int i = 0; i < numTaps; ++i) {
        vTexCoords[i].appendDecl(shaderCaps, &vshaderTxt);
        vshaderTxt.append(";");
    }
    vshaderTxt.append(
            "// Mipmap Program VS\n"
            "void main() {"
            " sk_Position.xy = a_vertex * half2(2, 2) - half2(1, 1);"
            " sk_Position.zw = half2(0, 1);"
    );
    // Insert texture coordinate computation:
    if (oddWidth && oddHeight) {
        vshaderTxt.append(
                " v_texCoord0 = a_vertex.xy * u_texCoordXform.yw;"
                " v_texCoord1 = a_vertex.xy * u_texCoordXform.yw + half2(u_texCoordXform.x, 0);"
                " v_texCoord2 = a_vertex.xy * u_texCoordXform.yw + half2(0, u_texCoordXform.z);"
                " v_texCoord3 = a_vertex.xy * u_texCoordXform.yw + u_texCoordXform.xz;"
        );
    } else if (oddWidth) {
        vshaderTxt.append(
                " v_texCoord0 = a_vertex.xy * half2(u_texCoordXform.y, 1);"
                " v_texCoord1 = a_vertex.xy * half2(u_texCoordXform.y, 1) + half2(u_texCoordXform.x, 0);"
        );
    } else if (oddHeight) {
        vshaderTxt.append(
                " v_texCoord0 = a_vertex.xy * half2(1, u_texCoordXform.w);"
                " v_texCoord1 = a_vertex.xy * half2(1, u_texCoordXform.w) + half2(0, u_texCoordXform.z);"
        );
    } else {
        vshaderTxt.append(
                " v_texCoord0 = a_vertex.xy;"
        );
    }
    vshaderTxt.append("}");
    SkString fshaderTxt;
    if (shaderCaps->noperspectiveInterpolationSupport()) {
        if (const char* extension = shaderCaps->noperspectiveInterpolationExtensionString()) {
            fshaderTxt.appendf("#extension %s : require\n", extension);
        }
    }
    for (int i = 0; i < numTaps; ++i) {
        vTexCoords[i].setTypeModifier(GrShaderVar::kIn_TypeModifier);
        vTexCoords[i].appendDecl(shaderCaps, &fshaderTxt);
        fshaderTxt.append(";");
    }
    uTexture.appendDecl(shaderCaps, &fshaderTxt);
    fshaderTxt.append(";");
    fshaderTxt.append(
            "// Mipmap Program FS\n"
            "void main() {"
    );
    if (oddWidth && oddHeight) {
        fshaderTxt.append(
                " sk_FragColor = (texture(u_texture, v_texCoord0) + "
                " texture(u_texture, v_texCoord1) + "
                " texture(u_texture, v_texCoord2) + "
                " texture(u_texture, v_texCoord3)) * 0.25;"
        );
    } else if (oddWidth || oddHeight) {
        fshaderTxt.append(
                " sk_FragColor = (texture(u_texture, v_texCoord0) + "
                " texture(u_texture, v_texCoord1)) * 0.5;"
        );
    } else {
        fshaderTxt.append(
                " sk_FragColor = texture(u_texture, v_texCoord0);"
        );
    }
    fshaderTxt.append("}");
    auto errorHandler = this->getContext()->priv().getShaderErrorHandler();
    SkSL::String sksl(vshaderTxt.c_str(), vshaderTxt.size());
    SkSL::Program::Settings settings;
    settings.fCaps = shaderCaps;
    SkSL::String glsl;
    std::unique_ptr<SkSL::Program> program = GrSkSLtoGLSL(*fGLContext, SkSL::Program::kVertex_Kind,
                                                          sksl, settings, &glsl, errorHandler);
    GrGLuint vshader = GrGLCompileAndAttachShader(*fGLContext, fMipmapPrograms[progIdx].fProgram,
                                                  GR_GL_VERTEX_SHADER, glsl, &fStats, errorHandler);
    SkASSERT(program->fInputs.isEmpty());
    sksl.assign(fshaderTxt.c_str(), fshaderTxt.size());
    program = GrSkSLtoGLSL(*fGLContext, SkSL::Program::kFragment_Kind, sksl, settings, &glsl,
                           errorHandler);
    GrGLuint fshader = GrGLCompileAndAttachShader(*fGLContext, fMipmapPrograms[progIdx].fProgram,
                                                  GR_GL_FRAGMENT_SHADER, glsl, &fStats,
                                                  errorHandler);
    SkASSERT(program->fInputs.isEmpty());
    GL_CALL(LinkProgram(fMipmapPrograms[progIdx].fProgram));
    GL_CALL_RET(fMipmapPrograms[progIdx].fTextureUniform,
                GetUniformLocation(fMipmapPrograms[progIdx].fProgram, "u_texture"));
    GL_CALL_RET(fMipmapPrograms[progIdx].fTexCoordXformUniform,
                GetUniformLocation(fMipmapPrograms[progIdx].fProgram, "u_texCoordXform"));
    GL_CALL(BindAttribLocation(fMipmapPrograms[progIdx].fProgram, 0, "a_vertex"));
    GL_CALL(DeleteShader(vshader));
    GL_CALL(DeleteShader(fshader));
    return true;
}
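// Copies by drawing a textured quad into the destination. The dst rect is converted to NDC
// (e.g. dx0 = 2 * dstPoint.fX / dstWidth - 1) and the src rect to normalized texture
// coordinates, except for rectangle textures which sample in texel space; both are passed to
// the copy program as scale/offset uniforms.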
bool GrGLGpu::copySurfaceAsDraw(GrSurface* dst, GrSurface* src, const SkIRect& srcRect,
                                const SkIPoint& dstPoint) {
    GrGLTexture* srcTex = static_cast<GrGLTexture*>(src->asTexture());
    int progIdx = TextureToCopyProgramIdx(srcTex);
    if (!this->glCaps().canConfigBeFBOColorAttachment(dst->config())) {
        return false;
    }
    if (!fCopyPrograms[progIdx].fProgram) {
        if (!this->createCopyProgram(srcTex)) {
            SkDebugf("Failed to create copy program.\n");
            return false;
        }
    }
    int w = srcRect.width();
    int h = srcRect.height();
    // We don't swizzle at all in our copies.
    this->bindTexture(0, GrSamplerState::ClampNearest(), GrSwizzle::RGBA(), srcTex);
    this->bindSurfaceFBOForPixelOps(dst, GR_GL_FRAMEBUFFER, kDst_TempFBOTarget);
    this->flushViewport(dst->width(), dst->height());
    fHWBoundRenderTargetUniqueID.makeInvalid();
    SkIRect dstRect = SkIRect::MakeXYWH(dstPoint.fX, dstPoint.fY, w, h);
    this->flushProgram(fCopyPrograms[progIdx].fProgram);
    fHWVertexArrayState.setVertexArrayID(this, 0);
    GrGLAttribArrayState* attribs = fHWVertexArrayState.bindInternalVertexArray(this);
    attribs->enableVertexArrays(this, 1);
    attribs->set(this, 0, fCopyProgramArrayBuffer.get(), kFloat2_GrVertexAttribType,
                 kFloat2_GrSLType, 2 * sizeof(GrGLfloat), 0);
    // dst rect edges in NDC (-1 to 1)
    int dw = dst->width();
    int dh = dst->height();
    GrGLfloat dx0 = 2.f * dstPoint.fX / dw - 1.f;
    GrGLfloat dx1 = 2.f * (dstPoint.fX + w) / dw - 1.f;
    GrGLfloat dy0 = 2.f * dstPoint.fY / dh - 1.f;
    GrGLfloat dy1 = 2.f * (dstPoint.fY + h) / dh - 1.f;
    GrGLfloat sx0 = (GrGLfloat)srcRect.fLeft;
    GrGLfloat sx1 = (GrGLfloat)(srcRect.fLeft + w);
    GrGLfloat sy0 = (GrGLfloat)srcRect.fTop;
    GrGLfloat sy1 = (GrGLfloat)(srcRect.fTop + h);
    int sw = src->width();
    int sh = src->height();
    if (srcTex->texturePriv().textureType() != GrTextureType::kRectangle) {
        // src rect edges in normalized texture space (0 to 1)
        sx0 /= sw;
        sx1 /= sw;
        sy0 /= sh;
        sy1 /= sh;
    }
    GL_CALL(Uniform4f(fCopyPrograms[progIdx].fPosXformUniform, dx1 - dx0, dy1 - dy0, dx0, dy0));
    GL_CALL(Uniform4f(fCopyPrograms[progIdx].fTexCoordXformUniform,
                      sx1 - sx0, sy1 - sy0, sx0, sy0));
    GL_CALL(Uniform1i(fCopyPrograms[progIdx].fTextureUniform, 0));
    this->flushBlendAndColorWrite(GrXferProcessor::BlendInfo(), GrSwizzle::RGBA());
    this->flushHWAAState(nullptr, false);
    this->disableScissor();
    this->disableWindowRectangles();
    this->disableStencil();
    if (this->glCaps().srgbWriteControl()) {
        this->flushFramebufferSRGB(true);
    }
    GL_CALL(DrawArrays(GR_GL_TRIANGLE_STRIP, 0, 4));
    this->unbindTextureFBOForPixelOps(GR_GL_FRAMEBUFFER, dst);
    // The rect is already in device space so we pass in kTopLeft so no flip is done.
    this->didWriteToSurface(dst, kTopLeft_GrSurfaceOrigin, &dstRect);
    return true;
}
void GrGLGpu::copySurfaceAsCopyTexSubImage(GrSurface* dst, GrSurface* src, const SkIRect& srcRect,
                                           const SkIPoint& dstPoint) {
    SkASSERT(can_copy_texsubimage(dst, src, this->glCaps()));
    this->bindSurfaceFBOForPixelOps(src, GR_GL_FRAMEBUFFER, kSrc_TempFBOTarget);
    GrGLTexture* dstTex = static_cast<GrGLTexture*>(dst->asTexture());
    SkASSERT(dstTex);
    // We modified the bound FBO
    fHWBoundRenderTargetUniqueID.makeInvalid();
    this->bindTextureToScratchUnit(dstTex->target(), dstTex->textureID());
    GL_CALL(CopyTexSubImage2D(dstTex->target(), 0,
                              dstPoint.fX, dstPoint.fY,
                              srcRect.fLeft, srcRect.fTop,
                              srcRect.width(), srcRect.height()));
    this->unbindTextureFBOForPixelOps(GR_GL_FRAMEBUFFER, src);
    SkIRect dstRect = SkIRect::MakeXYWH(dstPoint.fX, dstPoint.fY,
                                        srcRect.width(), srcRect.height());
    // The rect is already in device space so we pass in kTopLeft so no flip is done.
    this->didWriteToSurface(dst, kTopLeft_GrSurfaceOrigin, &dstRect);
}
bool GrGLGpu::copySurfaceAsBlitFramebuffer(GrSurface* dst, GrSurface* src, const SkIRect& srcRect,
                                           const SkIPoint& dstPoint) {
    SkASSERT(can_blit_framebuffer_for_copy_surface(dst, src, srcRect, dstPoint, this->glCaps()));
    SkIRect dstRect = SkIRect::MakeXYWH(dstPoint.fX, dstPoint.fY,
                                        srcRect.width(), srcRect.height());
    if (dst == src) {
        if (SkIRect::IntersectsNoEmptyCheck(dstRect, srcRect)) {
            return false;
        }
    }
    this->bindSurfaceFBOForPixelOps(dst, GR_GL_DRAW_FRAMEBUFFER, kDst_TempFBOTarget);
    this->bindSurfaceFBOForPixelOps(src, GR_GL_READ_FRAMEBUFFER, kSrc_TempFBOTarget);
    // We modified the bound FBO
    fHWBoundRenderTargetUniqueID.makeInvalid();
    // BlitFrameBuffer respects the scissor, so disable it.
    this->disableScissor();
    this->disableWindowRectangles();
    GL_CALL(BlitFramebuffer(srcRect.fLeft,
                            srcRect.fTop,
                            srcRect.fRight,
                            srcRect.fBottom,
                            dstRect.fLeft,
                            dstRect.fTop,
                            dstRect.fRight,
                            dstRect.fBottom,
                            GR_GL_COLOR_BUFFER_BIT, GR_GL_NEAREST));
    this->unbindTextureFBOForPixelOps(GR_GL_DRAW_FRAMEBUFFER, dst);
    this->unbindTextureFBOForPixelOps(GR_GL_READ_FRAMEBUFFER, src);
    // The rect is already in device space so we pass in kTopLeft so no flip is done.
    this->didWriteToSurface(dst, kTopLeft_GrSurfaceOrigin, &dstRect);
    return true;
}
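// Regenerates mip levels either with GenerateMipmap or, when manual mipmapping is required
// (e.g. to work around sRGB driver bugs), by rendering each level from the previous one:
// GR_GL_TEXTURE_BASE_LEVEL is pinned to level - 1 so each draw can only sample the mip that
// was just produced.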
bool GrGLGpu::onRegenerateMipMapLevels(GrTexture* texture) {
    auto glTex = static_cast<GrGLTexture*>(texture);
    // Mipmaps are only supported on 2D textures:
    if (GR_GL_TEXTURE_2D != glTex->target()) {
        return false;
    }
    // Manual implementation of mipmap generation, to work around driver bugs w/sRGB.
    // Uses draw calls to do a series of downsample operations to successive mips.
    // The manual approach requires the ability to limit which level we're sampling and that the
    // destination can be bound to a FBO:
    if (!this->glCaps().doManualMipmapping() ||
        !this->glCaps().canConfigBeFBOColorAttachment(texture->config())) {
        GrGLenum target = glTex->target();
        this->bindTextureToScratchUnit(target, glTex->textureID());
        GL_CALL(GenerateMipmap(glTex->target()));
        return true;
    }
    int width = texture->width();
    int height = texture->height();
    int levelCount = SkMipMap::ComputeLevelCount(width, height) + 1;
    SkASSERT(levelCount == texture->texturePriv().maxMipMapLevel() + 1);
    // Create (if necessary), then bind temporary FBO:
    if (0 == fTempDstFBOID) {
        GL_CALL(GenFramebuffers(1, &fTempDstFBOID));
    }
    this->bindFramebuffer(GR_GL_FRAMEBUFFER, fTempDstFBOID);
    fHWBoundRenderTargetUniqueID.makeInvalid();
    // Bind the texture, to get things configured for filtering.
    // We'll be changing our base level further below:
    this->setTextureUnit(0);
    // The mipmap program does not do any swizzling.
    this->bindTexture(0, GrSamplerState::ClampBilerp(), GrSwizzle::RGBA(), glTex);
    // Vertex data:
    if (!fMipmapProgramArrayBuffer) {
        static const GrGLfloat vdata[] = {
            0, 0,
            0, 1,
            1, 0,
            1, 1
        };
        fMipmapProgramArrayBuffer = GrGLBuffer::Make(this, sizeof(vdata), GrGpuBufferType::kVertex,
                                                     kStatic_GrAccessPattern, vdata);
    }
    if (!fMipmapProgramArrayBuffer) {
        return false;
    }
    fHWVertexArrayState.setVertexArrayID(this, 0);
    GrGLAttribArrayState* attribs = fHWVertexArrayState.bindInternalVertexArray(this);
    attribs->enableVertexArrays(this, 1);
    attribs->set(this, 0, fMipmapProgramArrayBuffer.get(), kFloat2_GrVertexAttribType,
                 kFloat2_GrSLType, 2 * sizeof(GrGLfloat), 0);
    // Set "simple" state once:
    this->flushBlendAndColorWrite(GrXferProcessor::BlendInfo(), GrSwizzle::RGBA());
    this->flushHWAAState(nullptr, false);
    this->disableScissor();
    this->disableWindowRectangles();
    this->disableStencil();
    // Do all the blits:
    width = texture->width();
    height = texture->height();
    for (GrGLint level = 1; level < levelCount; ++level) {
        // Get and bind the program for this particular downsample (filter shape can vary):
        int progIdx = TextureSizeToMipmapProgramIdx(width, height);
        if (!fMipmapPrograms[progIdx].fProgram) {
            if (!this->createMipmapProgram(progIdx)) {
                SkDebugf("Failed to create mipmap program.\n");
                // Invalidate all params to cover base level change in a previous iteration.
                glTex->textureParamsModified();
                return false;
            }
        }
        this->flushProgram(fMipmapPrograms[progIdx].fProgram);
        // Texcoord uniform is expected to contain (1/w, (w-1)/w, 1/h, (h-1)/h)
        const float invWidth = 1.0f / width;
        const float invHeight = 1.0f / height;
        GL_CALL(Uniform4f(fMipmapPrograms[progIdx].fTexCoordXformUniform,
                          invWidth, (width - 1) * invWidth, invHeight, (height - 1) * invHeight));
        GL_CALL(Uniform1i(fMipmapPrograms[progIdx].fTextureUniform, 0));
        // Only sample from previous mip
        GL_CALL(TexParameteri(GR_GL_TEXTURE_2D, GR_GL_TEXTURE_BASE_LEVEL, level - 1));
        GL_CALL(FramebufferTexture2D(GR_GL_FRAMEBUFFER, GR_GL_COLOR_ATTACHMENT0, GR_GL_TEXTURE_2D,
                                     glTex->textureID(), level));
        width = SkTMax(1, width / 2);
        height = SkTMax(1, height / 2);
        this->flushViewport(width, height);
        GL_CALL(DrawArrays(GR_GL_TRIANGLE_STRIP, 0, 4));
    }
    // Unbind:
    GL_CALL(FramebufferTexture2D(GR_GL_FRAMEBUFFER, GR_GL_COLOR_ATTACHMENT0,
                                 GR_GL_TEXTURE_2D, 0, 0));
    // We modified the base level param.
    GrGLTextureParameters::NonsamplerState nonsamplerState = glTex->parameters()->nonsamplerState();
    // We drew the 2nd to last level into the last level.
    nonsamplerState.fBaseMipMapLevel = levelCount - 2;
    glTex->parameters()->set(nullptr, nonsamplerState, fResetTimestampForTextureParameters);
    return true;
}
void GrGLGpu::querySampleLocations(
        GrRenderTarget* renderTarget, SkTArray<SkPoint>* sampleLocations) {
    this->flushRenderTargetNoColorWrites(static_cast<GrGLRenderTarget*>(renderTarget));
    int effectiveSampleCnt;
    GR_GL_GetIntegerv(this->glInterface(), GR_GL_SAMPLES, &effectiveSampleCnt);
    SkASSERT(effectiveSampleCnt >= renderTarget->numSamples());
    sampleLocations->reset(effectiveSampleCnt);
    for (int i = 0; i < effectiveSampleCnt; ++i) {
        GL_CALL(GetMultisamplefv(GR_GL_SAMPLE_POSITION, i, &(*sampleLocations)[i].fX));
    }
}
void GrGLGpu::xferBarrier(GrRenderTarget* rt, GrXferBarrierType type) {
    SkASSERT(type);
    switch (type) {
        case kTexture_GrXferBarrierType: {
            GrGLRenderTarget* glrt = static_cast<GrGLRenderTarget*>(rt);
            SkASSERT(glrt->textureFBOID() != 0 && glrt->renderFBOID() != 0);
            if (glrt->textureFBOID() != glrt->renderFBOID()) {
                // The render target uses separate storage so no need for glTextureBarrier.
                // FIXME: The render target will resolve automatically when its texture is bound,
                // but we could resolve only the bounds that will be read if we do it here instead.
                return;
            }
            SkASSERT(this->caps()->textureBarrierSupport());
            GL_CALL(TextureBarrier());
            return;
        }
        case kBlend_GrXferBarrierType:
            SkASSERT(GrCaps::kAdvanced_BlendEquationSupport ==
                     this->caps()->blendEquationSupport());
            GL_CALL(BlendBarrier());
            return;
        default: break; // placate compiler warnings that kNone not handled
    }
}
static GrPixelConfig gl_format_to_pixel_config(GrGLFormat format) {
    switch (format) {
        case GrGLFormat::kRGBA8:        return kRGBA_8888_GrPixelConfig;
        case GrGLFormat::kRGB8:         return kRGB_888_GrPixelConfig;
        case GrGLFormat::kRG8:          return kRG_88_GrPixelConfig;
        case GrGLFormat::kBGRA8:        return kBGRA_8888_GrPixelConfig;
        case GrGLFormat::kLUMINANCE8:   return kGray_8_GrPixelConfig;
        case GrGLFormat::kSRGB8_ALPHA8: return kSRGBA_8888_GrPixelConfig;
        case GrGLFormat::kRGB10_A2:     return kRGBA_1010102_GrPixelConfig;
        case GrGLFormat::kRGB565:       return kRGB_565_GrPixelConfig;
        case GrGLFormat::kRGBA4:        return kRGBA_4444_GrPixelConfig;
        case GrGLFormat::kRGBA32F:      return kRGBA_float_GrPixelConfig;
        case GrGLFormat::kRGBA16F:      return kRGBA_half_GrPixelConfig;
        case GrGLFormat::kR16:          return kR_16_GrPixelConfig;
        case GrGLFormat::kRG16:         return kRG_1616_GrPixelConfig;
        case GrGLFormat::kRGBA16:       return kRGBA_16161616_GrPixelConfig;
        case GrGLFormat::kRG16F:        return kRG_half_GrPixelConfig;
        case GrGLFormat::kUnknown:      return kUnknown_GrPixelConfig;
        // Configs with multiple equivalent formats.
        case GrGLFormat::kR16F:         return kAlpha_half_GrPixelConfig;
        case GrGLFormat::kLUMINANCE16F: return kAlpha_half_GrPixelConfig;
        case GrGLFormat::kALPHA8:       return kAlpha_8_GrPixelConfig;
        case GrGLFormat::kR8:           return kAlpha_8_GrPixelConfig;
        case GrGLFormat::kCOMPRESSED_RGB8_ETC2: return kRGB_ETC1_GrPixelConfig;
        case GrGLFormat::kCOMPRESSED_ETC1_RGB8: return kRGB_ETC1_GrPixelConfig;
    }
    SkUNREACHABLE;
}
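// Creates a backend texture outside the normal resource-provider path. Pixel data may be
// supplied by the caller or synthesized from a solid color via GrFillInData /
// GrFillInCompressedData; compressed formats must be non-mipmapped and always need initial
// data. The texture is unbound from the scratch unit before returning to avoid asserts.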
  3341. GrBackendTexture GrGLGpu::createBackendTexture(int w, int h,
  3342. const GrBackendFormat& format,
  3343. GrMipMapped mipMapped,
  3344. GrRenderable renderable,
  3345. const void* srcPixels, size_t rowBytes,
  3346. const SkColor4f* color,
  3347. GrProtected isProtected) {
  3348. this->handleDirtyContext();
  3349. GrGLFormat glFormat = GrGLBackendFormatToGLFormat(format);
  3350. if (glFormat == GrGLFormat::kUnknown) {
  3351. return GrBackendTexture(); // invalid
  3352. }
  3353. GrPixelConfig config = gl_format_to_pixel_config(glFormat);
  3354. if (config == kUnknown_GrPixelConfig) {
  3355. return GrBackendTexture(); // invalid
  3356. }
  3357. if (!this->caps()->isConfigTexturable(config)) {
  3358. return GrBackendTexture(); // invalid
  3359. }
  3360. if (w < 1 || w > this->caps()->maxTextureSize() ||
  3361. h < 1 || h > this->caps()->maxTextureSize()) {
  3362. return GrBackendTexture(); // invalid
  3363. }
  3364. // Currently we don't support uploading pixel data when mipped.
  3365. if (srcPixels && GrMipMapped::kYes == mipMapped) {
  3366. return GrBackendTexture(); // invalid
  3367. }
  3368. if (mipMapped == GrMipMapped::kYes && !this->caps()->mipMapSupport()) {
  3369. return GrBackendTexture(); // invalid
  3370. }
  3371. GrGLTextureInfo info;
  3372. GrGLTextureParameters::SamplerOverriddenState initialState;
  3373. int mipLevelCount = 0;
  3374. SkAutoTMalloc<GrMipLevel> texels;
  3375. SkAutoMalloc pixelStorage;
  3376. SkImage::CompressionType compressionType;
  3377. if (GrGLFormatToCompressionType(glFormat, &compressionType)) {
  3378. // Compressed textures currently must be non-MIP mapped and have initial data.
  3379. if (mipMapped == GrMipMapped::kYes) {
  3380. return GrBackendTexture();
  3381. }
  3382. if (!srcPixels) {
  3383. if (!color) {
  3384. return GrBackendTexture();
  3385. }
  3386. SkASSERT(0 == rowBytes);
  3387. size_t size = GrCompressedDataSize(compressionType, w, h);
  3388. srcPixels = pixelStorage.reset(size);
  3389. GrFillInCompressedData(compressionType, w, h, (char*)srcPixels, *color);
  3390. }
  3391. if (!this->createCompressedTextureImpl(&info, w, h, compressionType, &initialState,
  3392. srcPixels)) {
  3393. return GrBackendTexture();
  3394. }
  3395. } else {
  3396. if (srcPixels) {
  3397. mipLevelCount = 1;
  3398. texels.reset(mipLevelCount);
  3399. texels.get()[0] = {srcPixels, rowBytes};
  3400. } else if (color) {
  3401. mipLevelCount = 1;
  3402. if (GrMipMapped::kYes == mipMapped) {
  3403. mipLevelCount = SkMipMap::ComputeLevelCount(w, h) + 1;
  3404. }
  3405. texels.reset(mipLevelCount);
  3406. SkTArray<size_t> individualMipOffsets(mipLevelCount);
  3407. size_t bytesPerPixel = GrBytesPerPixel(config);
  3408. size_t totalSize = GrComputeTightCombinedBufferSize(
  3409. bytesPerPixel, w, h, &individualMipOffsets, mipLevelCount);
  3410. char* tmpPixels = (char*)pixelStorage.reset(totalSize);
  3411. GrFillInData(config, w, h, individualMipOffsets, tmpPixels, *color);
  3412. for (int i = 0; i < mipLevelCount; ++i) {
  3413. size_t offset = individualMipOffsets[i];
  3414. int twoToTheMipLevel = 1 << i;
  3415. int currentWidth = SkTMax(1, w / twoToTheMipLevel);
  3416. texels.get()[i] = {&(tmpPixels[offset]), currentWidth * bytesPerPixel};
  3417. }
  3418. }
  3419. GrSurfaceDesc desc;
  3420. desc.fWidth = w;
  3421. desc.fHeight = h;
  3422. desc.fConfig = config;
  3423. if (!this->createTextureImpl(desc, &info, renderable, &initialState, texels.get(),
  3424. mipLevelCount, nullptr)) {
  3425. return GrBackendTexture(); // invalid
  3426. }
  3427. }

    // unbind the texture from the texture unit to avoid asserts
    GL_CALL(BindTexture(info.fTarget, 0));

    auto parameters = sk_make_sp<GrGLTextureParameters>();
    parameters->set(&initialState, GrGLTextureParameters::NonsamplerState(),
                    fResetTimestampForTextureParameters);

    GrBackendTexture beTex = GrBackendTexture(w, h, mipMapped, info, std::move(parameters));
#if GR_TEST_UTILS
    // Lots of tests don't go through Skia's public interface, which will set the config, so for
    // testing we make sure we set a config here.
    beTex.setPixelConfig(config);
#endif
    return beTex;
}
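
// Deletes the GL texture object wrapped by a backend texture, if it carries GL texture info.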
void GrGLGpu::deleteBackendTexture(const GrBackendTexture& tex) {
    SkASSERT(GrBackendApi::kOpenGL == tex.backend());

    GrGLTextureInfo info;
    if (tex.getGLTextureInfo(&info)) {
        GL_CALL(DeleteTextures(1, &info.fID));
    }
}

#if GR_TEST_UTILS

bool GrGLGpu::isTestingOnlyBackendTexture(const GrBackendTexture& tex) const {
    SkASSERT(GrBackendApi::kOpenGL == tex.backend());

    GrGLTextureInfo info;
    if (!tex.getGLTextureInfo(&info)) {
        return false;
    }

    GrGLboolean result;
    GL_CALL_RET(result, IsTexture(info.fID));

    return (GR_GL_TRUE == result);
}
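
// Builds a standalone FBO (color plus stencil attachments) so tests can obtain a backend render
// target without going through Skia's resource cache.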
GrBackendRenderTarget GrGLGpu::createTestingOnlyBackendRenderTarget(int w, int h,
                                                                    GrColorType colorType) {
    if (w > this->caps()->maxRenderTargetSize() || h > this->caps()->maxRenderTargetSize()) {
        return GrBackendRenderTarget(); // invalid
    }

    this->handleDirtyContext();

    auto config = GrColorTypeToPixelConfig(colorType);
    if (!this->glCaps().isConfigRenderable(config)) {
        return {};
    }

    auto format = this->glCaps().getFormatFromColorType(colorType);
    bool useTexture = false;
    GrGLenum colorBufferFormat;
    GrGLenum externalFormat = 0, externalType = 0;
    if (config == kBGRA_8888_GrPixelConfig && this->glCaps().bgraIsInternalFormat()) {
        // BGRA render buffers are not supported.
        this->glCaps().getTexImageFormats(config, config, &colorBufferFormat, &externalFormat,
                                          &externalType);
        useTexture = true;
    } else {
        colorBufferFormat = this->glCaps().getRenderbufferInternalFormat(format);
    }

    int sFormatIdx = this->getCompatibleStencilIndex(format);
    if (sFormatIdx < 0) {
        return {};
    }

    GrGLuint colorID = 0;
    GrGLuint stencilID = 0;
    auto deleteIDs = [&] {
        if (colorID) {
            if (useTexture) {
                GL_CALL(DeleteTextures(1, &colorID));
            } else {
                GL_CALL(DeleteRenderbuffers(1, &colorID));
            }
        }
        if (stencilID) {
            GL_CALL(DeleteRenderbuffers(1, &stencilID));
        }
    };

    if (useTexture) {
        GL_CALL(GenTextures(1, &colorID));
    } else {
        GL_CALL(GenRenderbuffers(1, &colorID));
    }
    GL_CALL(GenRenderbuffers(1, &stencilID));
    if (!stencilID || !colorID) {
        deleteIDs();
        return {};
    }

    GrGLFramebufferInfo info;
    info.fFBOID = 0;
    info.fFormat = this->glCaps().configSizedInternalFormat(config);
    GL_CALL(GenFramebuffers(1, &info.fFBOID));
    if (!info.fFBOID) {
        deleteIDs();
        return {};
    }

    this->invalidateBoundRenderTarget();

    this->bindFramebuffer(GR_GL_FRAMEBUFFER, info.fFBOID);
    if (useTexture) {
        this->bindTextureToScratchUnit(GR_GL_TEXTURE_2D, colorID);
        GL_CALL(TexImage2D(GR_GL_TEXTURE_2D, 0, colorBufferFormat, w, h, 0, externalFormat,
                           externalType, nullptr));
        GL_CALL(FramebufferTexture2D(GR_GL_FRAMEBUFFER, GR_GL_COLOR_ATTACHMENT0, GR_GL_TEXTURE_2D,
                                     colorID, 0));
    } else {
        GL_CALL(BindRenderbuffer(GR_GL_RENDERBUFFER, colorID));
        GL_ALLOC_CALL(this->glInterface(),
                      RenderbufferStorage(GR_GL_RENDERBUFFER, colorBufferFormat, w, h));
        GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER, GR_GL_COLOR_ATTACHMENT0,
                                        GR_GL_RENDERBUFFER, colorID));
    }
    GL_CALL(BindRenderbuffer(GR_GL_RENDERBUFFER, stencilID));
    auto stencilBufferFormat = this->glCaps().stencilFormats()[sFormatIdx].fInternalFormat;
    GL_ALLOC_CALL(this->glInterface(),
                  RenderbufferStorage(GR_GL_RENDERBUFFER, stencilBufferFormat, w, h));
    GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER, GR_GL_STENCIL_ATTACHMENT, GR_GL_RENDERBUFFER,
                                    stencilID));
    if (this->glCaps().stencilFormats()[sFormatIdx].fPacked) {
        GL_CALL(FramebufferRenderbuffer(GR_GL_FRAMEBUFFER, GR_GL_DEPTH_ATTACHMENT,
                                        GR_GL_RENDERBUFFER, stencilID));
    }

    // We don't want to have to recover the renderbuffer/texture IDs later to delete them. OpenGL
    // has this rule that if a renderbuffer/texture is deleted and a FBO other than the current FBO
    // has the RB attached then deletion is delayed. So we unbind the FBO here and delete the
    // renderbuffers/texture.
    this->bindFramebuffer(GR_GL_FRAMEBUFFER, 0);
    deleteIDs();

    this->bindFramebuffer(GR_GL_FRAMEBUFFER, info.fFBOID);
    GrGLenum status;
    GL_CALL_RET(status, CheckFramebufferStatus(GR_GL_FRAMEBUFFER));
    if (GR_GL_FRAMEBUFFER_COMPLETE != status) {
        this->deleteFramebuffer(info.fFBOID);
        return {};
    }

    auto stencilBits = SkToInt(this->glCaps().stencilFormats()[sFormatIdx].fStencilBits);

    GrBackendRenderTarget beRT = GrBackendRenderTarget(w, h, 1, stencilBits, info);
    // Lots of tests don't go through Skia's public interface, which will set the config, so for
    // testing we make sure we set a config here.
    beRT.setPixelConfig(config);
    SkASSERT(kUnknown_GrPixelConfig != this->caps()->validateBackendRenderTarget(beRT, colorType));
    return beRT;
}

void GrGLGpu::deleteTestingOnlyBackendRenderTarget(const GrBackendRenderTarget& backendRT) {
    SkASSERT(GrBackendApi::kOpenGL == backendRT.backend());
    GrGLFramebufferInfo info;
    if (backendRT.getGLFramebufferInfo(&info)) {
        if (info.fFBOID) {
            this->deleteFramebuffer(info.fFBOID);
        }
    }
}

void GrGLGpu::testingOnly_flushGpuAndSync() {
    GL_CALL(Finish());
}

#endif

///////////////////////////////////////////////////////////////////////////////
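
// Returns the attribute array state to use for the next draw: a GrGLGpu-owned vertex array object
// on core profile contexts (where VAO 0 is not usable), or the context's default vertex array plus
// cached attribute state otherwise.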
GrGLAttribArrayState* GrGLGpu::HWVertexArrayState::bindInternalVertexArray(GrGLGpu* gpu,
                                                                           const GrBuffer* ibuf) {
    GrGLAttribArrayState* attribState;

    if (gpu->glCaps().isCoreProfile()) {
        if (!fCoreProfileVertexArray) {
            GrGLuint arrayID;
            GR_GL_CALL(gpu->glInterface(), GenVertexArrays(1, &arrayID));
            int attrCount = gpu->glCaps().maxVertexAttributes();
            fCoreProfileVertexArray = new GrGLVertexArray(arrayID, attrCount);
        }
        if (ibuf) {
            attribState = fCoreProfileVertexArray->bindWithIndexBuffer(gpu, ibuf);
        } else {
            attribState = fCoreProfileVertexArray->bind(gpu);
        }
    } else {
        if (ibuf) {
            // bindBuffer implicitly binds VAO 0 when binding an index buffer.
            gpu->bindBuffer(GrGpuBufferType::kIndex, ibuf);
        } else {
            this->setVertexArrayID(gpu, 0);
        }
        int attrCount = gpu->glCaps().maxVertexAttributes();
        if (fDefaultVertexArrayAttribState.count() != attrCount) {
            fDefaultVertexArrayAttribState.resize(attrCount);
        }
        attribState = &fDefaultVertexArrayAttribState;
    }
    return attribState;
}
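
// Finishes or flushes the GL stream as required by the flush info: a full glFinish when the client
// asked for a CPU sync (or requested a finished proc but GLsync is unsupported), otherwise a
// glFlush plus deferred finished-proc bookkeeping via fence syncs.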
void GrGLGpu::onFinishFlush(GrSurfaceProxy*[], int, SkSurface::BackendSurfaceAccess access,
                            const GrFlushInfo& info, const GrPrepareForExternalIORequests&) {
    // If we inserted semaphores during the flush, we need to call GLFlush.
    bool insertedSemaphore = info.fNumSemaphores > 0 && this->caps()->semaphoreSupport();
    // We call finish if the client told us to sync or if we have a finished proc but don't support
    // GLsync objects.
    bool finish = (info.fFlags & kSyncCpu_GrFlushFlag) ||
                  (info.fFinishedProc && !this->caps()->fenceSyncSupport());

    if (finish) {
        GL_CALL(Finish());
        // After a finish everything previously sent to GL is done.
        for (const auto& cb : fFinishCallbacks) {
            cb.fCallback(cb.fContext);
            this->deleteSync(cb.fSync);
        }
        fFinishCallbacks.clear();
        if (info.fFinishedProc) {
            info.fFinishedProc(info.fFinishedContext);
        }
    } else {
        if (info.fFinishedProc) {
            FinishCallback callback;
            callback.fCallback = info.fFinishedProc;
            callback.fContext = info.fFinishedContext;
            callback.fSync = (GrGLsync)this->insertFence();
            fFinishCallbacks.push_back(callback);
            GL_CALL(Flush());
        } else if (insertedSemaphore) {
            // Must call flush after semaphores in case they are waited on in another GL context.
            GL_CALL(Flush());
        }
        // See if any previously inserted finish procs are good to go.
        this->checkFinishProcs();
    }
}
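
// GL command buffers are cached on the GrGLGpu; submit() simply resets whichever cached buffer was
// handed out so it can be reused.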
void GrGLGpu::submit(GrGpuCommandBuffer* buffer) {
    if (buffer->asRTCommandBuffer()) {
        SkASSERT(fCachedRTCommandBuffer.get() == buffer);
        fCachedRTCommandBuffer->reset();
    } else {
        SkASSERT(fCachedTexCommandBuffer.get() == buffer);
        fCachedTexCommandBuffer->reset();
    }
}
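
// Fence support is implemented with GL sync objects: insertFence() drops a GLsync into the command
// stream, and waitFence()/deleteFence() operate on it via ClientWaitSync/DeleteSync.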
GrFence SK_WARN_UNUSED_RESULT GrGLGpu::insertFence() {
    SkASSERT(this->caps()->fenceSyncSupport());
    GrGLsync sync;
    GL_CALL_RET(sync, FenceSync(GR_GL_SYNC_GPU_COMMANDS_COMPLETE, 0));
    GR_STATIC_ASSERT(sizeof(GrFence) >= sizeof(GrGLsync));
    return (GrFence)sync;
}

bool GrGLGpu::waitSync(GrGLsync sync, uint64_t timeout, bool flush) {
    GrGLbitfield flags = flush ? GR_GL_SYNC_FLUSH_COMMANDS_BIT : 0;
    GrGLenum result;
    GL_CALL_RET(result, ClientWaitSync(sync, flags, timeout));
    return (GR_GL_CONDITION_SATISFIED == result || GR_GL_ALREADY_SIGNALED == result);
}

bool GrGLGpu::waitFence(GrFence fence, uint64_t timeout) {
    return this->waitSync((GrGLsync)fence, timeout, /* flush = */ true);
}

void GrGLGpu::deleteFence(GrFence fence) const {
    this->deleteSync((GrGLsync)fence);
}
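
// Semaphores are likewise backed by GLsync objects: insertSemaphore() creates the sync behind the
// commands issued so far, and waitSemaphore() makes the GL server wait on it.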
sk_sp<GrSemaphore> SK_WARN_UNUSED_RESULT GrGLGpu::makeSemaphore(bool isOwned) {
    SkASSERT(this->caps()->semaphoreSupport());
    return GrGLSemaphore::Make(this, isOwned);
}

sk_sp<GrSemaphore> GrGLGpu::wrapBackendSemaphore(const GrBackendSemaphore& semaphore,
                                                 GrResourceProvider::SemaphoreWrapType wrapType,
                                                 GrWrapOwnership ownership) {
    SkASSERT(this->caps()->semaphoreSupport());
    return GrGLSemaphore::MakeWrapped(this, semaphore.glSync(), ownership);
}

void GrGLGpu::insertSemaphore(sk_sp<GrSemaphore> semaphore) {
    GrGLSemaphore* glSem = static_cast<GrGLSemaphore*>(semaphore.get());

    GrGLsync sync;
    GL_CALL_RET(sync, FenceSync(GR_GL_SYNC_GPU_COMMANDS_COMPLETE, 0));
    glSem->setSync(sync);
}

void GrGLGpu::waitSemaphore(sk_sp<GrSemaphore> semaphore) {
    GrGLSemaphore* glSem = static_cast<GrGLSemaphore*>(semaphore.get());

    GL_CALL(WaitSync(glSem->sync(), 0, GR_GL_TIMEOUT_IGNORED));
}
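
// Fires any queued finished procs whose fence has already signaled.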
void GrGLGpu::checkFinishProcs() {
    // Bail after the first unfinished sync since we expect they signal in the order inserted.
    while (!fFinishCallbacks.empty() && this->waitSync(fFinishCallbacks.front().fSync,
                                                       /* timeout = */ 0, /* flush = */ false)) {
        fFinishCallbacks.front().fCallback(fFinishCallbacks.front().fContext);
        this->deleteSync(fFinishCallbacks.front().fSync);
        fFinishCallbacks.pop_front();
    }
}

void GrGLGpu::deleteSync(GrGLsync sync) const {
    GL_CALL(DeleteSync(sync));
}

void GrGLGpu::insertEventMarker(const char* msg) {
    GL_CALL(InsertEventMarker(strlen(msg), msg));
}
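
// Makes a texture produced on this context safe to consume on another context that shares its GL
// objects: a semaphore is inserted and the stream flushed so the consumer can wait on it.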
sk_sp<GrSemaphore> GrGLGpu::prepareTextureForCrossContextUsage(GrTexture* texture) {
    // Set up a semaphore to be signaled once the data is ready, and flush GL
    sk_sp<GrSemaphore> semaphore = this->makeSemaphore(true);
    this->insertSemaphore(semaphore);
    // We must call flush here to make sure the GrGLSync object gets created and sent to the gpu.
    GL_CALL(Flush());

    return semaphore;
}
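
// Maps a texture's sampler type to the index of the copy program that can sample it (2D, rect, or
// external).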
int GrGLGpu::TextureToCopyProgramIdx(GrTexture* texture) {
    switch (GrSLCombinedSamplerTypeForTextureType(texture->texturePriv().textureType())) {
        case kTexture2DSampler_GrSLType:
            return 0;
        case kTexture2DRectSampler_GrSLType:
            return 1;
        case kTextureExternalSampler_GrSLType:
            return 2;
        default:
            SK_ABORT("Unexpected sampler type");
            return 0;
    }
}

#ifdef SK_ENABLE_DUMP_GPU
#include "src/utils/SkJSONWriter.h"
void GrGLGpu::onDumpJSON(SkJSONWriter* writer) const {
    // We are called by the base class, which has already called beginObject(). We choose to nest
    // all of our caps information in a named sub-object.
    writer->beginObject("GL GPU");

    const GrGLubyte* str;
    GL_CALL_RET(str, GetString(GR_GL_VERSION));
    writer->appendString("GL_VERSION", (const char*)(str));
    GL_CALL_RET(str, GetString(GR_GL_RENDERER));
    writer->appendString("GL_RENDERER", (const char*)(str));
    GL_CALL_RET(str, GetString(GR_GL_VENDOR));
    writer->appendString("GL_VENDOR", (const char*)(str));
    GL_CALL_RET(str, GetString(GR_GL_SHADING_LANGUAGE_VERSION));
    writer->appendString("GL_SHADING_LANGUAGE_VERSION", (const char*)(str));

    writer->appendName("extensions");
    glInterface()->fExtensions.dumpJSON(writer);

    writer->endObject();
}
#endif