// SPDX-License-Identifier: LGPL-2.1 OR BSD-3-Clause
/*
 * Copyright (c) 2019, Chips&Media
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "product.h"
#include "wave/wave5.h"
#include "vpuerror.h"
#include "wave/wave5_regdefine.h"
#include "misc/debug.h"
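
/*
 * Small register helpers. Wave5VpuIsInit returns the VCPU program counter
 * (a nonzero value is generally taken to mean the firmware has already been
 * loaded and started on this core), and Wave5VpuIsBusy returns the host
 * command busy flag.
 */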
Uint32 Wave5VpuIsInit(Uint32 coreIdx)
{
    Uint32 pc;

    pc = (Uint32)VpuReadReg(coreIdx, W5_VCPU_CUR_PC);

    return pc;
}

Int32 Wave5VpuIsBusy(Uint32 coreIdx)
{
    return VpuReadReg(coreIdx, W5_VPU_BUSY_STATUS);
}

Int32 WaveVpuGetProductId(Uint32 coreIdx)
{
    Uint32 productId = PRODUCT_ID_NONE;
    Uint32 val;

    if (coreIdx >= MAX_NUM_VPU_CORE)
        return PRODUCT_ID_NONE;

    val = VpuReadReg(coreIdx, W5_PRODUCT_NUMBER);

    switch (val) {
    case WAVE512_CODE:       productId = PRODUCT_ID_512; break;
    case WAVE515_CODE:       productId = PRODUCT_ID_515; break;
    case WAVE521_CODE:       productId = PRODUCT_ID_521; break;
    case WAVE521C_CODE:      productId = PRODUCT_ID_521; break;
    case WAVE511_CODE:       productId = PRODUCT_ID_511; break;
    case WAVE521C_DUAL_CODE: productId = PRODUCT_ID_521; break;
    case WAVE517_CODE:       productId = PRODUCT_ID_517; break;
    default:
        VLOG(ERR, "Check productId(%d)\n", val);
        break;
    }

    return productId;
}

void Wave5BitIssueCommand(CodecInst* instance, Uint32 cmd)
{
    Uint32 instanceIndex = 0;
    Uint32 codecMode = 0;
    Uint32 coreIdx;

    if (instance == NULL) {
        return;
    }

    instanceIndex = instance->instIndex;
    codecMode = instance->codecMode;
    coreIdx = instance->coreIdx;

    VpuWriteReg(coreIdx, W5_CMD_INSTANCE_INFO, (codecMode<<16) | (instanceIndex&0xffff));
    VpuWriteReg(coreIdx, W5_VPU_BUSY_STATUS, 1);
    VpuWriteReg(coreIdx, W5_COMMAND, cmd);

    if (instance->loggingEnable)
        vdi_log(coreIdx, cmd, 1);

    VpuWriteReg(coreIdx, W5_VPU_HOST_INT_REQ, 1);

    return;
}
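
/*
 * SendQuery: issues the W5_QUERY host command with the given query option,
 * polls W5_VPU_BUSY_STATUS until the command completes, and converts the
 * W5_RET_SUCCESS report into a RetCode.
 */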
static RetCode SendQuery(CodecInst* instance, QUERY_OPT queryOpt)
{
    // Send QUERY cmd
    VpuWriteReg(instance->coreIdx, W5_QUERY_OPTION, queryOpt);
    VpuWriteReg(instance->coreIdx, W5_VPU_BUSY_STATUS, 1);
    Wave5BitIssueCommand(instance, W5_QUERY);

    if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
        if (instance->loggingEnable)
            vdi_log(instance->coreIdx, W5_QUERY, 2);
        return RETCODE_VPU_RESPONSE_TIMEOUT;
    }

    if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == FALSE)
        return RETCODE_FAILURE;

    return RETCODE_SUCCESS;
}

RetCode Wave5VpuEncGiveCommand(CodecInst *pCodecInst, CodecCommand cmd, void *param)
{
    RetCode ret = RETCODE_SUCCESS;

    switch (cmd) {
    default:
        ret = RETCODE_NOT_SUPPORTED_FEATURE;
    }

    return ret;
}
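
/*
 * SetupWave5Properties: runs a GET_VPU_INFO query and fills the per-core
 * VpuAttr entry: product name/ID, firmware version, hardware configuration
 * words, and the derived supportDecoders/supportEncoders capability masks.
 */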
static RetCode SetupWave5Properties(Uint32 coreIdx)
{
    VpuAttr* pAttr = &g_VpuCoreAttributes[coreIdx];
    Uint32   regVal;
    Uint8*   str;
    RetCode  ret = RETCODE_SUCCESS;

    VpuWriteReg(coreIdx, W5_QUERY_OPTION, GET_VPU_INFO);
    VpuWriteReg(coreIdx, W5_VPU_BUSY_STATUS, 1);
    VpuWriteReg(coreIdx, W5_COMMAND, W5_QUERY);
    VpuWriteReg(coreIdx, W5_VPU_HOST_INT_REQ, 1);
    if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
        return RETCODE_VPU_RESPONSE_TIMEOUT;
    }

    if (VpuReadReg(coreIdx, W5_RET_SUCCESS) == FALSE) {
        ret = RETCODE_QUERY_FAILURE;
    }
    else {
        regVal = VpuReadReg(coreIdx, W5_RET_PRODUCT_NAME);
        str = (Uint8*)&regVal;
        pAttr->productName[0] = str[3];
        pAttr->productName[1] = str[2];
        pAttr->productName[2] = str[1];
        pAttr->productName[3] = str[0];
        pAttr->productName[4] = 0;
        pAttr->productId       = WaveVpuGetProductId(coreIdx);
        pAttr->productVersion  = VpuReadReg(coreIdx, W5_RET_PRODUCT_VERSION);
        pAttr->fwVersion       = VpuReadReg(coreIdx, W5_RET_FW_VERSION);
        pAttr->customerId      = VpuReadReg(coreIdx, W5_RET_CUSTOMER_ID);
        pAttr->hwConfigDef0    = VpuReadReg(coreIdx, W5_RET_STD_DEF0);
        pAttr->hwConfigDef1    = VpuReadReg(coreIdx, W5_RET_STD_DEF1);
        pAttr->hwConfigFeature = VpuReadReg(coreIdx, W5_RET_CONF_FEATURE);
        pAttr->hwConfigDate    = VpuReadReg(coreIdx, W5_RET_CONF_DATE);
        pAttr->hwConfigRev     = VpuReadReg(coreIdx, W5_RET_CONF_REVISION);
        pAttr->hwConfigType    = VpuReadReg(coreIdx, W5_RET_CONF_TYPE);
        pAttr->supportHEVC10bitEnc = (pAttr->hwConfigFeature>>3)&1;
        if (pAttr->hwConfigRev > 167455) {    // 20190321
            pAttr->supportAVC10bitEnc = (pAttr->hwConfigFeature>>11)&1;
        } else {
            pAttr->supportAVC10bitEnc = pAttr->supportHEVC10bitEnc;
        }
        pAttr->supportGDIHW    = TRUE;
        pAttr->supportDecoders = (1<<STD_HEVC);
        if (pAttr->productId == PRODUCT_ID_512) {
            pAttr->supportDecoders |= (1<<STD_VP9);
        }
        if (pAttr->productId == PRODUCT_ID_515) {
            pAttr->supportDecoders |= (1<<STD_VP9);
            pAttr->supportDecoders |= (1<<STD_AVS2);
        }
        pAttr->supportEncoders = 0;
        if (pAttr->productId == PRODUCT_ID_521) {
            pAttr->supportDecoders |= (1<<STD_AVC);
            pAttr->supportEncoders  = (1<<STD_HEVC);
            pAttr->supportEncoders |= (1<<STD_AVC);
            pAttr->supportBackbone  = TRUE;
        }
        if (pAttr->productId == PRODUCT_ID_511) {
            pAttr->supportDecoders |= (1<<STD_AVC);
            if ((pAttr->hwConfigDef0>>16)&1) {
                pAttr->supportBackbone = TRUE;
            }
        }
        if (pAttr->productId == PRODUCT_ID_517) {
            pAttr->supportDecoders |= (1 << STD_VP9);
            pAttr->supportDecoders |= (1 << STD_AVS2);
            pAttr->supportDecoders |= (1 << STD_AVC);
            pAttr->supportDecoders |= (1 << STD_AV1);
            pAttr->supportBackbone  = TRUE;
        }
        pAttr->support2AlignScaler      = (BOOL)((pAttr->hwConfigDef0>>23)&0x01);
        pAttr->supportVcoreBackbone     = (BOOL)((pAttr->hwConfigDef0>>22)&0x01);
        pAttr->supportCommandQueue      = TRUE;
        pAttr->supportFBCBWOptimization = (BOOL)((pAttr->hwConfigDef1>>15)&0x01);
        pAttr->supportNewTimer          = (BOOL)((pAttr->hwConfigDef1>>27)&0x01);
        pAttr->supportWTL               = TRUE;
        pAttr->supportDualCore          = (BOOL)((pAttr->hwConfigDef1>>26)&0x01);
        pAttr->supportTiled2Linear      = FALSE;
        pAttr->supportMapTypes          = FALSE;
        pAttr->support128bitBus         = TRUE;
        pAttr->supportThumbnailMode     = TRUE;
        pAttr->supportEndianMask        = (Uint32)((1<<VDI_LITTLE_ENDIAN) | (1<<VDI_BIG_ENDIAN) | (1<<VDI_32BIT_LITTLE_ENDIAN) | (1<<VDI_32BIT_BIG_ENDIAN) | (0xffffUL<<16));
        pAttr->supportBitstreamMode     = (1<<BS_MODE_INTERRUPT) | (1<<BS_MODE_PIC_END);
        pAttr->framebufferCacheType     = FramebufCacheNone;
        pAttr->bitstreamBufferMargin    = 0;
        pAttr->maxNumVcores             = MAX_NUM_VCORE;
        pAttr->numberOfMemProtectRgns   = 10;
    }

    return ret;
}

RetCode Wave5VpuGetVersion(Uint32 coreIdx, Uint32* versionInfo, Uint32* revision)
{
    Uint32 regVal;

    VpuWriteReg(coreIdx, W5_QUERY_OPTION, GET_VPU_INFO);
    VpuWriteReg(coreIdx, W5_VPU_BUSY_STATUS, 1);
    VpuWriteReg(coreIdx, W5_COMMAND, W5_QUERY);
    VpuWriteReg(coreIdx, W5_VPU_HOST_INT_REQ, 1);
    if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
        VLOG(ERR, "Wave5VpuGetVersion timeout\n");
        return RETCODE_VPU_RESPONSE_TIMEOUT;
    }

    if (VpuReadReg(coreIdx, W5_RET_SUCCESS) == FALSE) {
        VLOG(ERR, "Wave5VpuGetVersion FALSE\n");
        return RETCODE_QUERY_FAILURE;
    }

    regVal = VpuReadReg(coreIdx, W5_RET_FW_VERSION);
    if (versionInfo != NULL) {
        *versionInfo = 0;
    }
    if (revision != NULL) {
        *revision = regVal;
    }

    return RETCODE_SUCCESS;
}
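
/*
 * Wave5VpuInit: cold-boot initialization for one core. Copies the firmware
 * image into the common code buffer, programs the code/temp buffer addresses
 * and the remap window, unmasks the encoder/decoder interrupts, then issues
 * W5_INIT_VPU and waits for the firmware to report success before reading
 * back the core attributes.
 */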
RetCode Wave5VpuInit(Uint32 coreIdx, void* firmware, Uint32 size)
{
    vpu_buffer_t vb;
    PhysicalAddress codeBase, tempBase;
    Uint32 codeSize, tempSize;
    Uint32 i, regVal, remapSize;
    Uint32 hwOption = 0;
    RetCode ret = RETCODE_SUCCESS;

    vdi_get_common_memory(coreIdx, &vb);
    codeBase = vb.phys_addr;
    /* ALIGN TO 4KB */
    codeSize = (WAVE5_MAX_CODE_BUF_SIZE & ~0xfff);
    if (codeSize < size*2) {
        return RETCODE_INSUFFICIENT_RESOURCE;
    }
    tempBase = vb.phys_addr + WAVE5_TEMPBUF_OFFSET;
    tempSize = WAVE5_TEMPBUF_SIZE;

    VLOG(INFO, "\nVPU INIT Start!!!\n");

    VpuWriteMem(coreIdx, codeBase, (unsigned char*)firmware, size*2, VDI_128BIT_LITTLE_ENDIAN);
    vdi_set_bit_firmware_to_pm(coreIdx, (Uint16*)firmware);

    regVal = 0;
    VpuWriteReg(coreIdx, W5_PO_CONF, regVal);

    /* clear registers */
    for (i = W5_CMD_REG_BASE; i < W5_CMD_REG_END; i += 4)
    {
#if defined(SUPPORT_SW_UART) || defined(SUPPORT_SW_UART_V2)
        if (i == W5_SW_UART_STATUS)
            continue;
#endif
        VpuWriteReg(coreIdx, i, 0x00);
    }

    /* remap page size */
    remapSize = (codeSize >> 12) & 0x1ff;
    regVal = 0x80000000 | (WAVE5_UPPER_PROC_AXI_ID<<20) | (0 << 16) | (W5_REMAP_CODE_INDEX<<12) | (1<<11) | remapSize;
    VpuWriteReg(coreIdx, W5_VPU_REMAP_CTRL,  regVal);
    VpuWriteReg(coreIdx, W5_VPU_REMAP_VADDR, 0x00000000);    /* DO NOT CHANGE! */
    VpuWriteReg(coreIdx, W5_VPU_REMAP_PADDR, codeBase);
    VpuWriteReg(coreIdx, W5_ADDR_CODE_BASE,  codeBase);
    VpuWriteReg(coreIdx, W5_CODE_SIZE,       codeSize);
    VpuWriteReg(coreIdx, W5_CODE_PARAM,      (WAVE5_UPPER_PROC_AXI_ID<<4) | 0);
    VpuWriteReg(coreIdx, W5_ADDR_TEMP_BASE,  tempBase);
    VpuWriteReg(coreIdx, W5_TEMP_SIZE,       tempSize);
    VpuWriteReg(coreIdx, W5_TIMEOUT_CNT,     0xffff);

    VpuWriteReg(coreIdx, W5_HW_OPTION, hwOption);

    /* Interrupt */
    // encoder
    regVal  = (1<<INT_WAVE5_ENC_SET_PARAM);
    regVal |= (1<<INT_WAVE5_ENC_PIC);
    regVal |= (1<<INT_WAVE5_BSBUF_FULL);
    regVal |= (1<<INT_WAVE5_ENC_LOW_LATENCY);
#ifdef SUPPORT_SOURCE_RELEASE_INTERRUPT
    regVal |= (1<<INT_WAVE5_ENC_SRC_RELEASE);
#endif
    // decoder
    regVal |= (1<<INT_WAVE5_INIT_SEQ);
    regVal |= (1<<INT_WAVE5_DEC_PIC);
    regVal |= (1<<INT_WAVE5_BSBUF_EMPTY);
    VpuWriteReg(coreIdx, W5_VPU_VINT_ENABLE, regVal);

    regVal = VpuReadReg(coreIdx, W5_VPU_RET_VPU_CONFIG0);
    if (((regVal>>16)&1) == 1) {
        regVal = ((WAVE5_PROC_AXI_ID << 28)  |
                  (WAVE5_PRP_AXI_ID << 24)   |
                  (WAVE5_FBD_Y_AXI_ID << 20) |
                  (WAVE5_FBC_Y_AXI_ID << 16) |
                  (WAVE5_FBD_C_AXI_ID << 12) |
                  (WAVE5_FBC_C_AXI_ID << 8)  |
                  (WAVE5_PRI_AXI_ID << 4)    |
                  (WAVE5_SEC_AXI_ID << 0));
        vdi_fio_write_register(coreIdx, W5_BACKBONE_PROG_AXI_ID, regVal);
    }

    if (vdi_get_sram_memory(coreIdx, &vb) < 0)    // get SRAM base/size
        return RETCODE_INSUFFICIENT_RESOURCE;

    VpuWriteReg(coreIdx, W5_ADDR_SEC_AXI, vb.phys_addr);
    VpuWriteReg(coreIdx, W5_SEC_AXI_SIZE, vb.size);
    VpuWriteReg(coreIdx, W5_VPU_BUSY_STATUS, 1);
    VpuWriteReg(coreIdx, W5_COMMAND, W5_INIT_VPU);
    VpuWriteReg(coreIdx, W5_VPU_REMAP_CORE_START, 1);
    if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
        VLOG(ERR, "VPU init(W5_VPU_REMAP_CORE_START) timeout\n");
        return RETCODE_VPU_RESPONSE_TIMEOUT;
    }

    regVal = VpuReadReg(coreIdx, W5_RET_SUCCESS);
    if (regVal == 0) {
        Uint32 reasonCode = VpuReadReg(coreIdx, W5_RET_FAIL_REASON);
        VLOG(ERR, "VPU init(W5_RET_SUCCESS) failed(%d) REASON CODE(%08x)\n", regVal, reasonCode);
        return RETCODE_FAILURE;
    }

    ret = SetupWave5Properties(coreIdx);

    return ret;
}
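
/*
 * Wave5VpuBuildUpDecParam: per-instance decoder setup. Selects the sequence
 * change mask for the chosen bitstream format, allocates (or attaches an
 * externally provided) work buffer, programs the bitstream buffer
 * address/size and endianness, then issues W5_CREATE_INSTANCE.
 */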
RetCode Wave5VpuBuildUpDecParam(CodecInst* instance, DecOpenParam* param)
{
    RetCode ret = RETCODE_SUCCESS;
    DecInfo* pDecInfo;
    VpuAttr* pAttr = &g_VpuCoreAttributes[instance->coreIdx];
    Uint32 bsEndian = 0;
    vpu_buffer_t vb;

    pDecInfo = VPU_HANDLE_TO_DECINFO(instance);

    pDecInfo->streamRdPtrRegAddr      = W5_RET_DEC_BS_RD_PTR;
    pDecInfo->streamWrPtrRegAddr      = W5_BS_WR_PTR;
    pDecInfo->frameDisplayFlagRegAddr = W5_RET_DEC_DISP_IDC;
    pDecInfo->currentPC               = W5_VCPU_CUR_PC;
    pDecInfo->busyFlagAddr            = W5_VPU_BUSY_STATUS;

    if ((pAttr->supportDecoders & (1<<param->bitstreamFormat)) == 0)
        return RETCODE_NOT_SUPPORTED_FEATURE;

    switch (param->bitstreamFormat) {
    case STD_HEVC:
        pDecInfo->seqChangeMask = SEQ_CHANGE_ENABLE_ALL_HEVC;
        break;
    case STD_VP9:
        pDecInfo->seqChangeMask = SEQ_CHANGE_ENABLE_ALL_VP9;
        break;
    case STD_AVS2:
        pDecInfo->seqChangeMask = SEQ_CHANGE_ENABLE_ALL_AVS2;
        break;
    case STD_AVC:
        pDecInfo->seqChangeMask = SEQ_CHANGE_ENABLE_ALL_AVC;
        break;
    case STD_SVAC:
        pDecInfo->seqChangeMask = SEQ_CHANGE_ENABLE_ALL_SVAC;
        break;
    case STD_AV1:
        pDecInfo->seqChangeMask = SEQ_CHANGE_ENABLE_ALL_AV1;
        break;
    default:
        return RETCODE_NOT_SUPPORTED_FEATURE;
    }

    pDecInfo->scaleWidth  = 0;
    pDecInfo->scaleHeight = 0;

    if (param->vbWork.size > 0) {
        pDecInfo->vbWork = param->vbWork;
        pDecInfo->workBufferAllocExt = TRUE;
        vdi_attach_dma_memory(instance->coreIdx, &param->vbWork);
    }
    else {
        if (instance->productId == PRODUCT_ID_512) {
            pDecInfo->vbWork.size = WAVE512DEC_WORKBUF_SIZE;
        }
        else if (instance->productId == PRODUCT_ID_515) {
            pDecInfo->vbWork.size = WAVE515DEC_WORKBUF_SIZE;
        }
        else if (instance->productId == PRODUCT_ID_517) {
            pDecInfo->vbWork.size = (Uint32)WAVE521DEC_WORKBUF_SIZE;    // FIX ME
        }
        else if (instance->productId == PRODUCT_ID_521) {
            pDecInfo->vbWork.size = (Uint32)WAVE521DEC_WORKBUF_SIZE;    // FIX ME
        }
        else if (instance->productId == PRODUCT_ID_511) {
            pDecInfo->vbWork.size = (Uint32)WAVE521DEC_WORKBUF_SIZE;    // FIX ME
        }
        pDecInfo->workBufferAllocExt = FALSE;
        APIDPRINT("ALLOC MEM - WORK\n");
        if (vdi_allocate_dma_memory(instance->coreIdx, &pDecInfo->vbWork, DEC_WORK, instance->instIndex) < 0) {
            pDecInfo->vbWork.base      = 0;
            pDecInfo->vbWork.phys_addr = 0;
            pDecInfo->vbWork.size      = 0;
            pDecInfo->vbWork.virt_addr = 0;
            return RETCODE_INSUFFICIENT_RESOURCE;
        }
    }

    VpuWriteReg(instance->coreIdx, W5_CMD_DEC_VCORE_INFO, 1);
    VpuWriteReg(instance->coreIdx, W5_CMD_NUM_CQ_DEPTH_M1, COMMAND_QUEUE_DEPTH - 1);

    vdi_get_common_memory(instance->coreIdx, &vb);
    pDecInfo->vbTemp.phys_addr = vb.phys_addr + WAVE5_TEMPBUF_OFFSET;
    pDecInfo->vbTemp.size      = WAVE5_TEMPBUF_SIZE;

    vdi_clear_memory(instance->coreIdx, pDecInfo->vbWork.phys_addr, pDecInfo->vbWork.size, 0);

    VpuWriteReg(instance->coreIdx, W5_ADDR_WORK_BASE, pDecInfo->vbWork.phys_addr);
    VpuWriteReg(instance->coreIdx, W5_WORK_SIZE,      pDecInfo->vbWork.size);

    VpuWriteReg(instance->coreIdx, W5_CMD_DEC_BS_START_ADDR, pDecInfo->streamBufStartAddr);
    VpuWriteReg(instance->coreIdx, W5_CMD_DEC_BS_SIZE,       pDecInfo->streamBufSize);

    bsEndian = vdi_convert_endian(instance->coreIdx, param->streamEndian);
    /* NOTE: When endian mode is 0, SDMA reads MSB first */
    bsEndian = (~bsEndian & VDI_128BIT_ENDIAN_MASK);
    VpuWriteReg(instance->coreIdx, W5_CMD_BS_PARAM, bsEndian);

    VpuWriteReg(instance->coreIdx, W5_VPU_BUSY_STATUS, 1);
    VpuWriteReg(instance->coreIdx, W5_RET_SUCCESS, 0);    // for debug

    Wave5BitIssueCommand(instance, W5_CREATE_INSTANCE);
    if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {    // Check QUEUE_DONE
        if (instance->loggingEnable)
            vdi_log(instance->coreIdx, W5_CREATE_INSTANCE, 2);
        vdi_free_dma_memory(instance->coreIdx, &pDecInfo->vbWork, DEC_WORK, instance->instIndex);
        return RETCODE_VPU_RESPONSE_TIMEOUT;
    }

    if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == FALSE) {    // failed to add the command into the VCPU queue
        vdi_free_dma_memory(instance->coreIdx, &pDecInfo->vbWork, DEC_WORK, instance->instIndex);
        ret = RETCODE_FAILURE;
    }

    pDecInfo->productCode = VpuReadReg(instance->coreIdx, W5_PRODUCT_NUMBER);

    return ret;
}
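
/*
 * Wave5VpuDecInitSeq: queues an INIT_SEQ command so the firmware parses the
 * sequence header. The bitstream option selects explicit-end handling for
 * pic-end mode (and for interrupt mode when seqInitEscape is set), and the
 * queue-status / fail-reason registers are translated into RetCodes.
 */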
RetCode Wave5VpuDecInitSeq(CodecInst* instance)
{
    RetCode ret = RETCODE_SUCCESS;
    DecInfo* pDecInfo;
    Uint32 cmdOption = INIT_SEQ_NORMAL, bsOption;
    Uint32 regVal;

    if (instance == NULL)
        return RETCODE_INVALID_PARAM;

    pDecInfo = VPU_HANDLE_TO_DECINFO(instance);
    if (pDecInfo->thumbnailMode)
        cmdOption = INIT_SEQ_W_THUMBNAIL;

    /* Set attributes of bitstream buffer controller */
    bsOption = 0;
    switch (pDecInfo->openParam.bitstreamMode) {
    case BS_MODE_INTERRUPT:
        if (pDecInfo->seqInitEscape == TRUE)
            bsOption = BSOPTION_ENABLE_EXPLICIT_END;
        break;
    case BS_MODE_PIC_END:
        bsOption = BSOPTION_ENABLE_EXPLICIT_END;
        break;
    default:
        return RETCODE_INVALID_PARAM;
    }

    if (pDecInfo->streamEndflag == 1)
        bsOption = 3;

    VpuWriteReg(instance->coreIdx, W5_BS_RD_PTR, pDecInfo->streamRdPtr);
    VpuWriteReg(instance->coreIdx, W5_BS_WR_PTR, pDecInfo->streamWrPtr);

    if (instance->codecMode == W_AV1_DEC) {
        bsOption |= ((pDecInfo->openParam.av1Format) << 2);
    }
    VpuWriteReg(instance->coreIdx, W5_BS_OPTION, (1UL<<31) | bsOption);

    VpuWriteReg(instance->coreIdx, W5_COMMAND_OPTION, cmdOption);
    VpuWriteReg(instance->coreIdx, W5_CMD_DEC_USER_MASK, pDecInfo->userDataEnable);

    Wave5BitIssueCommand(instance, W5_INIT_SEQ);
    if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {    // Check QUEUE_DONE
        if (instance->loggingEnable)
            vdi_log(instance->coreIdx, W5_INIT_SEQ, 2);
        return RETCODE_VPU_RESPONSE_TIMEOUT;
    }

    regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_STATUS);

    pDecInfo->instanceQueueCount = (regVal>>16) & 0xff;
    pDecInfo->reportQueueCount   = (regVal & 0xffff);

    if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == FALSE) {    // failed to add the command into the VCPU queue
        regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
        if (regVal != WAVE5_QUEUEING_FAIL)
            VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);

        if (regVal == WAVE5_QUEUEING_FAIL)
            ret = RETCODE_QUEUEING_FAILURE;
        else if (regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
            ret = RETCODE_MEMORY_ACCESS_VIOLATION;
        else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
            ret = RETCODE_VPU_RESPONSE_TIMEOUT;
        else if (regVal == WAVE5_SYSERR_DEC_VLC_BUF_FULL)
            ret = RETCODE_VLC_BUF_FULL;
        else if (regVal == WAVE5_ERROR_FW_FATAL)
            ret = RETCODE_ERROR_FW_FATAL;
        else
            ret = RETCODE_FAILURE;
    }

    return ret;
}
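
/*
 * GetDecSequenceResult: unpacks the W5_RET_DEC_* report registers into
 * DecInitialInfo (picture size, crop rectangle, profile/level, bit depth,
 * frame rate, VLC/param buffer sizes), with per-codec fixups for HEVC,
 * AVS2 and AVC.
 */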
static void GetDecSequenceResult(CodecInst* instance, DecInitialInfo* info)
{
    DecInfo* pDecInfo = &instance->CodecInfo->decInfo;
    Uint32 regVal;
    Uint32 profileCompatibilityFlag;
    Uint32 left, right, top, bottom;
    Uint32 progressiveFlag, interlacedFlag, outputBitDepthMinus8, frameMbsOnlyFlag = 0;

    pDecInfo->streamRdPtr = info->rdPtr = ProductVpuDecGetRdPtr(instance);
    pDecInfo->frameDisplayFlag = VpuReadReg(instance->coreIdx, W5_RET_DEC_DISP_IDC);
    /*regVal = VpuReadReg(instance->coreIdx, W4_BS_OPTION);
    pDecInfo->streamEndflag = (regVal&0x02) ? TRUE : FALSE;*/

    regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_PIC_SIZE);
    info->picWidth  = ((regVal >> 16) & 0xffff);
    info->picHeight = (regVal & 0xffff);
    info->minFrameBufferCount = VpuReadReg(instance->coreIdx, W5_RET_DEC_NUM_REQUIRED_FB);
    info->frameBufDelay       = VpuReadReg(instance->coreIdx, W5_RET_DEC_NUM_REORDER_DELAY);

    regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_CROP_LEFT_RIGHT);
    left   = (regVal >> 16) & 0xffff;
    right  = regVal & 0xffff;
    regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_CROP_TOP_BOTTOM);
    top    = (regVal >> 16) & 0xffff;
    bottom = regVal & 0xffff;

    info->picCropRect.left   = left;
    info->picCropRect.right  = info->picWidth - right;
    info->picCropRect.top    = top;
    info->picCropRect.bottom = info->picHeight - bottom;

    regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_SEQ_PARAM);
    info->level              = regVal & 0xff;
    interlacedFlag           = (regVal>>10) & 0x1;
    progressiveFlag          = (regVal>>11) & 0x1;
    profileCompatibilityFlag = (regVal>>12) & 0xff;
    info->profile            = (regVal >> 24) & 0x1f;
    info->tier               = (regVal >> 29) & 0x01;
    outputBitDepthMinus8     = (regVal >> 30) & 0x03;

    if (instance->codecMode == W_AVC_DEC) {
        frameMbsOnlyFlag             = (regVal >> 8)  & 0x01;
        info->constraint_set_flag[0] = (regVal >> 16) & 0x01;
        info->constraint_set_flag[1] = (regVal >> 17) & 0x01;
        info->constraint_set_flag[2] = (regVal >> 18) & 0x01;
        info->constraint_set_flag[3] = (regVal >> 19) & 0x01;
    }

    info->fRateNumerator   = VpuReadReg(instance->coreIdx, W5_RET_DEC_FRAME_RATE_NR);
    info->fRateDenominator = VpuReadReg(instance->coreIdx, W5_RET_DEC_FRAME_RATE_DR);

    regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_COLOR_SAMPLE_INFO);
    info->lumaBitdepth    = (regVal>>0)  & 0x0f;
    info->chromaBitdepth  = (regVal>>4)  & 0x0f;
    info->chromaFormatIDC = (regVal>>8)  & 0x0f;
    info->aspectRateInfo  = (regVal>>16) & 0xff;
    info->isExtSAR        = (info->aspectRateInfo == 255 ? TRUE : FALSE);
    if (info->isExtSAR == TRUE) {
        info->aspectRateInfo = VpuReadReg(instance->coreIdx, W5_RET_DEC_ASPECT_RATIO);    /* [0:15] - vertical size, [16:31] - horizontal size */
    }
    info->bitRate = VpuReadReg(instance->coreIdx, W5_RET_DEC_BIT_RATE);

    regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_SUB_LAYER_INFO);
    info->maxSubLayers = (regVal>>8) & 0xff;

    if (instance->codecMode == W_HEVC_DEC) {
        /* Guessing Profile */
        if (info->profile == 0) {
            if      ((profileCompatibilityFlag&0x06) == 0x06) info->profile = 1;    /* Main profile */
            else if ((profileCompatibilityFlag&0x04) == 0x04) info->profile = 2;    /* Main10 profile */
            else if ((profileCompatibilityFlag&0x08) == 0x08) info->profile = 3;    /* Main Still Picture profile */
            else                                              info->profile = 1;    /* For old version HM */
        }

        if (progressiveFlag == 1 && interlacedFlag == 0)
            info->interlace = 0;
        else
            info->interlace = 1;
    }
    else if (instance->codecMode == W_AVS2_DEC) {
        if ((info->lumaBitdepth == 10) && (outputBitDepthMinus8 == 2))
            info->outputBitDepth = 10;
        else
            info->outputBitDepth = 8;

        if (progressiveFlag == 1)
            info->interlace = 0;
        else
            info->interlace = 1;
    }
    else if (instance->codecMode == W_AVC_DEC) {
        if (frameMbsOnlyFlag == 1)
            info->interlace = 0;
        else
            info->interlace = 0;    // AVC on WAVE5 can't support interlace
    }

    info->vlcBufSize   = VpuReadReg(instance->coreIdx, W5_RET_VLC_BUF_SIZE);
    info->paramBufSize = VpuReadReg(instance->coreIdx, W5_RET_PARAM_BUF_SIZE);
    pDecInfo->vlcBufSize   = info->vlcBufSize;
    pDecInfo->paramBufSize = info->paramBufSize;

    return;
}

RetCode Wave5VpuDecGetSeqInfo(CodecInst* instance, DecInitialInfo* info)
{
    RetCode ret = RETCODE_SUCCESS;
    Uint32 regVal, i;
    DecInfo* pDecInfo;

    pDecInfo = VPU_HANDLE_TO_DECINFO(instance);

    VpuWriteReg(instance->coreIdx, W5_CMD_DEC_ADDR_REPORT_BASE, pDecInfo->userDataBufAddr);
    VpuWriteReg(instance->coreIdx, W5_CMD_DEC_REPORT_SIZE,      pDecInfo->userDataBufSize);
    VpuWriteReg(instance->coreIdx, W5_CMD_DEC_REPORT_PARAM,     VPU_USER_DATA_ENDIAN & VDI_128BIT_ENDIAN_MASK);

    // Send QUERY cmd
    ret = SendQuery(instance, GET_RESULT);
    if (ret != RETCODE_SUCCESS) {
        regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
        if (regVal != WAVE5_QUEUEING_FAIL)
            VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);

        if (regVal == WAVE5_RESULT_NOT_READY)
            return RETCODE_REPORT_NOT_READY;
        else if (regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
            return RETCODE_MEMORY_ACCESS_VIOLATION;
        else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
            return RETCODE_VPU_RESPONSE_TIMEOUT;
        else if (regVal == WAVE5_SYSERR_DEC_VLC_BUF_FULL)
            return RETCODE_VLC_BUF_FULL;
        else if (regVal == WAVE5_ERROR_FW_FATAL)
            return RETCODE_ERROR_FW_FATAL;
        else
            return RETCODE_QUERY_FAILURE;
    }

    if (instance->loggingEnable)
        vdi_log(instance->coreIdx, W5_INIT_SEQ, 0);

    regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_STATUS);

    pDecInfo->instanceQueueCount = (regVal>>16) & 0xff;
    pDecInfo->reportQueueCount   = (regVal & 0xffff);

    if (VpuReadReg(instance->coreIdx, W5_RET_DEC_DECODING_SUCCESS) != 1) {
#ifdef SUPPORT_SW_UART
        ret = RETCODE_FAILURE;
#else
        info->seqInitErrReason = VpuReadReg(instance->coreIdx, W5_RET_DEC_ERR_INFO);
        ret = RETCODE_FAILURE;
#endif
    }
    else {
#ifdef SUPPORT_SW_UART
        info->warnInfo = 0;
#else
        info->warnInfo = VpuReadReg(instance->coreIdx, W5_RET_DEC_WARN_INFO);
#endif
    }

    // Get Sequence Info
    info->userDataSize    = 0;
    info->userDataNum     = 0;
    info->userDataBufFull = 0;
    info->userDataHeader  = VpuReadReg(instance->coreIdx, W5_RET_DEC_USERDATA_IDC);
    if (info->userDataHeader != 0) {
        regVal = info->userDataHeader;
        for (i = 0; i < 32; i++) {
            if (i == 1) {
                if (regVal & (1<<i))
                    info->userDataBufFull = 1;
            }
            else {
                if (regVal & (1<<i))
                    info->userDataNum++;
            }
        }
        info->userDataSize = pDecInfo->userDataBufSize;
    }

    if (instance->codecMode == W_SVAC_DEC) {
        regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_SUB_LAYER_INFO);
        info->spatialSvcEnable = (regVal>>16) & 0x1;
        info->spatialSvcMode   = (regVal>>17) & 0x1;
    }

    GetDecSequenceResult(instance, info);

    return ret;
}
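
/*
 * Wave5VpuDecRegisterFramebuffer: registers the frame buffers with the
 * firmware. For compressed (FBC) map types it first allocates the per-frame
 * MV colocated buffers, the luma/chroma FBC offset tables and one task
 * buffer, then hands the buffer addresses to the VPU in batches of eight
 * via W5_SET_FB, and finally configures the secondary AXI (SRAM) usage.
 */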
  635. RetCode Wave5VpuDecRegisterFramebuffer(CodecInst* inst, FrameBuffer* fbArr, TiledMapType mapType, Uint32 count)
  636. {
  637. RetCode ret = RETCODE_SUCCESS;
  638. DecInfo* pDecInfo = &inst->CodecInfo->decInfo;
  639. DecInitialInfo* sequenceInfo = &inst->CodecInfo->decInfo.initialInfo;
  640. Int32 q, j, i, remain, idx, svcBLbaseIdx;
  641. Uint32 mvCount;
  642. Uint32 k;
  643. Int32 coreIdx, startNo, endNo;
  644. Uint32 regVal, cbcrInterleave, nv21, picSize;
  645. Uint32 endian, yuvFormat = 0;
  646. Uint32 addrY, addrCb, addrCr;
  647. Uint32 mvColSize, fbcYTblSize, fbcCTblSize;
  648. vpu_buffer_t vbBuffer;
  649. Uint32 stride;
  650. Uint32 colorFormat = 0;
  651. Uint32 outputFormat = 0;
  652. Uint32 axiID;
  653. Uint32 initPicWidth = 0, initPicHeight = 0;
  654. Uint32 scalerFlag = 0;
  655. Uint32 pixelOrder=1;
  656. Uint32 bwbFlag = (mapType == LINEAR_FRAME_MAP) ? 1 : 0;
  657. coreIdx = inst->coreIdx;
  658. axiID = pDecInfo->openParam.virtAxiID;
  659. cbcrInterleave = pDecInfo->openParam.cbcrInterleave;
  660. nv21 = pDecInfo->openParam.nv21;
  661. mvColSize = fbcYTblSize = fbcCTblSize = 0;
  662. initPicWidth = pDecInfo->initialInfo.picWidth;
  663. initPicHeight = pDecInfo->initialInfo.picHeight;
  664. if (inst->codecMode == W_SVAC_DEC && mapType == COMPRESSED_FRAME_MAP_SVAC_SVC_BL) {
  665. initPicWidth = pDecInfo->initialInfo.picWidth>>1; // BL size is half as EL
  666. initPicHeight = pDecInfo->initialInfo.picHeight>>1;
  667. svcBLbaseIdx = count;
  668. }
  669. else
  670. svcBLbaseIdx = 0;
  671. if (mapType >= COMPRESSED_FRAME_MAP) {
  672. cbcrInterleave = 0;
  673. nv21 = 0;
  674. switch (inst->codecMode) {
  675. case W_HEVC_DEC: mvColSize = WAVE5_DEC_HEVC_MVCOL_BUF_SIZE(initPicWidth, initPicHeight); break;
  676. case W_VP9_DEC: mvColSize = WAVE5_DEC_VP9_MVCOL_BUF_SIZE(initPicWidth, initPicHeight); break;
  677. case W_AVS2_DEC: mvColSize = WAVE5_DEC_AVS2_MVCOL_BUF_SIZE(initPicWidth, initPicHeight); break;
  678. case W_SVAC_DEC: mvColSize = 0; break; // case of SVAC, mvColbuffer included in WorkBuffer due to sequence change.
  679. case W_AVC_DEC: mvColSize = WAVE5_DEC_AVC_MVCOL_BUF_SIZE(initPicWidth, initPicHeight); break;
  680. case W_AV1_DEC: mvColSize = WAVE5_DEC_AV1_MVCOL_BUF_SIZE(initPicWidth, initPicHeight); break;
  681. default:
  682. return RETCODE_NOT_SUPPORTED_FEATURE;
  683. }
  684. mvColSize = VPU_ALIGN16(mvColSize);
  685. vbBuffer.phys_addr = 0;
  686. if (inst->codecMode == W_HEVC_DEC || inst->codecMode == W_AVS2_DEC || inst->codecMode == W_VP9_DEC || inst->codecMode == W_AVC_DEC || inst->codecMode == W_AV1_DEC) {
  687. vbBuffer.size = ((mvColSize+4095)&~4095)+4096; /* 4096 is a margin */
  688. mvCount = count;
  689. APIDPRINT("ALLOC MEM - MV\n");
  690. for (k=0 ; k<mvCount ; k++) {
  691. if ( pDecInfo->vbMV[k].size == 0) {
  692. if (vdi_allocate_dma_memory(inst->coreIdx, &vbBuffer, DEC_MV, inst->instIndex) < 0)
  693. return RETCODE_INSUFFICIENT_RESOURCE;
  694. pDecInfo->vbMV[k] = vbBuffer;
  695. }
  696. }
  697. }
  698. if (pDecInfo->productCode == WAVE521C_DUAL_CODE) {
  699. Uint32 bgs_width = (pDecInfo->initialInfo.lumaBitdepth >8 ? 256 : 512);
  700. Uint32 ot_bg_width = 1024;
  701. Uint32 frm_width = VPU_CEIL(initPicWidth, 16);
  702. Uint32 frm_height = VPU_CEIL(initPicHeight, 16);
  703. Uint32 comp_frm_width = VPU_CEIL(VPU_CEIL(frm_width , 16) + 16, 16); // valid_width = align(width, 16), comp_frm_width = align(valid_width+pad_x, 16)
  704. Uint32 ot_frm_width = VPU_CEIL(comp_frm_width, ot_bg_width); // 1024 = offset table BG width
  705. // sizeof_offset_table()
  706. Uint32 ot_bg_height = 32;
  707. Uint32 bgs_height = (1<<14) / bgs_width / (pDecInfo->initialInfo.lumaBitdepth >8 ? 2 : 1);
  708. Uint32 comp_frm_height = VPU_CEIL(VPU_CEIL(frm_height, 4) + 4, bgs_height);
  709. Uint32 ot_frm_height = VPU_CEIL(comp_frm_height, ot_bg_height);
  710. fbcYTblSize = (ot_frm_width/16) * (ot_frm_height/4) *2;
  711. }
  712. else {
  713. switch (inst->codecMode) {
  714. case W_HEVC_DEC: fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(initPicWidth, initPicHeight); break;
  715. case W_VP9_DEC: fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(VPU_ALIGN64(initPicWidth), VPU_ALIGN64(initPicHeight)); break;
  716. case W_AVS2_DEC: fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(initPicWidth, initPicHeight); break;
  717. case W_SVAC_DEC: fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(VPU_ALIGN128(initPicWidth), VPU_ALIGN128(initPicHeight)); break;
  718. case W_AVC_DEC: fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(initPicWidth, initPicHeight); break;
  719. case W_AV1_DEC: fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(VPU_ALIGN16(initPicWidth), VPU_ALIGN8(initPicHeight)); break;
  720. default:
  721. return RETCODE_NOT_SUPPORTED_FEATURE;
  722. }
  723. fbcYTblSize = VPU_ALIGN16(fbcYTblSize);
  724. }
  725. vbBuffer.phys_addr = 0;
  726. vbBuffer.size = ((fbcYTblSize+4095)&~4095)+4096;
  727. APIDPRINT("ALLOC MEM - FBC Y TBL\n");
  728. for (k=0 ; k<count ; k++) {
  729. if ( pDecInfo->vbFbcYTbl[k+svcBLbaseIdx].size == 0) {
  730. if (vdi_allocate_dma_memory(inst->coreIdx, &vbBuffer, DEC_FBCY_TBL, inst->instIndex) < 0)
  731. return RETCODE_INSUFFICIENT_RESOURCE;
  732. pDecInfo->vbFbcYTbl[k+svcBLbaseIdx] = vbBuffer;
  733. }
  734. }
  735. if (pDecInfo->productCode == WAVE521C_DUAL_CODE) {
  736. Uint32 bgs_width = (pDecInfo->initialInfo.chromaBitdepth >8 ? 256 : 512);
  737. Uint32 ot_bg_width = 1024;
  738. Uint32 frm_width = VPU_CEIL(initPicWidth, 16);
  739. Uint32 frm_height = VPU_CEIL(initPicHeight, 16);
  740. Uint32 comp_frm_width = VPU_CEIL(VPU_CEIL(frm_width/2 , 16) + 16, 16); // valid_width = align(width, 16), comp_frm_width = align(valid_width+pad_x, 16)
  741. Uint32 ot_frm_width = VPU_CEIL(comp_frm_width, ot_bg_width); // 1024 = offset table BG width
  742. // sizeof_offset_table()
  743. Uint32 ot_bg_height = 32;
  744. Uint32 bgs_height = (1<<14) / bgs_width / (pDecInfo->initialInfo.chromaBitdepth >8 ? 2 : 1);
  745. Uint32 comp_frm_height = VPU_CEIL(VPU_CEIL(frm_height, 4) + 4, bgs_height);
  746. Uint32 ot_frm_height = VPU_CEIL(comp_frm_height, ot_bg_height);
  747. fbcCTblSize = (ot_frm_width/16) * (ot_frm_height/4) *2;
  748. }
  749. else {
  750. switch (inst->codecMode) {
  751. case W_HEVC_DEC: fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(initPicWidth, initPicHeight); break;
  752. case W_VP9_DEC: fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(VPU_ALIGN64(initPicWidth), VPU_ALIGN64(initPicHeight)); break;
  753. case W_AVS2_DEC: fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(initPicWidth, initPicHeight); break;
  754. case W_SVAC_DEC: fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(VPU_ALIGN64(initPicWidth), VPU_ALIGN64(initPicHeight)); break;
  755. case W_AVC_DEC: fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(initPicWidth, initPicHeight); break;
  756. case W_AV1_DEC: fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(VPU_ALIGN16(initPicWidth), VPU_ALIGN8(initPicHeight)); break;
  757. default:
  758. return RETCODE_NOT_SUPPORTED_FEATURE;
  759. }
  760. fbcCTblSize = VPU_ALIGN16(fbcCTblSize);
  761. }
  762. vbBuffer.phys_addr = 0;
  763. vbBuffer.size = ((fbcCTblSize+4095)&~4095)+4096;
  764. APIDPRINT("ALLOC MEM - FBC C TBL\n");
  765. for (k=0 ; k<count ; k++) {
  766. if ( pDecInfo->vbFbcCTbl[k+svcBLbaseIdx].size == 0) {
  767. if (vdi_allocate_dma_memory(inst->coreIdx, &vbBuffer, DEC_FBCC_TBL, inst->instIndex) < 0)
  768. return RETCODE_INSUFFICIENT_RESOURCE;
  769. pDecInfo->vbFbcCTbl[k+svcBLbaseIdx] = vbBuffer;
  770. }
  771. }
  772. picSize = (initPicWidth<<16)|(initPicHeight);
  773. // Allocate TaskBuffer
  774. vbBuffer.size = (Uint32)((pDecInfo->vlcBufSize * VLC_BUF_NUM) + (pDecInfo->paramBufSize * COMMAND_QUEUE_DEPTH));
  775. vbBuffer.phys_addr = 0;
  776. if (vdi_allocate_dma_memory(inst->coreIdx, &vbBuffer, DEC_TASK, inst->instIndex) < 0)
  777. return RETCODE_INSUFFICIENT_RESOURCE;
  778. pDecInfo->vbTask = vbBuffer;
  779. VpuWriteReg(coreIdx, W5_CMD_SET_FB_ADDR_TASK_BUF, pDecInfo->vbTask.phys_addr);
  780. VpuWriteReg(coreIdx, W5_CMD_SET_FB_TASK_BUF_SIZE, vbBuffer.size);
  781. }
  782. else
  783. {
  784. picSize = (initPicWidth<<16)|(initPicHeight);
  785. if (pDecInfo->scalerEnable == TRUE) {
  786. picSize = (pDecInfo->scaleWidth << 16) | (pDecInfo->scaleHeight);
  787. }
  788. }
  789. endian = vdi_convert_endian(coreIdx, fbArr[0].endian);
  790. VpuWriteReg(coreIdx, W5_PIC_SIZE, picSize);
  791. yuvFormat = 0; /* YUV420 8bit */
  792. if (mapType == LINEAR_FRAME_MAP) {
  793. BOOL justified = WTL_RIGHT_JUSTIFIED;
  794. Uint32 formatNo = WTL_PIXEL_8BIT;
  795. switch (pDecInfo->wtlFormat) {
  796. case FORMAT_420_P10_16BIT_MSB:
  797. case FORMAT_422_P10_16BIT_MSB:
  798. justified = WTL_RIGHT_JUSTIFIED;
  799. formatNo = WTL_PIXEL_16BIT;
  800. break;
  801. case FORMAT_420_P10_16BIT_LSB:
  802. case FORMAT_422_P10_16BIT_LSB:
  803. justified = WTL_LEFT_JUSTIFIED;
  804. formatNo = WTL_PIXEL_16BIT;
  805. break;
  806. case FORMAT_420_P10_32BIT_MSB:
  807. case FORMAT_422_P10_32BIT_MSB:
  808. justified = WTL_RIGHT_JUSTIFIED;
  809. formatNo = WTL_PIXEL_32BIT;
  810. break;
  811. case FORMAT_420_P10_32BIT_LSB:
  812. case FORMAT_422_P10_32BIT_LSB:
  813. justified = WTL_LEFT_JUSTIFIED;
  814. formatNo = WTL_PIXEL_32BIT;
  815. break;
  816. default:
  817. break;
  818. }
  819. yuvFormat = justified<<2 | formatNo;
  820. }
  821. stride = fbArr[0].stride;
  822. if (mapType >= COMPRESSED_FRAME_MAP) {
  823. if ( pDecInfo->chFbcFrameIdx != -1 )
  824. stride = fbArr[pDecInfo->chFbcFrameIdx].stride;
  825. } else {
  826. if ( pDecInfo->chBwbFrameIdx != -1 )
  827. stride = fbArr[pDecInfo->chBwbFrameIdx].stride;
  828. }
  829. if (mapType == LINEAR_FRAME_MAP) {
  830. scalerFlag = pDecInfo->scalerEnable;
  831. switch (pDecInfo->wtlFormat) {
  832. case FORMAT_422:
  833. case FORMAT_422_P10_16BIT_MSB:
  834. case FORMAT_422_P10_16BIT_LSB:
  835. case FORMAT_422_P10_32BIT_MSB:
  836. case FORMAT_422_P10_32BIT_LSB:
  837. colorFormat = 1;
  838. outputFormat = 0;
  839. outputFormat |= (nv21 << 1);
  840. outputFormat |= (cbcrInterleave << 0);
  841. break;
  842. default:
  843. colorFormat = 0;
  844. outputFormat = 0;
  845. outputFormat |= (nv21 << 1);
  846. outputFormat |= (cbcrInterleave << 0);
  847. break;
  848. }
  849. }
  850. regVal =
  851. (scalerFlag << 29) |
  852. (bwbFlag << 28) |
  853. (axiID << 24) |
  854. (pixelOrder << 23) | /* PIXEL ORDER in 128bit. first pixel in low address */
  855. (yuvFormat << 20) |
  856. (colorFormat << 19) |
  857. (outputFormat << 16) |
  858. (stride);
  859. VpuWriteReg(coreIdx, W5_COMMON_PIC_INFO, regVal); //// 0x008012c0
  860. remain = count;
  861. q = (remain+7)/8;
  862. idx = 0;
  863. for (j=0; j<q; j++) {
  864. regVal = (endian<<16) | (j==q-1)<<4 | ((j==0)<<3);//lint !e514
  865. regVal |= (pDecInfo->openParam.bwOptimization<<26);
  866. regVal |= (pDecInfo->initialInfo.spatialSvcEnable == TRUE ) ? (mapType == COMPRESSED_FRAME_MAP_SVAC_SVC_BL ? 0 : 1<<27) : 0 ;
  867. regVal |= (pDecInfo->initialInfo.spatialSvcEnable<<28);
  868. VpuWriteReg(coreIdx, W5_SFB_OPTION, regVal);
  869. startNo = j*8;
  870. endNo = startNo + (remain>=8 ? 8 : remain) - 1;
  871. VpuWriteReg(coreIdx, W5_SET_FB_NUM, (startNo<<8)|endNo);
  872. for (i=0; i<8 && i<remain; i++) {
  873. if (mapType == LINEAR_FRAME_MAP && pDecInfo->openParam.cbcrOrder == CBCR_ORDER_REVERSED) {
  874. addrY = fbArr[i+startNo].bufY;
  875. addrCb = fbArr[i+startNo].bufCr;
  876. addrCr = fbArr[i+startNo].bufCb;
  877. }
  878. else {
  879. addrY = fbArr[i+startNo].bufY;
  880. addrCb = fbArr[i+startNo].bufCb;
  881. addrCr = fbArr[i+startNo].bufCr;
  882. }
  883. VpuWriteReg(coreIdx, W5_ADDR_LUMA_BASE0 + (i<<4), addrY);
  884. VpuWriteReg(coreIdx, W5_ADDR_CB_BASE0 + (i<<4), addrCb);
  885. APIDPRINT("REGISTER FB[%02d] Y(0x%08x), Cb(0x%08x) ", i, addrY, addrCb);
  886. if (mapType >= COMPRESSED_FRAME_MAP) {
  887. VpuWriteReg(coreIdx, W5_ADDR_FBC_Y_OFFSET0 + (i<<4), pDecInfo->vbFbcYTbl[idx+svcBLbaseIdx].phys_addr); /* Luma FBC offset table */
  888. VpuWriteReg(coreIdx, W5_ADDR_FBC_C_OFFSET0 + (i<<4), pDecInfo->vbFbcCTbl[idx+svcBLbaseIdx].phys_addr); /* Chroma FBC offset table */
  889. VpuWriteReg(coreIdx, W5_ADDR_MV_COL0 + (i<<2), pDecInfo->vbMV[idx+svcBLbaseIdx].phys_addr);
  890. APIDPRINT("Yo(0x%08x) Co(0x%08x), Mv(0x%08x)\n",
891. pDecInfo->vbFbcYTbl[idx+svcBLbaseIdx].phys_addr, /* print the entries actually programmed above */
892. pDecInfo->vbFbcCTbl[idx+svcBLbaseIdx].phys_addr,
893. pDecInfo->vbMV[idx+svcBLbaseIdx].phys_addr);
  894. }
  895. else {
  896. VpuWriteReg(coreIdx, W5_ADDR_CR_BASE0 + (i<<4), addrCr);
  897. VpuWriteReg(coreIdx, W5_ADDR_FBC_C_OFFSET0 + (i<<4), 0);
  898. VpuWriteReg(coreIdx, W5_ADDR_MV_COL0 + (i<<2), 0);
  899. APIDPRINT("Cr(0x%08x)\n", addrCr);
  900. }
  901. idx++;
  902. }
  903. remain -= i;
  904. Wave5BitIssueCommand(inst, W5_SET_FB);
  905. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
  906. return RETCODE_VPU_RESPONSE_TIMEOUT;
  907. }
  908. }
  909. regVal = VpuReadReg(coreIdx, W5_RET_SUCCESS);
  910. if (regVal == 0) {
  911. return RETCODE_FAILURE;
  912. }
  913. if (ConfigSecAXIWave(coreIdx, inst->codecMode,
  914. &pDecInfo->secAxiInfo, initPicWidth, initPicHeight,
  915. sequenceInfo->profile, sequenceInfo->level) == 0) {
  916. return RETCODE_INSUFFICIENT_RESOURCE;
  917. }
  918. return ret;
  919. }
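/*
 * Descriptive note on the function below: Wave5VpuDecUpdateFramebuffer re-registers a single
 * FBC/linear frame-buffer pair plus its MV-col buffer, typically after a VP9 inter-frame
 * resolution change (non-VP9 codecs are rejected up front). It reallocates the MV-col and FBC
 * offset tables for the new picture size, programs the per-buffer base addresses, and issues
 * W5_SET_FB with W5_SFB_OPTION = 1 (UPDATE FRAMEBUFFER).
 */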
  920. RetCode Wave5VpuDecUpdateFramebuffer(CodecInst* inst, FrameBuffer* fbcFb, FrameBuffer* linearFb, Int32 mvIndex, Int32 picWidth, Int32 picHeight)
  921. {
  922. RetCode ret = RETCODE_SUCCESS;
  923. DecInfo* pDecInfo = &inst->CodecInfo->decInfo;
  924. DecInitialInfo* sequenceInfo = &inst->CodecInfo->decInfo.initialInfo;
  925. Int8 fbcIndex, linearIndex;
  926. Uint32 coreIdx, regVal;
  927. Uint32 mvColSize, fbcYTblSize, fbcCTblSize;
  928. Uint32 linearStride, fbcStride;
  929. vpu_buffer_t* pvbMv = NULL;
  930. vpu_buffer_t* pvbFbcYOffset = NULL;
  931. vpu_buffer_t* pvbFbcCOffset = NULL;
  932. CodStd codec;
  933. unsigned long fbcYoffsetAddr = 0;
  934. unsigned long fbcCoffsetAddr = 0;
  935. coreIdx = inst->coreIdx;
  936. linearIndex = (linearFb == NULL) ? -1 : linearFb->myIndex - pDecInfo->numFbsForDecoding;
  937. fbcIndex = (fbcFb == NULL) ? -1 : fbcFb->myIndex;
  938. mvColSize = fbcYTblSize = fbcCTblSize = 0;
  939. codec = pDecInfo->openParam.bitstreamFormat;
  940. if (codec != STD_VP9) {
  941. return RETCODE_NOT_SUPPORTED_FEATURE;
  942. }
  943. mvColSize = WAVE5_DEC_VP9_MVCOL_BUF_SIZE(picWidth, picHeight);
  944. if ((fbcFb != NULL) && (fbcIndex >= 0)) {
  945. pDecInfo->frameBufPool[fbcIndex] = *fbcFb;
  946. }
  947. if ((linearFb != NULL) && (linearIndex >= 0)) {
  948. pDecInfo->frameBufPool[pDecInfo->numFbsForDecoding + linearIndex] = *linearFb;
  949. }
  950. if (mvIndex >= 0) {
  951. pvbMv = &pDecInfo->vbMV[mvIndex];
  952. vdi_free_dma_memory(inst->coreIdx, pvbMv, DEC_MV, inst->instIndex);
  953. pvbMv->size = ((mvColSize+4095)&~4095) + 4096;
  954. if (vdi_allocate_dma_memory(inst->coreIdx, pvbMv, DEC_MV, inst->instIndex) < 0) {
  955. return RETCODE_INSUFFICIENT_RESOURCE;
  956. }
  957. }
958. /* Reallocate FBC offset tables (only the STD_VP9 branch below is reachable because of the early return above) */
  959. if (codec == STD_HEVC){
  960. fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(picWidth, picHeight);
  961. }
  962. else if (codec == STD_VP9) {
963. // VP9 decoded size is 64-aligned.
  964. fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(VPU_ALIGN64(picWidth), VPU_ALIGN64(picHeight));
  965. }
  966. else if (codec == STD_AVS2){
  967. fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(picWidth, picHeight);
  968. }
  969. else if (codec == STD_AVC){
  970. fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(picWidth, picHeight); // FIX ME
  971. }
  972. else if (codec == STD_AV1){
  973. fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(VPU_ALIGN64(picWidth), VPU_ALIGN64(picHeight));
  974. }
  975. else {
  976. /* Unknown codec */
  977. return RETCODE_NOT_SUPPORTED_FEATURE;
  978. }
  979. if (fbcIndex >= 0) {
  980. pvbFbcYOffset = &pDecInfo->vbFbcYTbl[fbcIndex];
  981. vdi_free_dma_memory(inst->coreIdx, pvbFbcYOffset, DEC_FBCY_TBL, inst->instIndex);
  982. pvbFbcYOffset->phys_addr = 0;
  983. pvbFbcYOffset->size = ((fbcYTblSize+4095)&~4095)+4096;
  984. if (vdi_allocate_dma_memory(inst->coreIdx, pvbFbcYOffset, DEC_FBCY_TBL, inst->instIndex) < 0) {
  985. return RETCODE_INSUFFICIENT_RESOURCE;
  986. }
  987. fbcYoffsetAddr = pvbFbcYOffset->phys_addr;
  988. }
  989. if (codec == STD_HEVC) {
  990. fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(picWidth, picHeight);
  991. }
  992. else if (codec == STD_VP9) {
  993. fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(VPU_ALIGN64(picWidth), VPU_ALIGN64(picHeight));
  994. }
  995. else if (codec == STD_AVS2) {
  996. fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(picWidth, picHeight);
  997. }
  998. else if (codec == STD_AVC) {
  999. fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(picWidth, picHeight); // FIX ME
  1000. }
  1001. else if (codec == STD_AV1) {
  1002. fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(VPU_ALIGN64(picWidth), VPU_ALIGN64(picHeight));
  1003. }
  1004. else {
  1005. /* Unknown codec */
  1006. return RETCODE_NOT_SUPPORTED_FEATURE;
  1007. }
  1008. if (fbcIndex >= 0) {
  1009. pvbFbcCOffset = &pDecInfo->vbFbcCTbl[fbcIndex];
  1010. vdi_free_dma_memory(inst->coreIdx, pvbFbcCOffset, DEC_FBCC_TBL, inst->instIndex);
  1011. pvbFbcCOffset->phys_addr = 0;
  1012. pvbFbcCOffset->size = ((fbcCTblSize+4095)&~4095)+4096;
  1013. if (vdi_allocate_dma_memory(inst->coreIdx, pvbFbcCOffset, DEC_FBCC_TBL, inst->instIndex) < 0) {
  1014. return RETCODE_INSUFFICIENT_RESOURCE;
  1015. }
  1016. fbcCoffsetAddr = pvbFbcCOffset->phys_addr;
  1017. }
  1018. linearStride = linearFb == NULL ? 0 : linearFb->stride;
  1019. fbcStride = fbcFb == NULL ? 0 : fbcFb->stride;
  1020. regVal = linearStride<<16 | fbcStride;
  1021. VpuWriteReg(coreIdx, W5_CMD_SET_FB_STRIDE, regVal);
  1022. regVal = (picWidth<<16) | picHeight;
  1023. if (pDecInfo->scalerEnable == TRUE) {
  1024. regVal = (pDecInfo->scaleWidth << 16) | (pDecInfo->scaleHeight);
  1025. }
  1026. VpuWriteReg(coreIdx, W5_PIC_SIZE, regVal);
  1027. VLOG(INFO, "fbcIndex(%d), linearIndex(%d), mvIndex(%d)\n", fbcIndex, linearIndex, mvIndex);
  1028. regVal = (mvIndex&0xff) << 16 | (linearIndex&0xff) << 8 | (fbcIndex&0xff);
  1029. VpuWriteReg(coreIdx, W5_CMD_SET_FB_INDEX, regVal);
  1030. VpuWriteReg(coreIdx, W5_ADDR_LUMA_BASE, linearFb == NULL ? 0 : linearFb->bufY);
  1031. VpuWriteReg(coreIdx, W5_ADDR_CB_BASE, linearFb == NULL ? 0 : linearFb->bufCb);
  1032. VpuWriteReg(coreIdx, W5_ADDR_CR_BASE, linearFb == NULL ? 0 : linearFb->bufCr);
  1033. VpuWriteReg(coreIdx, W5_ADDR_MV_COL, pvbMv == NULL ? 0 : pvbMv->phys_addr);
  1034. VpuWriteReg(coreIdx, W5_ADDR_FBC_Y_BASE, fbcFb == NULL ? 0 : fbcFb->bufY);
  1035. VpuWriteReg(coreIdx, W5_ADDR_FBC_C_BASE, fbcFb == NULL ? 0 : fbcFb->bufCb);
  1036. VpuWriteReg(coreIdx, W5_ADDR_FBC_Y_OFFSET, fbcYoffsetAddr);
  1037. VpuWriteReg(coreIdx, W5_ADDR_FBC_C_OFFSET, fbcCoffsetAddr);
  1038. VpuWriteReg(coreIdx, W5_SFB_OPTION, 1); /* UPDATE FRAMEBUFFER */
  1039. Wave5BitIssueCommand(inst, W5_SET_FB);
  1040. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
  1041. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1042. }
  1043. regVal = VpuReadReg(coreIdx, W5_RET_SUCCESS);
  1044. if (regVal == 0) {
  1045. return RETCODE_FAILURE;
  1046. }
  1047. if (ConfigSecAXIWave(coreIdx, inst->codecMode,
  1048. &pDecInfo->secAxiInfo, pDecInfo->initialInfo.picWidth, pDecInfo->initialInfo.picHeight,
  1049. sequenceInfo->profile, sequenceInfo->level) == 0) {
  1050. return RETCODE_INSUFFICIENT_RESOURCE;
  1051. }
  1052. return ret;
  1053. }
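/*
 * Descriptive note on the function below: Wave5VpuDecode queues one W5_DEC_PIC command. It
 * derives the command option from thumbnail/skip/SVAC-layer settings, programs the bitstream
 * read/write pointers and W5_BS_OPTION, enables the secondary AXI paths, sets the user-data
 * mask and temporal-id filtering, and then maps W5_RET_FAIL_REASON to a RetCode when the
 * command could not be queued.
 */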
  1054. RetCode Wave5VpuDecode(CodecInst* instance, DecParam* option)
  1055. {
  1056. Uint32 modeOption = DEC_PIC_NORMAL, bsOption, regVal;
  1057. DecOpenParam* pOpenParam;
  1058. Int32 forceLatency = -1;
  1059. Int32 rdptr_valid = 0;
  1060. DecInfo* pDecInfo = &instance->CodecInfo->decInfo;
  1061. pOpenParam = &pDecInfo->openParam;
  1062. if (pDecInfo->thumbnailMode) {
  1063. modeOption = DEC_PIC_W_THUMBNAIL;
  1064. }
  1065. else if (option->skipframeMode) {
  1066. switch (option->skipframeMode) {
  1067. case WAVE_SKIPMODE_NON_IRAP:
  1068. modeOption = SKIP_NON_IRAP;
  1069. forceLatency = 0;
  1070. break;
  1071. case WAVE_SKIPMODE_NON_REF:
  1072. modeOption = SKIP_NON_REF_PIC;
  1073. break;
  1074. default:
  1075. // skip off
  1076. break;
  1077. }
  1078. }
  1079. if (instance->codecMode == W_SVAC_DEC) {
  1080. switch (option->selSvacLayer) {
  1081. case SEL_SVAC_ALL_LAYER:
  1082. break;
  1083. case SEL_SVAC_BL:
  1084. modeOption |= SKIP_SVAC_EL;
  1085. break;
  1086. case SEL_SVAC_EL:
  1087. modeOption |= SKIP_SVAC_BL;
  1088. break;
  1089. default:
  1090. break;
  1091. }
  1092. }
  1093. if (option->craAsBlaFlag == TRUE) {
  1094. modeOption |= (1<<5);
  1095. }
1096. // reordering disabled: force display latency to zero
  1097. if (pDecInfo->reorderEnable == FALSE) {
  1098. forceLatency = 0;
  1099. }
  1100. /* Set attributes of bitstream buffer controller */
  1101. bsOption = 0;
  1102. regVal = 0;
  1103. switch (pOpenParam->bitstreamMode) {
  1104. case BS_MODE_INTERRUPT:
  1105. bsOption = 0;
  1106. break;
  1107. case BS_MODE_PIC_END:
  1108. bsOption = BSOPTION_ENABLE_EXPLICIT_END;
  1109. break;
  1110. default:
  1111. return RETCODE_INVALID_PARAM;
  1112. }
  1113. VpuWriteReg(instance->coreIdx, W5_BS_RD_PTR, pDecInfo->streamRdPtr);
  1114. VpuWriteReg(instance->coreIdx, W5_BS_WR_PTR, pDecInfo->streamWrPtr);
  1115. if (pDecInfo->streamEndflag == 1)
  1116. bsOption = 3; // (streamEndFlag<<1) | EXPLICIT_END
  1117. if (pOpenParam->bitstreamMode == BS_MODE_PIC_END || pDecInfo->rdPtrValidFlag == TRUE)
  1118. rdptr_valid = 1;
  1119. if (instance->codecMode == W_AV1_DEC) {
  1120. bsOption |= ((pOpenParam->av1Format) << 2);
  1121. }
  1122. VpuWriteReg(instance->coreIdx, W5_BS_OPTION, (rdptr_valid<<31) | bsOption);
  1123. pDecInfo->rdPtrValidFlag = FALSE; // reset rdptrValidFlag.
  1124. /* Secondary AXI */
  1125. regVal = (pDecInfo->secAxiInfo.u.wave.useBitEnable<<0) |
  1126. (pDecInfo->secAxiInfo.u.wave.useIpEnable<<9) |
  1127. (pDecInfo->secAxiInfo.u.wave.useLfRowEnable<<15);
  1128. regVal |= (pDecInfo->secAxiInfo.u.wave.useSclEnable<<5);
  1129. VpuWriteReg(instance->coreIdx, W5_USE_SEC_AXI, regVal);
  1130. /* Set attributes of User buffer */
  1131. VpuWriteReg(instance->coreIdx, W5_CMD_DEC_USER_MASK, pDecInfo->userDataEnable);
  1132. if (pDecInfo->tempIdSelectMode == FALSE)
  1133. VpuWriteReg(instance->coreIdx, W5_CMD_DEC_TEMPORAL_ID_PLUS1, pDecInfo->tempIdSelectMode<<8 | (pDecInfo->targetSubLayerId+1));
  1134. else
  1135. VpuWriteReg(instance->coreIdx, W5_CMD_DEC_REL_TEMPORAL_ID, pDecInfo->tempIdSelectMode<<8 | pDecInfo->relTargetLayerId);
  1136. VpuWriteReg(instance->coreIdx, W5_CMD_SEQ_CHANGE_ENABLE_FLAG, pDecInfo->seqChangeMask);
  1137. VpuWriteReg(instance->coreIdx, W5_CMD_DEC_FORCE_FB_LATENCY_PLUS1, forceLatency+1);
  1138. VpuWriteReg(instance->coreIdx, W5_COMMAND_OPTION, modeOption);
  1139. Wave5BitIssueCommand(instance, W5_DEC_PIC);
  1140. if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) { // Check QUEUE_DONE
  1141. if (instance->loggingEnable)
  1142. vdi_log(instance->coreIdx, W5_DEC_PIC, 2);
  1143. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1144. }
  1145. regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_STATUS);
  1146. pDecInfo->instanceQueueCount = (regVal>>16)&0xff;
  1147. pDecInfo->reportQueueCount = (regVal & 0xffff);
1148. if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == FALSE) { // failed to add the command to the VCPU command queue
  1149. regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
  1150. if (regVal != WAVE5_QUEUEING_FAIL)
  1151. VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);
  1152. if ( regVal == WAVE5_QUEUEING_FAIL)
  1153. return RETCODE_QUEUEING_FAILURE;
  1154. else if (regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
  1155. return RETCODE_MEMORY_ACCESS_VIOLATION;
  1156. else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
  1157. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1158. else if (regVal == WAVE5_SYSERR_DEC_VLC_BUF_FULL)
  1159. return RETCODE_VLC_BUF_FULL;
  1160. else if (regVal == WAVE5_ERROR_FW_FATAL)
  1161. return RETCODE_ERROR_FW_FATAL;
  1162. else
  1163. return RETCODE_FAILURE;
  1164. }
  1165. return RETCODE_SUCCESS;
  1166. }
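/*
 * Descriptive note on the function below: Wave5VpuDecGetResult issues a GET_RESULT query and
 * translates the W5_RET_DEC_* report registers into DecOutputInfo: picture type per codec,
 * display/decoded indexes, POC/POI, sequence-change handling, error/total CTB counts,
 * recovery-point SEI, and the host/seek/parse/decode tick counters used for cycle statistics.
 */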
  1167. RetCode Wave5VpuDecGetResult(CodecInst* instance, DecOutputInfo* result)
  1168. {
  1169. RetCode ret = RETCODE_SUCCESS;
  1170. Uint32 regVal, index, nalUnitType;
  1171. DecInfo* pDecInfo;
  1172. vpu_instance_pool_t* instancePool = NULL;
  1173. pDecInfo = VPU_HANDLE_TO_DECINFO(instance);
  1174. VpuWriteReg(instance->coreIdx, W5_CMD_DEC_ADDR_REPORT_BASE, pDecInfo->userDataBufAddr);
  1175. VpuWriteReg(instance->coreIdx, W5_CMD_DEC_REPORT_SIZE, pDecInfo->userDataBufSize);
  1176. VpuWriteReg(instance->coreIdx, W5_CMD_DEC_REPORT_PARAM, VPU_USER_DATA_ENDIAN&VDI_128BIT_ENDIAN_MASK);
  1177. // Send QUERY cmd
  1178. ret = SendQuery(instance, GET_RESULT);
  1179. if (ret != RETCODE_SUCCESS) {
  1180. regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
  1181. if (regVal != WAVE5_QUEUEING_FAIL)
  1182. VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);
  1183. if (regVal == WAVE5_RESULT_NOT_READY)
  1184. return RETCODE_REPORT_NOT_READY;
  1185. else if (regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
  1186. return RETCODE_MEMORY_ACCESS_VIOLATION;
  1187. else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
  1188. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1189. else if (regVal == WAVE5_SYSERR_DEC_VLC_BUF_FULL)
  1190. return RETCODE_VLC_BUF_FULL;
  1191. else if (regVal == WAVE5_ERROR_FW_FATAL)
  1192. return RETCODE_ERROR_FW_FATAL;
  1193. else
  1194. return RETCODE_QUERY_FAILURE;
  1195. }
  1196. if (instance->loggingEnable)
  1197. vdi_log(instance->coreIdx, W5_DEC_PIC, 0);
  1198. regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_STATUS);
  1199. pDecInfo->instanceQueueCount = (regVal>>16)&0xff;
  1200. pDecInfo->reportQueueCount = (regVal & 0xffff);
  1201. result->decodingSuccess = VpuReadReg(instance->coreIdx, W5_RET_DEC_DECODING_SUCCESS);
  1202. #ifdef SUPPORT_SW_UART
  1203. #else
  1204. if (result->decodingSuccess == FALSE) {
  1205. result->errorReason = VpuReadReg(instance->coreIdx, W5_RET_DEC_ERR_INFO);
  1206. }
  1207. else {
  1208. result->warnInfo = VpuReadReg(instance->coreIdx, W5_RET_DEC_WARN_INFO);
  1209. }
  1210. #endif
  1211. result->decOutputExtData.userDataSize = 0;
  1212. result->decOutputExtData.userDataNum = 0;
  1213. result->decOutputExtData.userDataBufFull= 0;
  1214. result->decOutputExtData.userDataHeader = VpuReadReg(instance->coreIdx, W5_RET_DEC_USERDATA_IDC);
  1215. if (result->decOutputExtData.userDataHeader != 0) {
  1216. regVal = result->decOutputExtData.userDataHeader;
  1217. for (index=0; index<32; index++) {
  1218. if (index == 1) {
  1219. if (regVal & (1<<index))
  1220. result->decOutputExtData.userDataBufFull = 1;
  1221. }
  1222. else {
  1223. if (regVal & (1<<index))
  1224. result->decOutputExtData.userDataNum++;
  1225. }
  1226. }
  1227. result->decOutputExtData.userDataSize = pDecInfo->userDataBufSize;
  1228. }
  1229. regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_PIC_TYPE);
  1230. if (instance->codecMode == W_VP9_DEC) {
  1231. if (regVal&0x01) result->picType = PIC_TYPE_I;
  1232. else if (regVal&0x02) result->picType = PIC_TYPE_P;
  1233. else if (regVal&0x04) result->picType = PIC_TYPE_REPEAT;
  1234. else result->picType = PIC_TYPE_MAX;
  1235. }
  1236. else if (instance->codecMode == W_HEVC_DEC) {
  1237. if (regVal&0x04) result->picType = PIC_TYPE_B;
  1238. else if (regVal&0x02) result->picType = PIC_TYPE_P;
  1239. else if (regVal&0x01) result->picType = PIC_TYPE_I;
  1240. else result->picType = PIC_TYPE_MAX;
  1241. }
  1242. else if (instance->codecMode == W_AVC_DEC) {
  1243. if (regVal&0x04) result->picType = PIC_TYPE_B;
  1244. else if (regVal&0x02) result->picType = PIC_TYPE_P;
  1245. else if (regVal&0x01) result->picType = PIC_TYPE_I;
  1246. else result->picType = PIC_TYPE_MAX;
  1247. }
  1248. else if (instance->codecMode == W_SVAC_DEC) {
  1249. if (regVal&0x01) result->picType = PIC_TYPE_KEY;
  1250. else if(regVal&0x02) result->picType = PIC_TYPE_INTER;
  1251. else result->picType = PIC_TYPE_MAX;
  1252. }
  1253. else if (instance->codecMode == W_AV1_DEC) {
  1254. switch (regVal & 0x07) {
  1255. case 0: result->picType = PIC_TYPE_KEY; break;
  1256. case 1: result->picType = PIC_TYPE_INTER; break;
  1257. case 2: result->picType = PIC_TYPE_AV1_INTRA; break;
  1258. case 3: result->picType = PIC_TYPE_AV1_SWITCH; break;
  1259. default:
  1260. result->picType = PIC_TYPE_MAX; break;
  1261. }
  1262. }
  1263. else { // AVS2
  1264. switch(regVal&0x07) {
  1265. case 0: result->picType = PIC_TYPE_I; break;
  1266. case 1: result->picType = PIC_TYPE_P; break;
  1267. case 2: result->picType = PIC_TYPE_B; break;
  1268. case 3: result->picType = PIC_TYPE_AVS2_F; break;
  1269. case 4: result->picType = PIC_TYPE_AVS2_S; break;
  1270. case 5: result->picType = PIC_TYPE_AVS2_G; break;
  1271. case 6: result->picType = PIC_TYPE_AVS2_GB;break;
  1272. default:
  1273. result->picType = PIC_TYPE_MAX; break;
  1274. }
  1275. }
  1276. result->outputFlag = (regVal>>31)&0x1;
  1277. nalUnitType = (regVal & 0x3f0) >> 4;
  1278. if ((nalUnitType == 19 || nalUnitType == 20) && result->picType == PIC_TYPE_I) {
  1279. /* IDR_W_RADL, IDR_N_LP */
  1280. result->picType = PIC_TYPE_IDR;
  1281. }
  1282. result->nalType = nalUnitType;
  1283. result->ctuSize = 16<<((regVal>>10)&0x3);
  1284. index = VpuReadReg(instance->coreIdx, W5_RET_DEC_DISPLAY_INDEX);
  1285. result->indexFrameDisplay = index;
  1286. result->indexFrameDisplayForTiled = index;
  1287. index = VpuReadReg(instance->coreIdx, W5_RET_DEC_DECODED_INDEX);
  1288. result->indexFrameDecoded = index;
  1289. result->indexFrameDecodedForTiled = index;
  1290. if (instance->codecMode == W_HEVC_DEC) {
  1291. result->decodedPOC = -1;
  1292. result->displayPOC = -1;
  1293. if (result->indexFrameDecoded >= 0 || result->indexFrameDecoded == DECODED_IDX_FLAG_SKIP)
  1294. result->decodedPOC = VpuReadReg(instance->coreIdx, W5_RET_DEC_PIC_POC);
  1295. regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_SUB_LAYER_INFO);
  1296. result->temporalId = regVal & 0xff;
  1297. }
  1298. else if (instance->codecMode == W_SVAC_DEC) {
  1299. regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_SUB_LAYER_INFO);
  1300. result->temporalId = regVal & 0xff;
  1301. result->svacInfo.spatialSvcFlag = (regVal >> 16) & 0x1;
  1302. result->svacInfo.spatialSvcMode = (regVal >> 17) & 0x1;
  1303. result->svacInfo.spatialSvcLayer= (regVal >> 18) & 0x1;
  1304. }
  1305. else if (instance->codecMode == W_AVS2_DEC) {
  1306. result->avs2Info.decodedPOI = -1;
  1307. result->avs2Info.displayPOI = -1;
  1308. if (result->indexFrameDecoded >= 0)
  1309. result->avs2Info.decodedPOI = VpuReadReg(instance->coreIdx, W5_RET_DEC_PIC_POC);
  1310. regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_SUB_LAYER_INFO);
  1311. result->temporalId = regVal & 0xff;
  1312. }
  1313. else if (instance->codecMode == W_AVC_DEC) {
  1314. result->decodedPOC = -1;
  1315. result->displayPOC = -1;
  1316. if (result->indexFrameDecoded >= 0 || result->indexFrameDecoded == DECODED_IDX_FLAG_SKIP)
  1317. result->decodedPOC = VpuReadReg(instance->coreIdx, W5_RET_DEC_PIC_POC);
  1318. regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_SUB_LAYER_INFO);
  1319. result->temporalId = regVal & 0xff;
  1320. }
  1321. else if (instance->codecMode == W_AV1_DEC) {
  1322. result->decodedPOC = -1;
  1323. result->displayPOC = -1;
  1324. if (result->indexFrameDecoded >= 0 || result->indexFrameDecoded == DECODED_IDX_FLAG_SKIP)
  1325. result->decodedPOC = VpuReadReg(instance->coreIdx, W5_RET_DEC_PIC_POC);
  1326. regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_SUB_LAYER_INFO);
  1327. result->temporalId = regVal & 0xff;
  1328. regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_PIC_PARAM);
  1329. result->av1Info.enableIntraBlockCopy = (regVal >> 0) & 0x1;
  1330. result->av1Info.enableScreenContents = (regVal >> 1) & 0x1;
  1331. }
  1332. result->sequenceChanged = VpuReadReg(instance->coreIdx, W5_RET_DEC_NOTIFICATION);
  1333. /*
1334. * If the sequence-change flag is set, the current picture is the last one of the previous sequence,
1335. * so its width and height are reported from that sequence's initial info; otherwise the size is read from W5_RET_DEC_PIC_SIZE.
  1336. */
  1337. if (result->sequenceChanged == 0) {
  1338. regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_PIC_SIZE);
  1339. result->decPicWidth = regVal>>16;
  1340. result->decPicHeight = regVal&0xffff;
  1341. }
  1342. else {
  1343. if (result->indexFrameDecoded < 0) {
  1344. result->decPicWidth = 0;
  1345. result->decPicHeight = 0;
  1346. }
  1347. else {
  1348. result->decPicWidth = pDecInfo->initialInfo.picWidth;
  1349. result->decPicHeight = pDecInfo->initialInfo.picHeight;
  1350. }
  1351. if ( instance->codecMode == W_VP9_DEC ) {
  1352. if ( result->sequenceChanged & SEQ_CHANGE_INTER_RES_CHANGE) {
  1353. regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_PIC_SIZE);
  1354. result->decPicWidth = regVal>>16;
  1355. result->decPicHeight = regVal&0xffff;
  1356. result->indexInterFrameDecoded = VpuReadReg(instance->coreIdx, W5_RET_DEC_REALLOC_INDEX);
  1357. }
  1358. }
  1359. osal_memcpy((void*)&pDecInfo->newSeqInfo, (void*)&pDecInfo->initialInfo, sizeof(DecInitialInfo));
  1360. GetDecSequenceResult(instance, &pDecInfo->newSeqInfo);
  1361. }
  1362. result->numOfErrMBs = VpuReadReg(instance->coreIdx, W5_RET_DEC_ERR_CTB_NUM)>>16;
  1363. result->numOfTotMBs = VpuReadReg(instance->coreIdx, W5_RET_DEC_ERR_CTB_NUM)&0xffff;
  1364. result->bytePosFrameStart = VpuReadReg(instance->coreIdx, W5_RET_DEC_AU_START_POS);
  1365. result->bytePosFrameEnd = VpuReadReg(instance->coreIdx, W5_RET_DEC_AU_END_POS);
  1366. pDecInfo->prevFrameEndPos = result->bytePosFrameEnd;
  1367. regVal = VpuReadReg(instance->coreIdx, W5_RET_DEC_RECOVERY_POINT);
  1368. result->h265RpSei.recoveryPocCnt = regVal & 0xFFFF; // [15:0]
  1369. result->h265RpSei.exactMatchFlag = (regVal >> 16)&0x01; // [16]
  1370. result->h265RpSei.brokenLinkFlag = (regVal >> 17)&0x01; // [17]
  1371. result->h265RpSei.exist = (regVal >> 18)&0x01; // [18]
  1372. if(result->h265RpSei.exist == 0) {
  1373. result->h265RpSei.recoveryPocCnt = 0;
  1374. result->h265RpSei.exactMatchFlag = 0;
  1375. result->h265RpSei.brokenLinkFlag = 0;
  1376. }
  1377. result->decHostCmdTick = VpuReadReg(instance->coreIdx, W5_RET_DEC_HOST_CMD_TICK);
  1378. result->decSeekStartTick = VpuReadReg(instance->coreIdx, W5_RET_DEC_SEEK_START_TICK);
  1379. result->decSeekEndTick = VpuReadReg(instance->coreIdx, W5_RET_DEC_SEEK_END__TICK);
  1380. result->decParseStartTick = VpuReadReg(instance->coreIdx, W5_RET_DEC_PARSING_START_TICK);
  1381. result->decParseEndTick = VpuReadReg(instance->coreIdx, W5_RET_DEC_PARSING_END_TICK);
  1382. result->decDecodeStartTick = VpuReadReg(instance->coreIdx, W5_RET_DEC_DECODING_START_TICK);
  1383. result->decDecodeEndTick = VpuReadReg(instance->coreIdx, W5_RET_DEC_DECODING_ENC_TICK);
  1384. instancePool = vdi_get_instance_pool(instance->coreIdx);
  1385. if (pDecInfo->firstCycleCheck == FALSE) {
  1386. result->frameCycle = (result->decDecodeEndTick - result->decHostCmdTick)*pDecInfo->cyclePerTick;
  1387. instancePool->lastPerformanceCycles = result->decDecodeEndTick;
  1388. pDecInfo->firstCycleCheck = TRUE;
  1389. }
  1390. else {
  1391. if ( result->indexFrameDecodedForTiled != -1 ) {
  1392. result->frameCycle = (result->decDecodeEndTick - instancePool->lastPerformanceCycles)*pDecInfo->cyclePerTick;
  1393. instancePool->lastPerformanceCycles = result->decDecodeEndTick;
  1394. if (instancePool->lastPerformanceCycles < result->decHostCmdTick)
  1395. result->frameCycle = (result->decDecodeEndTick - result->decHostCmdTick);
  1396. }
  1397. }
  1398. result->seekCycle = (result->decSeekEndTick - result->decSeekStartTick)*pDecInfo->cyclePerTick;
  1399. result->parseCycle = (result->decParseEndTick - result->decParseStartTick)*pDecInfo->cyclePerTick;
  1400. result->DecodedCycle = (result->decDecodeEndTick - result->decDecodeStartTick)*pDecInfo->cyclePerTick;
  1401. if (0 == pDecInfo->instanceQueueCount && 0 == pDecInfo->reportQueueCount) {
  1402. // No remaining command. Reset frame cycle.
  1403. pDecInfo->firstCycleCheck = FALSE;
  1404. }
  1405. if ( result->sequenceChanged && (instance->codecMode != W_VP9_DEC)) {
  1406. pDecInfo->scaleWidth = pDecInfo->newSeqInfo.picWidth;
  1407. pDecInfo->scaleHeight = pDecInfo->newSeqInfo.picHeight;
  1408. }
  1409. return RETCODE_SUCCESS;
  1410. }
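/*
 * Descriptive note on the function below: Wave5VpuDecFlush issues W5_FLUSH_INSTANCE and, on
 * success, clears the cached instance/report queue counts; the framebufferIndexes/size
 * arguments are not consumed in this path.
 */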
  1411. RetCode Wave5VpuDecFlush(CodecInst* instance, FramebufferIndex* framebufferIndexes, Uint32 size)
  1412. {
  1413. RetCode ret = RETCODE_SUCCESS;
  1414. Wave5BitIssueCommand(instance, W5_FLUSH_INSTANCE);
  1415. if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1)
  1416. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1417. if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == FALSE) {
  1418. Uint32 regVal;
  1419. regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
  1420. if (regVal != WAVE5_QUEUEING_FAIL)
  1421. VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);
  1422. if (regVal == WAVE5_VPU_STILL_RUNNING)
  1423. return RETCODE_VPU_STILL_RUNNING;
  1424. else if (regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
  1425. return RETCODE_MEMORY_ACCESS_VIOLATION;
  1426. else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
  1427. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1428. else if (regVal == WAVE5_SYSERR_DEC_VLC_BUF_FULL)
  1429. return RETCODE_VLC_BUF_FULL;
  1430. else if (regVal == WAVE5_ERROR_FW_FATAL)
  1431. return RETCODE_ERROR_FW_FATAL;
  1432. else
  1433. return RETCODE_QUERY_FAILURE;
  1434. }
  1435. else {
  1436. DecInfo* pDecInfo = VPU_HANDLE_TO_DECINFO(instance);
  1437. pDecInfo->instanceQueueCount = 0;
  1438. pDecInfo->reportQueueCount = 0;
  1439. }
  1440. return ret;
  1441. }
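/*
 * Descriptive note on the function below: Wave5VpuReInit reloads the firmware only when the
 * requested code base differs from the current W5_VPU_REMAP_PADDR: it drains the backbone/GDI
 * buses, resets all blocks, reprograms the code/temp regions and the remap window, re-enables
 * interrupts and the AXI IDs, and issues W5_INIT_VPU before refreshing the product attributes
 * via SetupWave5Properties().
 */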
  1442. RetCode Wave5VpuReInit(Uint32 coreIdx, void* firmware, Uint32 size)
  1443. {
  1444. vpu_buffer_t vb;
  1445. PhysicalAddress codeBase, tempBase;
  1446. PhysicalAddress oldCodeBase, tempSize;
  1447. Uint32 codeSize;
  1448. Uint32 regVal, remapSize;
  1449. vdi_get_common_memory(coreIdx, &vb);
  1450. codeBase = vb.phys_addr;
  1451. /* ALIGN TO 4KB */
  1452. codeSize = (WAVE5_MAX_CODE_BUF_SIZE&~0xfff);
  1453. if (codeSize < size*2) {
  1454. return RETCODE_INSUFFICIENT_RESOURCE;
  1455. }
  1456. tempBase = vb.phys_addr + WAVE5_TEMPBUF_OFFSET;
  1457. tempSize = WAVE5_TEMPBUF_SIZE;
  1458. oldCodeBase = VpuReadReg(coreIdx, W5_VPU_REMAP_PADDR);
  1459. if (oldCodeBase != codeBase) {
  1460. VpuAttr* pAttr = &g_VpuCoreAttributes[coreIdx];
  1461. VpuWriteMem(coreIdx, codeBase, (unsigned char*)firmware, size*2, VDI_128BIT_LITTLE_ENDIAN);
  1462. vdi_set_bit_firmware_to_pm(coreIdx, (Uint16*)firmware);
  1463. regVal = 0;
  1464. VpuWriteReg(coreIdx, W5_PO_CONF, regVal);
  1465. if (pAttr->supportBackbone == TRUE) {
  1466. if (pAttr->supportDualCore == TRUE) {
  1467. // check CORE0
  1468. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x7);
  1469. if (vdi_wait_bus_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_BACKBONE_BUS_STATUS_VCORE0) == -1) {
  1470. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x00);
  1471. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1472. }
  1473. // check CORE1
  1474. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE1, 0x7);
  1475. if (vdi_wait_bus_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_BACKBONE_BUS_STATUS_VCORE1) == -1) {
  1476. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE1, 0x00);
  1477. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1478. }
  1479. }
  1480. else {
  1481. if (pAttr->supportVcoreBackbone == TRUE) {
  1482. // Step1 : disable request
  1483. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x7);
  1484. // Step2 : Waiting for completion of bus transaction
  1485. if (vdi_wait_bus_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_BACKBONE_BUS_STATUS_VCORE0) == -1) {
  1486. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x00);
  1487. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1488. }
  1489. }
  1490. else {
  1491. // Step1 : disable request
  1492. vdi_fio_write_register(coreIdx, W5_COMBINED_BACKBONE_BUS_CTRL, 0x7);
  1493. // Step2 : Waiting for completion of bus transaction
  1494. if (vdi_wait_bus_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_COMBINED_BACKBONE_BUS_STATUS) == -1) {
  1495. vdi_fio_write_register(coreIdx, W5_COMBINED_BACKBONE_BUS_CTRL, 0x00);
  1496. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1497. }
  1498. }
  1499. }
  1500. }
  1501. else {
  1502. // Step1 : disable request
  1503. vdi_fio_write_register(coreIdx, W5_GDI_BUS_CTRL, 0x100);
  1504. // Step2 : Waiting for completion of bus transaction
  1505. if (vdi_wait_bus_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_GDI_BUS_STATUS) == -1) {
  1506. vdi_fio_write_register(coreIdx, W5_GDI_BUS_CTRL, 0x00);
  1507. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1508. }
  1509. }
  1510. // Step3 : Waiting for completion of VCPU bus transaction
  1511. if (vdi_wait_vcpu_bus_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_VCPU_STATUS) == -1) {
  1512. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1513. }
  1514. /* Reset All blocks */
  1515. regVal = 0x7ffffff;
  1516. VpuWriteReg(coreIdx, W5_VPU_RESET_REQ, regVal); // Reset All blocks
  1517. /* Waiting reset done */
  1518. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_RESET_STATUS) == -1) {
  1519. VpuWriteReg(coreIdx, W5_VPU_RESET_REQ, 0);
  1520. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1521. }
  1522. VpuWriteReg(coreIdx, W5_VPU_RESET_REQ, 0);
1523. // Step3 : GDI_BUS_CTRL must be cleared after SW_RESET completes
  1524. if (pAttr->supportBackbone == TRUE) {
  1525. if (pAttr->supportDualCore == TRUE) {
  1526. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x00);
  1527. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE1, 0x00);
  1528. }
  1529. else {
  1530. if (pAttr->supportVcoreBackbone == TRUE) {
  1531. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x00);
  1532. }
  1533. else {
  1534. vdi_fio_write_register(coreIdx, W5_COMBINED_BACKBONE_BUS_CTRL, 0x00);
  1535. }
  1536. }
  1537. }
  1538. else {
  1539. vdi_fio_write_register(coreIdx, W5_GDI_BUS_CTRL, 0x00);
  1540. }
  1541. /* remap page size */
  1542. remapSize = (codeSize >> 12) &0x1ff;
  1543. regVal = 0x80000000 | (WAVE5_UPPER_PROC_AXI_ID<<20) | (W5_REMAP_CODE_INDEX<<12) | (0 << 16) | (1<<11) | remapSize;
  1544. VpuWriteReg(coreIdx, W5_VPU_REMAP_CTRL, regVal);
  1545. VpuWriteReg(coreIdx, W5_VPU_REMAP_VADDR, 0x00000000); /* DO NOT CHANGE! */
  1546. VpuWriteReg(coreIdx, W5_VPU_REMAP_PADDR, codeBase);
  1547. VpuWriteReg(coreIdx, W5_ADDR_CODE_BASE, codeBase);
  1548. VpuWriteReg(coreIdx, W5_CODE_SIZE, codeSize);
  1549. VpuWriteReg(coreIdx, W5_CODE_PARAM, (WAVE5_UPPER_PROC_AXI_ID<<4) | 0);
  1550. VpuWriteReg(coreIdx, W5_ADDR_TEMP_BASE, tempBase);
  1551. VpuWriteReg(coreIdx, W5_TEMP_SIZE, tempSize);
  1552. VpuWriteReg(coreIdx, W5_TIMEOUT_CNT, 0);
  1553. VpuWriteReg(coreIdx, W5_HW_OPTION, 0);
  1554. /* Interrupt */
  1555. // encoder
  1556. regVal = (1<<INT_WAVE5_ENC_SET_PARAM);
  1557. regVal |= (1<<INT_WAVE5_ENC_PIC);
  1558. regVal |= (1<<INT_WAVE5_BSBUF_FULL);
  1559. regVal |= (1<<INT_WAVE5_ENC_LOW_LATENCY);
  1560. #ifdef SUPPORT_SOURCE_RELEASE_INTERRUPT
  1561. regVal |= (1<<INT_WAVE5_ENC_SRC_RELEASE);
  1562. #endif
  1563. // decoder
  1564. regVal = (1<<INT_WAVE5_INIT_SEQ);
  1565. regVal |= (1<<INT_WAVE5_DEC_PIC);
  1566. regVal |= (1<<INT_WAVE5_BSBUF_EMPTY);
  1567. VpuWriteReg(coreIdx, W5_VPU_VINT_ENABLE, regVal);
  1568. regVal = ((WAVE5_PROC_AXI_ID<<28) |
  1569. (WAVE5_PRP_AXI_ID<<24) |
  1570. (WAVE5_FBD_Y_AXI_ID<<20) |
  1571. (WAVE5_FBC_Y_AXI_ID<<16) |
  1572. (WAVE5_FBD_C_AXI_ID<<12) |
  1573. (WAVE5_FBC_C_AXI_ID<<8) |
  1574. (WAVE5_PRI_AXI_ID<<4) |
  1575. (WAVE5_SEC_AXI_ID<<0));
  1576. vdi_fio_write_register(coreIdx, W5_BACKBONE_PROG_AXI_ID, regVal);
  1577. if (vdi_get_sram_memory(coreIdx, &vb) < 0) // get SRAM base/size
  1578. return RETCODE_INSUFFICIENT_RESOURCE;
  1579. VpuWriteReg(coreIdx, W5_ADDR_SEC_AXI, vb.phys_addr);
  1580. VpuWriteReg(coreIdx, W5_SEC_AXI_SIZE, vb.size);
  1581. VpuWriteReg(coreIdx, W5_VPU_BUSY_STATUS, 1);
  1582. VpuWriteReg(coreIdx, W5_COMMAND, W5_INIT_VPU);
  1583. VpuWriteReg(coreIdx, W5_VPU_HOST_INT_REQ, 1);
  1584. VpuWriteReg(coreIdx, W5_VPU_REMAP_CORE_START, 1);
  1585. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
  1586. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1587. }
  1588. regVal = VpuReadReg(coreIdx, W5_RET_SUCCESS);
  1589. if (regVal == 0)
  1590. return RETCODE_FAILURE;
  1591. }
  1592. SetupWave5Properties(coreIdx);
  1593. return RETCODE_SUCCESS;
  1594. }
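/*
 * Descriptive note on the function below: Wave5VpuSleepWake either issues W5_SLEEP_VPU
 * (iSleepWake == 1) or runs the restore path: block reset, remap/code/temp setup, interrupt
 * enables, and W5_WAKEUP_VPU (or W5_INIT_VPU when reset == TRUE).
 */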
  1595. RetCode Wave5VpuSleepWake(Uint32 coreIdx, int iSleepWake, const Uint16* code, Uint32 size, BOOL reset)
  1596. {
  1597. Uint32 regVal;
  1598. vpu_buffer_t vb;
  1599. PhysicalAddress codeBase, tempBase;
  1600. Uint32 codeSize, tempSize;
  1601. Uint32 remapSize;
1602. if (iSleepWake == 1) // save (enter sleep)
  1603. {
  1604. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
  1605. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1606. }
  1607. VpuWriteReg(coreIdx, W5_VPU_BUSY_STATUS, 1);
  1608. VpuWriteReg(coreIdx, W5_COMMAND, W5_SLEEP_VPU);
  1609. VpuWriteReg(coreIdx, W5_VPU_HOST_INT_REQ, 1);
  1610. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1)
  1611. {
  1612. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1613. }
  1614. regVal = VpuReadReg(coreIdx, W5_RET_SUCCESS);
  1615. if (regVal == 0)
  1616. {
  1617. APIDPRINT("SLEEP_VPU failed [0x%x]", VpuReadReg(coreIdx, W5_RET_FAIL_REASON));
  1618. return RETCODE_FAILURE;
  1619. }
  1620. }
1621. else // restore (wake up)
  1622. {
  1623. Uint32 hwOption = 0;
  1624. vdi_get_common_memory(coreIdx, &vb);
  1625. codeBase = vb.phys_addr;
  1626. /* ALIGN TO 4KB */
  1627. codeSize = (WAVE5_MAX_CODE_BUF_SIZE&~0xfff);
  1628. if (codeSize < size*2) {
  1629. return RETCODE_INSUFFICIENT_RESOURCE;
  1630. }
  1631. tempBase = vb.phys_addr + WAVE5_TEMPBUF_OFFSET;
  1632. tempSize = WAVE5_TEMPBUF_SIZE;
  1633. regVal = 0;
  1634. VpuWriteReg(coreIdx, W5_PO_CONF, regVal);
  1635. /* SW_RESET_SAFETY */
  1636. regVal = W5_RST_BLOCK_ALL;
  1637. VpuWriteReg(coreIdx, W5_VPU_RESET_REQ, regVal); // Reset All blocks
  1638. /* Waiting reset done */
  1639. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_RESET_STATUS) == -1) {
  1640. VpuWriteReg(coreIdx, W5_VPU_RESET_REQ, 0);
  1641. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1642. }
  1643. VpuWriteReg(coreIdx, W5_VPU_RESET_REQ, 0);
  1644. /* remap page size */
  1645. remapSize = (codeSize >> 12) &0x1ff;
  1646. regVal = 0x80000000 | (WAVE5_UPPER_PROC_AXI_ID<<20) | (W5_REMAP_CODE_INDEX<<12) | (0 << 16) | (1<<11) | remapSize;
  1647. VpuWriteReg(coreIdx, W5_VPU_REMAP_CTRL, regVal);
  1648. VpuWriteReg(coreIdx, W5_VPU_REMAP_VADDR, 0x00000000); /* DO NOT CHANGE! */
  1649. VpuWriteReg(coreIdx, W5_VPU_REMAP_PADDR, codeBase);
  1650. VpuWriteReg(coreIdx, W5_ADDR_CODE_BASE, codeBase);
  1651. VpuWriteReg(coreIdx, W5_CODE_SIZE, codeSize);
  1652. VpuWriteReg(coreIdx, W5_CODE_PARAM, (WAVE5_UPPER_PROC_AXI_ID<<4) | 0);
  1653. VpuWriteReg(coreIdx, W5_ADDR_TEMP_BASE, tempBase);
  1654. VpuWriteReg(coreIdx, W5_TEMP_SIZE, tempSize);
  1655. VpuWriteReg(coreIdx, W5_TIMEOUT_CNT, 0);
  1656. VpuWriteReg(coreIdx, W5_HW_OPTION, hwOption);
  1657. // encoder
  1658. regVal = (1<<INT_WAVE5_ENC_SET_PARAM);
  1659. regVal |= (1<<INT_WAVE5_ENC_PIC);
  1660. regVal |= (1<<INT_WAVE5_BSBUF_FULL);
  1661. regVal |= (1<<INT_WAVE5_ENC_LOW_LATENCY);
  1662. #ifdef SUPPORT_SOURCE_RELEASE_INTERRUPT
  1663. regVal |= (1<<INT_WAVE5_ENC_SRC_RELEASE);
  1664. #endif
  1665. // decoder
  1666. regVal = (1<<INT_WAVE5_INIT_SEQ);
  1667. regVal |= (1<<INT_WAVE5_DEC_PIC);
  1668. regVal |= (1<<INT_WAVE5_BSBUF_EMPTY);
  1669. VpuWriteReg(coreIdx, W5_VPU_VINT_ENABLE, regVal);
  1670. regVal = ((WAVE5_PROC_AXI_ID<<28) |
  1671. (WAVE5_PRP_AXI_ID<<24) |
  1672. (WAVE5_FBD_Y_AXI_ID<<20) |
  1673. (WAVE5_FBC_Y_AXI_ID<<16) |
  1674. (WAVE5_FBD_C_AXI_ID<<12) |
  1675. (WAVE5_FBC_C_AXI_ID<<8) |
  1676. (WAVE5_PRI_AXI_ID<<4) |
  1677. (WAVE5_SEC_AXI_ID<<0));
  1678. vdi_fio_write_register(coreIdx, W5_BACKBONE_PROG_AXI_ID, regVal);
  1679. if (vdi_get_sram_memory(coreIdx, &vb) < 0) // get SRAM base/size
  1680. return RETCODE_INSUFFICIENT_RESOURCE;
  1681. VpuWriteReg(coreIdx, W5_ADDR_SEC_AXI, vb.phys_addr);
  1682. VpuWriteReg(coreIdx, W5_SEC_AXI_SIZE, vb.size);
  1683. VpuWriteReg(coreIdx, W5_VPU_BUSY_STATUS, 1);
  1684. VpuWriteReg(coreIdx, W5_COMMAND, (reset==TRUE ? W5_INIT_VPU : W5_WAKEUP_VPU));
  1685. VpuWriteReg(coreIdx, W5_VPU_REMAP_CORE_START, 1);
  1686. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
  1687. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1688. }
  1689. regVal = VpuReadReg(coreIdx, W5_RET_SUCCESS);
  1690. if (regVal == 0) {
  1691. return RETCODE_FAILURE;
  1692. }
  1693. VpuWriteReg(coreIdx, W5_VPU_VINT_REASON_CLR, 0xffff);
  1694. VpuWriteReg(coreIdx, W5_VPU_VINT_REASON_USR, 0);
  1695. VpuWriteReg(coreIdx, W5_VPU_VINT_CLEAR, 0x1);
  1696. }
  1697. return RETCODE_SUCCESS;
  1698. }
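/*
 * Descriptive note on the function below: Wave5VpuReset probes the backbone configuration
 * bits, quiesces the bus (per-VCORE, combined backbone, or GDI), optionally saves state via
 * Wave5VpuSleepWake() for SW_RESET_SAFETY, asserts W5_VPU_RESET_REQ for the selected mode,
 * clears the bus-control registers, and wakes the VPU again for SAFETY/FORCE resets.
 */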
  1699. RetCode Wave5VpuReset(Uint32 coreIdx, SWResetMode resetMode)
  1700. {
  1701. Uint32 val = 0;
  1702. RetCode ret = RETCODE_SUCCESS;
  1703. VpuAttr* pAttr = &g_VpuCoreAttributes[coreIdx];
1704. // The VPU may not send a response here; force the BUSY flag to 0.
  1705. VpuWriteReg(coreIdx, W5_VPU_BUSY_STATUS, 0);
  1706. val = VpuReadReg(coreIdx, W5_VPU_RET_VPU_CONFIG0);
  1707. if (((val>>16) & 0x1) == 0x01) {
  1708. pAttr->supportBackbone = TRUE;
  1709. }
  1710. if (((val>>22) & 0x1) == 0x01) {
  1711. pAttr->supportVcoreBackbone = TRUE;
  1712. }
  1713. val = VpuReadReg(coreIdx, W5_VPU_RET_VPU_CONFIG1);
  1714. if (((val>>26) & 0x1) == 0x01) {
  1715. pAttr->supportDualCore = TRUE;
  1716. }
  1717. // Waiting for completion of bus transaction
  1718. if (pAttr->supportBackbone == TRUE) {
  1719. if (pAttr->supportDualCore == TRUE) {
  1720. // check CORE0
  1721. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x7);
  1722. if (vdi_wait_bus_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_BACKBONE_BUS_STATUS_VCORE0) == -1) {
  1723. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x00);
  1724. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1725. }
  1726. // check CORE1
  1727. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE1, 0x7);
  1728. if (vdi_wait_bus_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_BACKBONE_BUS_STATUS_VCORE1) == -1) {
  1729. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE1, 0x00);
  1730. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1731. }
  1732. }
  1733. else {
  1734. if (pAttr->supportVcoreBackbone == TRUE) {
  1735. // Step1 : disable request
  1736. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x7);
  1737. // Step2 : Waiting for completion of bus transaction
  1738. if (vdi_wait_bus_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_BACKBONE_BUS_STATUS_VCORE0) == -1) {
  1739. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x00);
  1740. VLOG(ERR, "VpuReset Error = %d\n", pAttr->supportBackbone);
  1741. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1742. }
  1743. }
  1744. else {
  1745. // Step1 : disable request
  1746. vdi_fio_write_register(coreIdx, W5_COMBINED_BACKBONE_BUS_CTRL, 0x7);
  1747. // Step2 : Waiting for completion of bus transaction
  1748. if (vdi_wait_bus_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_COMBINED_BACKBONE_BUS_STATUS) == -1) {
  1749. vdi_fio_write_register(coreIdx, W5_COMBINED_BACKBONE_BUS_CTRL, 0x00);
  1750. VLOG(ERR, "VpuReset Error = %d\n", pAttr->supportBackbone);
  1751. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1752. }
  1753. }
  1754. }
  1755. }
  1756. else {
  1757. // Step1 : disable request
  1758. vdi_fio_write_register(coreIdx, W5_GDI_BUS_CTRL, 0x100);
  1759. // Step2 : Waiting for completion of bus transaction
  1760. if (vdi_wait_bus_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_GDI_BUS_STATUS) == -1) {
  1761. vdi_fio_write_register(coreIdx, W5_GDI_BUS_CTRL, 0x00);
  1762. VLOG(ERR, "VpuReset Error = %d\n", pAttr->supportBackbone);
  1763. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1764. }
  1765. }
  1766. if (resetMode == SW_RESET_SAFETY) {
  1767. if ((ret=Wave5VpuSleepWake(coreIdx, TRUE, NULL, 0, TRUE)) != RETCODE_SUCCESS) {
  1768. return ret;
  1769. }
  1770. }
  1771. switch (resetMode) {
  1772. case SW_RESET_ON_BOOT:
  1773. case SW_RESET_FORCE:
  1774. case SW_RESET_SAFETY:
  1775. val = W5_RST_BLOCK_ALL;
  1776. break;
  1777. default:
  1778. return RETCODE_INVALID_PARAM;
  1779. }
  1780. if (val) {
  1781. VpuWriteReg(coreIdx, W5_VPU_RESET_REQ, val);
  1782. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_RESET_STATUS) == -1) {
  1783. VpuWriteReg(coreIdx, W5_VPU_RESET_REQ, 0);
  1784. vdi_log(coreIdx, W5_RESET_VPU, 2);
  1785. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1786. }
  1787. VpuWriteReg(coreIdx, W5_VPU_RESET_REQ, 0);
  1788. }
1789. // Step3 : GDI_BUS_CTRL must be cleared after SW_RESET completes
  1790. if (pAttr->supportBackbone == TRUE) {
  1791. if (pAttr->supportDualCore == TRUE) {
  1792. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x00);
  1793. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE1, 0x00);
  1794. }
  1795. else {
  1796. if (pAttr->supportVcoreBackbone == TRUE) {
  1797. vdi_fio_write_register(coreIdx, W5_BACKBONE_BUS_CTRL_VCORE0, 0x00);
  1798. }
  1799. else {
  1800. vdi_fio_write_register(coreIdx, W5_COMBINED_BACKBONE_BUS_CTRL, 0x00);
  1801. }
  1802. }
  1803. }
  1804. else {
  1805. vdi_fio_write_register(coreIdx, W5_GDI_BUS_CTRL, 0x00);
  1806. }
  1807. if (resetMode == SW_RESET_SAFETY || resetMode == SW_RESET_FORCE ) {
  1808. ret = Wave5VpuSleepWake(coreIdx, FALSE, NULL, 0, TRUE);
  1809. }
  1810. return ret;
  1811. }
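/*
 * Descriptive note on the function below: Wave5VpuDecFiniSeq destroys the decoder instance
 * with W5_DESTROY_INSTANCE and maps W5_RET_FAIL_REASON to a RetCode, notably returning
 * RETCODE_VPU_STILL_RUNNING when commands are still queued.
 */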
  1812. RetCode Wave5VpuDecFiniSeq(CodecInst* instance)
  1813. {
  1814. RetCode ret = RETCODE_SUCCESS;
  1815. Wave5BitIssueCommand(instance, W5_DESTROY_INSTANCE);
  1816. if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1)
  1817. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1818. if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == FALSE) {
  1819. Uint32 regVal;
  1820. regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
  1821. if (regVal != WAVE5_QUEUEING_FAIL && regVal != WAVE5_VPU_STILL_RUNNING)
  1822. VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);
  1823. if (regVal == WAVE5_VPU_STILL_RUNNING)
  1824. ret = RETCODE_VPU_STILL_RUNNING;
  1825. else if (regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
  1826. ret = RETCODE_MEMORY_ACCESS_VIOLATION;
  1827. else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
  1828. ret = RETCODE_VPU_RESPONSE_TIMEOUT;
  1829. else if (regVal == WAVE5_SYSERR_DEC_VLC_BUF_FULL)
  1830. ret = RETCODE_VLC_BUF_FULL;
  1831. else if (regVal == WAVE5_ERROR_FW_FATAL)
  1832. ret = RETCODE_ERROR_FW_FATAL;
  1833. else
  1834. ret = RETCODE_FAILURE;
  1835. }
  1836. return ret;
  1837. }
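/*
 * Descriptive note on the function below: Wave5VpuDecSetBitstreamFlag records the
 * end-of-stream flag and, in interrupt (ring-buffer) mode, pushes the new write pointer and
 * the stream-end/explicit-end bits to the VPU with W5_UPDATE_BS.
 */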
  1838. RetCode Wave5VpuDecSetBitstreamFlag(CodecInst* instance, BOOL running, BOOL eos, BOOL explictEnd)
  1839. {
  1840. DecInfo* pDecInfo = &instance->CodecInfo->decInfo;
  1841. BitStreamMode bsMode = (BitStreamMode)pDecInfo->openParam.bitstreamMode;
  1842. pDecInfo->streamEndflag = (eos == 1) ? TRUE : FALSE;
  1843. if (bsMode == BS_MODE_INTERRUPT) {
  1844. if (pDecInfo->streamEndflag == TRUE) explictEnd = TRUE;
  1845. VpuWriteReg(instance->coreIdx, W5_BS_OPTION, (pDecInfo->streamEndflag<<1) | explictEnd);
  1846. VpuWriteReg(instance->coreIdx, W5_BS_WR_PTR, pDecInfo->streamWrPtr);
  1847. Wave5BitIssueCommand(instance, W5_UPDATE_BS);
  1848. if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
  1849. return RETCODE_VPU_RESPONSE_TIMEOUT;
  1850. }
  1851. if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == 0) {
  1852. return RETCODE_FAILURE;
  1853. }
  1854. }
  1855. return RETCODE_SUCCESS;
  1856. }
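/*
 * Descriptive note on the two functions below: Wave5DecClrDispFlag clears one display-flag bit
 * via the UPDATE_DISP_FLAG query and refreshes the cached frameDisplayFlag; Wave5DecSetDispFlag
 * is the symmetric set operation.
 */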
  1857. RetCode Wave5DecClrDispFlag(CodecInst* instance, Uint32 index)
  1858. {
  1859. RetCode ret = RETCODE_SUCCESS;
  1860. DecInfo * pDecInfo;
  1861. pDecInfo = &instance->CodecInfo->decInfo;
  1862. VpuWriteReg(instance->coreIdx, W5_CMD_DEC_CLR_DISP_IDC, (1<<index));
  1863. VpuWriteReg(instance->coreIdx, W5_CMD_DEC_SET_DISP_IDC, 0);
  1864. ret = SendQuery(instance, UPDATE_DISP_FLAG);
  1865. if (ret != RETCODE_SUCCESS) {
  1866. VLOG(ERR, "Wave5DecClrDispFlag QUERY FAILURE\n");
  1867. return RETCODE_QUERY_FAILURE;
  1868. }
  1869. pDecInfo->frameDisplayFlag = VpuReadReg(instance->coreIdx, pDecInfo->frameDisplayFlagRegAddr);
  1870. return RETCODE_SUCCESS;
  1871. }
  1872. RetCode Wave5DecSetDispFlag(CodecInst* instance, Uint32 index)
  1873. {
  1874. RetCode ret = RETCODE_SUCCESS;
  1875. VpuWriteReg(instance->coreIdx, W5_CMD_DEC_CLR_DISP_IDC, 0);
  1876. VpuWriteReg(instance->coreIdx, W5_CMD_DEC_SET_DISP_IDC, (1<<index));
  1877. ret = SendQuery(instance, UPDATE_DISP_FLAG);
  1878. return ret;
  1879. }
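/*
 * Descriptive note on the function below: Wave5VpuWaitInterrupt waits for a VPU interrupt and,
 * in the non-SUPPORT_MULTI_INST_INTR build, demultiplexes it by instance: it inspects the
 * W5_RET_*_INST bitmaps, claims at most one reason for this instance, clears this instance's
 * bit, and writes the unclaimed bits back to W5_VPU_VINT_REASON so other instances still see
 * their interrupts. It returns the claimed reason mask, or -1 if nothing belonged to this
 * instance.
 */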
  1880. Int32 Wave5VpuWaitInterrupt(CodecInst* instance, Int32 timeout, BOOL pending)
  1881. {
  1882. Int32 reason = -1;
  1883. #ifdef SUPPORT_MULTI_INST_INTR
  1884. #else
  1885. Int32 orgReason = -1;
1886. Int32 remain_intr = -1; // written back to VPU_VINT_REASON for the remaining (unclaimed) interrupts.
  1887. Int32 ownInt = 0;
  1888. Uint32 regVal;
  1889. #ifdef SUPPORT_SOURCE_RELEASE_INTERRUPT
  1890. Uint32 IntrMask = ((1 << INT_WAVE5_BSBUF_EMPTY) | (1 << INT_WAVE5_DEC_PIC) | (1 << INT_WAVE5_INIT_SEQ) | (1 << INT_WAVE5_ENC_SET_PARAM) | (1 << INT_WAVE5_ENC_SRC_RELEASE));
  1891. #else
  1892. Uint32 IntrMask = ((1 << INT_WAVE5_BSBUF_EMPTY) | (1 << INT_WAVE5_DEC_PIC) | (1 << INT_WAVE5_INIT_SEQ) | (1 << INT_WAVE5_ENC_SET_PARAM));
  1893. #endif
  1894. #endif /* SUPPORT_MULTI_INST_INTR */
  1895. #ifdef SUPPORT_MULTI_INST_INTR
1896. // wait (up to timeout) for an interrupt routed to this instance
  1897. reason = vdi_wait_interrupt(instance->coreIdx, instance->instIndex, timeout);
  1898. #else
  1899. EnterLock(instance->coreIdx);
1900. // handle one interrupt for the current instance even if more than one interrupt is pending.
  1901. if ((reason = vdi_wait_interrupt(instance->coreIdx, timeout)) > 0) {
  1902. remain_intr = reason;
  1903. if (reason & (1 << INT_WAVE5_BSBUF_EMPTY)) {
  1904. regVal = VpuReadReg(instance->coreIdx, W5_RET_BS_EMPTY_INST);
  1905. regVal = (regVal & 0xffff);
  1906. if (regVal & (1 << instance->instIndex)) {
  1907. ownInt = 1;
  1908. reason = (1 << INT_WAVE5_BSBUF_EMPTY);
  1909. remain_intr &= ~(Uint32)reason;
  1910. regVal = regVal & ~(1UL << instance->instIndex);
  1911. VpuWriteReg(instance->coreIdx, W5_RET_BS_EMPTY_INST, regVal);
  1912. }
  1913. }
  1914. if (reason & (1 << INT_WAVE5_INIT_SEQ)) {
  1915. regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_CMD_DONE_INST);
  1916. regVal = (regVal & 0xffff);
  1917. if (regVal & (1 << instance->instIndex)) {
  1918. ownInt = 1;
  1919. reason = (1 << INT_WAVE5_INIT_SEQ);
  1920. remain_intr &= ~(Uint32)reason;
  1921. regVal = regVal & ~(1UL << instance->instIndex);
  1922. VpuWriteReg(instance->coreIdx, W5_RET_QUEUE_CMD_DONE_INST, regVal);
  1923. }
  1924. }
  1925. if (reason & (1 << INT_WAVE5_DEC_PIC)) {
  1926. regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_CMD_DONE_INST);
  1927. regVal = (regVal & 0xffff);
  1928. if (regVal & (1 << instance->instIndex)) {
  1929. ownInt = 1;
  1930. orgReason = reason;
  1931. reason = (1 << INT_WAVE5_DEC_PIC);
  1932. remain_intr &= ~(Uint32)reason;
1933. /* Clear the low-latency interrupt if both interrupts have occurred */
  1934. if (orgReason & (1 << INT_WAVE5_ENC_LOW_LATENCY)) {
  1935. regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_CMD_DONE_INST);
  1936. regVal = (regVal>>16);
  1937. if (regVal & (1 << instance->instIndex)) {
  1938. remain_intr &= ~(1<<INT_WAVE5_ENC_LOW_LATENCY);
  1939. Wave5VpuClearInterrupt(instance->coreIdx, 1<<INT_WAVE5_ENC_LOW_LATENCY);
  1940. }
  1941. }
  1942. regVal = regVal & ~(1UL << instance->instIndex);
  1943. VpuWriteReg(instance->coreIdx, W5_RET_QUEUE_CMD_DONE_INST, regVal);
  1944. }
  1945. }
  1946. if (reason & (1 << INT_WAVE5_ENC_SET_PARAM)) {
  1947. regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_CMD_DONE_INST);
  1948. regVal = (regVal & 0xffff);
  1949. if (regVal & (1 << instance->instIndex)) {
  1950. ownInt = 1;
  1951. reason = (1 << INT_WAVE5_ENC_SET_PARAM);
  1952. remain_intr &= ~(Uint32)reason;
  1953. regVal = regVal & ~(1UL << instance->instIndex);
  1954. VpuWriteReg(instance->coreIdx, W5_RET_QUEUE_CMD_DONE_INST, regVal);
  1955. }
  1956. }
  1957. #ifdef SUPPORT_SOURCE_RELEASE_INTERRUPT
  1958. if (reason & (1 << INT_WAVE5_ENC_SRC_RELEASE)) {
  1959. regVal = VpuReadReg(instance->coreIdx, W5_RET_RELEASED_SRC_INSTANCE);
  1960. regVal = (regVal & 0xffff);
  1961. if (regVal & (1 << instance->instIndex)) {
  1962. ownInt = 1;
  1963. reason = (1 << INT_WAVE5_ENC_SRC_RELEASE);
  1964. remain_intr &= ~(Uint32)reason;
  1965. regVal = regVal & ~(1UL << instance->instIndex);
  1966. VpuWriteReg(instance->coreIdx, W5_RET_RELEASED_SRC_INSTANCE, regVal);
  1967. }
  1968. }
  1969. #endif
  1970. if (reason & (1 << INT_WAVE5_ENC_LOW_LATENCY)) {
  1971. regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_CMD_DONE_INST);
  1972. regVal = (regVal>>16);
  1973. if (regVal & (1 << instance->instIndex)) {
  1974. ownInt = 1;
  1975. reason = (1 << INT_WAVE5_ENC_LOW_LATENCY);
  1976. remain_intr &= ~(Uint32)reason;
  1977. regVal = regVal & ~(1UL << instance->instIndex);
  1978. regVal = (regVal << 16);
  1979. VpuWriteReg(instance->coreIdx, W5_RET_QUEUE_CMD_DONE_INST, regVal);
  1980. }
  1981. }
1982. if (reason & ~IntrMask) { // an interrupt not covered by IntrMask (not BSBUF_EMPTY/DEC_PIC/INIT_SEQ/ENC_SET_PARAM)
  1983. regVal = VpuReadReg(instance->coreIdx, W5_RET_SEQ_DONE_INSTANCE_INFO)&0xFF;
  1984. if (regVal == instance->instIndex) {
  1985. ownInt = 1;
  1986. reason = (reason & ~IntrMask);
  1987. remain_intr &= ~(Uint32)reason;
  1988. }
  1989. }
1990. VpuWriteReg(instance->coreIdx, W5_VPU_VINT_REASON, remain_intr); // write back the unclaimed interrupt bits so they are raised again next time.
1991. if (!ownInt)
1992. reason = -1; // none of the pending interrupts belonged to this instance
  1993. }
  1994. LeaveLock(instance->coreIdx);
  1995. #endif
  1996. return reason;
  1997. }
  1998. RetCode Wave5VpuClearInterrupt(Uint32 coreIdx, Uint32 flags)
  1999. {
  2000. Uint32 interruptReason;
  2001. interruptReason = VpuReadReg(coreIdx, W5_VPU_VINT_REASON_USR);
  2002. interruptReason &= ~flags;
  2003. VpuWriteReg(coreIdx, W5_VPU_VINT_REASON_USR, interruptReason);
  2004. return RETCODE_SUCCESS;
  2005. }
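/*
 * Descriptive note on the function below: Wave5VpuDecGetRdPtr queries GET_BS_RD_PTR and returns
 * the firmware-side bitstream read pointer. A hypothetical caller sketch (variable names are
 * assumed, not part of this file):
 *   PhysicalAddress rdPtr;
 *   if (Wave5VpuDecGetRdPtr(inst, &rdPtr) == RETCODE_SUCCESS)
 *       consumedBytes = rdPtr - bsBufferBase;
 */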
  2006. RetCode Wave5VpuDecGetRdPtr(CodecInst* instance, PhysicalAddress *rdPtr)
  2007. {
  2008. RetCode ret = RETCODE_SUCCESS;
  2009. ret = SendQuery(instance, GET_BS_RD_PTR);
  2010. if (ret != RETCODE_SUCCESS)
  2011. return RETCODE_QUERY_FAILURE;
  2012. *rdPtr = VpuReadReg(instance->coreIdx, W5_RET_QUERY_DEC_BS_RD_PTR);
  2013. return RETCODE_SUCCESS;
  2014. }
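/*
 * Descriptive note on the function below: Wave5VpuGetBwReport issues GET_BW_REPORT and copies
 * the per-master AXI read/write counters into VPUBWData; the counters are apparently reported
 * in 16-byte units, hence the *16 scaling.
 */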
  2015. RetCode Wave5VpuGetBwReport(CodecInst* instance, VPUBWData* bwMon)
  2016. {
  2017. RetCode ret = RETCODE_SUCCESS;
  2018. Int32 coreIdx;
  2019. coreIdx = instance->coreIdx;
  2020. ret = SendQuery(instance, GET_BW_REPORT);
  2021. if (ret != RETCODE_SUCCESS) {
  2022. if (VpuReadReg(coreIdx, W5_RET_FAIL_REASON) == WAVE5_RESULT_NOT_READY)
  2023. return RETCODE_REPORT_NOT_READY;
  2024. else
  2025. return RETCODE_QUERY_FAILURE;
  2026. }
  2027. bwMon->prpBwRead = VpuReadReg(coreIdx, RET_QUERY_BW_PRP_AXI_READ) * 16;
  2028. bwMon->prpBwWrite = VpuReadReg(coreIdx, RET_QUERY_BW_PRP_AXI_WRITE) * 16;
  2029. bwMon->fbdYRead = VpuReadReg(coreIdx, RET_QUERY_BW_FBD_Y_AXI_READ) * 16;
  2030. bwMon->fbcYWrite = VpuReadReg(coreIdx, RET_QUERY_BW_FBC_Y_AXI_WRITE) * 16;
  2031. bwMon->fbdCRead = VpuReadReg(coreIdx, RET_QUERY_BW_FBD_C_AXI_READ) * 16;
  2032. bwMon->fbcCWrite = VpuReadReg(coreIdx, RET_QUERY_BW_FBC_C_AXI_WRITE) * 16;
  2033. bwMon->priBwRead = VpuReadReg(coreIdx, RET_QUERY_BW_PRI_AXI_READ) * 16;
  2034. bwMon->priBwWrite = VpuReadReg(coreIdx, RET_QUERY_BW_PRI_AXI_WRITE) * 16;
  2035. bwMon->secBwRead = VpuReadReg(coreIdx, RET_QUERY_BW_SEC_AXI_READ) * 16;
  2036. bwMon->secBwWrite = VpuReadReg(coreIdx, RET_QUERY_BW_SEC_AXI_WRITE) * 16;
  2037. bwMon->procBwRead = VpuReadReg(coreIdx, RET_QUERY_BW_PROC_AXI_READ) * 16;
  2038. bwMon->procBwWrite = VpuReadReg(coreIdx, RET_QUERY_BW_PROC_AXI_WRITE) * 16;
  2039. return RETCODE_SUCCESS;
  2040. }
  2041. RetCode Wave5VpuGetDebugInfo(CodecInst* instance, VPUDebugInfo* info)
  2042. {
  2043. RetCode ret = RETCODE_SUCCESS;
  2044. Int32 coreIdx;
  2045. coreIdx = instance->coreIdx;
  2046. ret = SendQuery(instance, GET_DEBUG_INFO);
  2047. if (ret != RETCODE_SUCCESS) {
  2048. if (VpuReadReg(coreIdx, W5_RET_FAIL_REASON) == WAVE5_RESULT_NOT_READY)
  2049. return RETCODE_REPORT_NOT_READY;
  2050. else
  2051. return RETCODE_QUERY_FAILURE;
  2052. }
  2053. info->priReason = VpuReadReg(coreIdx, W5_RET_QUERY_DEBUG_PRI_REASON);
  2054. return RETCODE_SUCCESS;
  2055. }
  2056. /************************************************************************/
  2057. /* ENCODER functions */
  2058. /************************************************************************/
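/*
 * Descriptive note on the function below: Wave5VpuEncUpdateBS hands a bitstream buffer back to
 * the encoder: it programs the buffer start address and size, sets the BS_OPTION
 * endianness/line-buffer-interrupt bits, and issues W5_UPDATE_BS. The updateNewBsbuf argument
 * is not consumed in this path.
 */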
  2059. RetCode Wave5VpuEncUpdateBS(CodecInst* instance, BOOL updateNewBsbuf)
  2060. {
  2061. EncInfo* pEncInfo;
  2062. Int32 coreIdx;
  2063. Uint32 regVal = 0, bsEndian;
  2064. EncOpenParam* pOpenParam;
  2065. pEncInfo = VPU_HANDLE_TO_ENCINFO(instance);
  2066. pOpenParam = &pEncInfo->openParam;
  2067. coreIdx = instance->coreIdx;
  2068. regVal = vdi_convert_endian(coreIdx, pOpenParam->streamEndian);
  2069. bsEndian = (~regVal&VDI_128BIT_ENDIAN_MASK);
  2070. VpuWriteReg(coreIdx, W5_CMD_ENC_BS_START_ADDR, pEncInfo->streamRdPtr);
  2071. VpuWriteReg(coreIdx, W5_CMD_ENC_BS_SIZE, pEncInfo->streamBufSize);
  2072. VpuWriteReg(coreIdx, W5_BS_OPTION, (pEncInfo->lineBufIntEn<<6) | bsEndian);
  2073. Wave5BitIssueCommand(instance, W5_UPDATE_BS);
  2074. if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
  2075. return RETCODE_VPU_RESPONSE_TIMEOUT;
  2076. }
  2077. if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == 0) {
  2078. return RETCODE_FAILURE;
  2079. }
  2080. return RETCODE_SUCCESS;
  2081. }
  2082. RetCode Wave5VpuEncGetRdWrPtr(CodecInst* instance, PhysicalAddress *rdPtr, PhysicalAddress *wrPtr)
  2083. {
  2084. EncInfo* pEncInfo = VPU_HANDLE_TO_ENCINFO(instance);
  2085. RetCode ret = RETCODE_SUCCESS;
  2086. VpuWriteReg(instance->coreIdx, W5_CMD_ENC_REASON_SEL, pEncInfo->encWrPtrSel);
  2087. ret = SendQuery(instance, GET_BS_WR_PTR);
  2088. if (ret != RETCODE_SUCCESS)
  2089. return RETCODE_QUERY_FAILURE;
  2090. *rdPtr = VpuReadReg(instance->coreIdx, W5_RET_ENC_RD_PTR);
  2091. *wrPtr = VpuReadReg(instance->coreIdx, W5_RET_ENC_WR_PTR);
  2092. return RETCODE_SUCCESS;
  2093. }
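/*
 * Descriptive note on the function below: Wave5VpuBuildUpEncParam creates an encoder instance:
 * it selects the codec mode (HEVC/SVAC/AVC), carves the temp buffer out of common memory,
 * allocates and clears the work buffer, programs the bitstream and command-queue parameters,
 * and issues W5_CREATE_INSTANCE, initializing the EncInfo stream bookkeeping on success.
 */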
  2094. RetCode Wave5VpuBuildUpEncParam(CodecInst* instance, EncOpenParam* param)
  2095. {
  2096. RetCode ret = RETCODE_SUCCESS;
  2097. EncInfo* pEncInfo;
  2098. VpuAttr* pAttr = &g_VpuCoreAttributes[instance->coreIdx];
  2099. vpu_buffer_t vb;
  2100. Uint32 regVal = 0;
  2101. Uint32 bsEndian;
  2102. pEncInfo = VPU_HANDLE_TO_ENCINFO(instance);
  2103. pEncInfo->streamRdPtrRegAddr = W5_RET_ENC_RD_PTR;
  2104. pEncInfo->streamWrPtrRegAddr = W5_RET_ENC_WR_PTR;
  2105. pEncInfo->currentPC = W5_VCPU_CUR_PC;
  2106. pEncInfo->busyFlagAddr = W5_VPU_BUSY_STATUS;
  2107. if ((pAttr->supportEncoders&(1<<param->bitstreamFormat)) == 0)
  2108. return RETCODE_NOT_SUPPORTED_FEATURE;
  2109. if (param->bitstreamFormat == STD_HEVC)
  2110. instance->codecMode = W_HEVC_ENC;
  2111. else if (param->bitstreamFormat == STD_SVAC)
  2112. instance->codecMode = W_SVAC_ENC;
  2113. else if (param->bitstreamFormat == STD_AVC)
  2114. instance->codecMode = W_AVC_ENC;
  2115. vdi_get_common_memory(instance->coreIdx, &vb);
  2116. pEncInfo->vbTemp.base = vb.phys_addr + WAVE5_TEMPBUF_OFFSET;
  2117. pEncInfo->vbTemp.phys_addr = pEncInfo->vbTemp.base;
  2118. pEncInfo->vbTemp.virt_addr = pEncInfo->vbTemp.base;
  2119. pEncInfo->vbTemp.size = WAVE5_TEMPBUF_SIZE;
  2120. if (instance->productId == PRODUCT_ID_521)
  2121. pEncInfo->vbWork.size = WAVE521ENC_WORKBUF_SIZE;
  2122. if (vdi_allocate_dma_memory(instance->coreIdx, &pEncInfo->vbWork, ENC_WORK, instance->instIndex) < 0) {
  2123. pEncInfo->vbWork.base = 0;
  2124. pEncInfo->vbWork.phys_addr = 0;
  2125. pEncInfo->vbWork.size = 0;
  2126. pEncInfo->vbWork.virt_addr = 0;
  2127. return RETCODE_INSUFFICIENT_RESOURCE;
  2128. }
  2129. vdi_clear_memory(instance->coreIdx, pEncInfo->vbWork.phys_addr, pEncInfo->vbWork.size, 0);
  2130. VpuWriteReg(instance->coreIdx, W5_ADDR_WORK_BASE, pEncInfo->vbWork.phys_addr);
  2131. VpuWriteReg(instance->coreIdx, W5_WORK_SIZE, pEncInfo->vbWork.size);
  2132. regVal = vdi_convert_endian(instance->coreIdx, param->streamEndian);
  2133. bsEndian = (~regVal&VDI_128BIT_ENDIAN_MASK);
  2134. regVal = (param->lowLatencyMode<<7) | (param->lineBufIntEn<<6) | bsEndian;
  2135. VpuWriteReg(instance->coreIdx, W5_CMD_BS_PARAM, regVal);
  2136. VpuWriteReg(instance->coreIdx, W5_CMD_NUM_CQ_DEPTH_M1, COMMAND_QUEUE_DEPTH -1 );
  2137. regVal = 0;
  2138. #ifdef SUPPORT_SOURCE_RELEASE_INTERRUPT
  2139. regVal |= (param->srcReleaseIntEnable<<2);
  2140. #endif
  2141. VpuWriteReg(instance->coreIdx, W5_CMD_ENC_SRC_OPTIONS, regVal);
  2142. VpuWriteReg(instance->coreIdx, W5_VPU_BUSY_STATUS, 1);
  2143. VpuWriteReg(instance->coreIdx, W5_RET_SUCCESS, 0); //for debug
  2144. VpuWriteReg(instance->coreIdx, W5_CMD_ENC_VCORE_INFO, 1);
  2145. Wave5BitIssueCommand(instance, W5_CREATE_INSTANCE);
  2146. if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) { // Check QUEUE_DONE
  2147. if (instance->loggingEnable)
  2148. vdi_log(instance->coreIdx, W5_CREATE_INSTANCE, 2);
  2149. vdi_free_dma_memory(instance->coreIdx, &pEncInfo->vbWork, ENC_WORK, instance->instIndex);
  2150. return RETCODE_VPU_RESPONSE_TIMEOUT;
  2151. }
2152. if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == FALSE) { // failed to add the command to the VCPU queue
  2153. vdi_free_dma_memory(instance->coreIdx, &pEncInfo->vbWork, ENC_WORK, instance->instIndex);
  2154. regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
  2155. if (regVal != WAVE5_QUEUEING_FAIL)
  2156. VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);
  2157. if (regVal == 2)
  2158. ret = RETCODE_INVALID_SFS_INSTANCE;
  2159. else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
  2160. ret = RETCODE_VPU_RESPONSE_TIMEOUT;
  2161. else if (regVal == WAVE5_ERROR_FW_FATAL)
  2162. ret = RETCODE_ERROR_FW_FATAL;
  2163. else
  2164. ret = RETCODE_FAILURE;
  2165. }
  2166. pEncInfo->streamRdPtr = param->bitstreamBuffer;
  2167. pEncInfo->streamWrPtr = param->bitstreamBuffer;
  2168. pEncInfo->lineBufIntEn = param->lineBufIntEn;
  2169. pEncInfo->streamBufStartAddr = param->bitstreamBuffer;
  2170. pEncInfo->streamBufSize = param->bitstreamBufferSize;
  2171. pEncInfo->streamBufEndAddr = param->bitstreamBuffer + param->bitstreamBufferSize;
  2172. pEncInfo->streamBufTobeReadStartAddr = param->bitstreamBuffer;
  2173. pEncInfo->streamBufTobeReadSize = param->bitstreamBufferSize;
  2174. pEncInfo->streamBufTobeReadEndAddr = param->bitstreamBuffer + param->bitstreamBufferSize;
  2175. pEncInfo->stride = 0;
  2176. pEncInfo->vbFrame.size = 0;
  2177. pEncInfo->vbPPU.size = 0;
  2178. pEncInfo->frameAllocExt = 0;
  2179. pEncInfo->ppuAllocExt = 0;
  2180. pEncInfo->initialInfoObtained = 0;
  2181. pEncInfo->productCode = VpuReadReg(instance->coreIdx, W5_PRODUCT_NUMBER);
  2182. return ret;
  2183. }
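/*
 * Wave5VpuEncInitSeq() - program the sequence-level parameters.
 * When a custom GOP is selected, a first W5_ENC_SET_PARAM is issued with
 * OPT_CUSTOM_GOP; the main SET_PARAM then follows with OPT_COMMON, packing the
 * SPS/PPS, GOP, intra, rate-control, rotation/mirror, background-detection,
 * noise-reduction and custom-mode-decision fields into the W5_CMD_ENC_SEQ_*
 * registers.
 */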
  2184. RetCode Wave5VpuEncInitSeq(CodecInst* instance)
  2185. {
  2186. Int32 coreIdx, alignedWidth = 0, alignedHeight=0;
  2187. Uint32 regVal = 0, rotMirMode;
  2188. EncInfo* pEncInfo;
  2189. EncOpenParam* pOpenParam;
  2190. EncWaveParam* pParam;
  2191. coreIdx = instance->coreIdx;
  2192. pEncInfo = &instance->CodecInfo->encInfo;
  2193. pOpenParam = &pEncInfo->openParam;
  2194. pParam = &pOpenParam->EncStdParam.waveParam;
  2195. /*==============================================*/
  2196. /* OPT_CUSTOM_GOP */
  2197. /*==============================================*/
  2198. /*
  2199. * SET_PARAM + CUSTOM_GOP
2200. * The custom-GOP registers are programmed only when gopPresetIdx == PRESET_IDX_CUSTOM_GOP.
  2201. */
  2202. if (pParam->gopPresetIdx == PRESET_IDX_CUSTOM_GOP) {
  2203. int i=0, j = 0;
  2204. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_SET_PARAM_OPTION, OPT_CUSTOM_GOP);
  2205. VpuWriteReg(coreIdx, W5_CMD_ENC_CUSTOM_GOP_PARAM, pParam->gopParam.customGopSize);
  2206. for (i=0 ; i<pParam->gopParam.customGopSize; i++) {
  2207. VpuWriteReg(coreIdx, W5_CMD_ENC_CUSTOM_GOP_PIC_PARAM_0 + (i*4), (pParam->gopParam.picParam[i].picType<<0) |
  2208. (pParam->gopParam.picParam[i].pocOffset<<2) |
  2209. (pParam->gopParam.picParam[i].picQp<<6) |
  2210. (pParam->gopParam.picParam[i].numRefPicL0<<12) |
  2211. ((pParam->gopParam.picParam[i].refPocL0&0x1F)<<14) |
  2212. ((pParam->gopParam.picParam[i].refPocL1&0x1F)<<19) |
  2213. (pParam->gopParam.picParam[i].temporalId<<24));
  2214. }
  2215. for (j = i; j < MAX_GOP_NUM; j++) {
  2216. VpuWriteReg(coreIdx, W5_CMD_ENC_CUSTOM_GOP_PIC_PARAM_0 + (j*4), 0);
  2217. }
  2218. Wave5BitIssueCommand(instance, W5_ENC_SET_PARAM);
  2219. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
  2220. if (instance->loggingEnable)
  2221. vdi_log(coreIdx, W5_ENC_SET_PARAM, 2);
  2222. return RETCODE_VPU_RESPONSE_TIMEOUT;
  2223. }
  2224. }
  2225. /*======================================================================*/
  2226. /* OPT_COMMON */
  2227. /* : the last SET_PARAM command should be called with OPT_COMMON */
  2228. /*======================================================================*/
  2229. rotMirMode = 0;
  2230. /* CMD_ENC_ROT_MODE :
  2231. * | hor_mir | ver_mir | rot_angle | rot_en |
  2232. * [4] [3] [2:1] [0]
  2233. */
  2234. if (pEncInfo->rotationEnable == TRUE) {
  2235. switch (pEncInfo->rotationAngle) {
  2236. case 0:
  2237. rotMirMode |= 0x0;
  2238. break;
  2239. case 90:
  2240. rotMirMode |= 0x3;
  2241. break;
  2242. case 180:
  2243. rotMirMode |= 0x5;
  2244. break;
  2245. case 270:
  2246. rotMirMode |= 0x7;
  2247. break;
  2248. }
  2249. }
  2250. if (pEncInfo->mirrorEnable == TRUE) {
  2251. switch (pEncInfo->mirrorDirection) {
  2252. case MIRDIR_NONE :
  2253. rotMirMode |= 0x0;
  2254. break;
  2255. case MIRDIR_VER :
  2256. rotMirMode |= 0x9;
  2257. break;
  2258. case MIRDIR_HOR :
  2259. rotMirMode |= 0x11;
  2260. break;
  2261. case MIRDIR_HOR_VER :
  2262. rotMirMode |= 0x19;
  2263. break;
  2264. }
  2265. }
  2266. if (instance->codecMode == W_AVC_ENC) {
  2267. alignedWidth = (pOpenParam->picWidth + 15) & ~15;
  2268. alignedHeight= (pOpenParam->picHeight+ 15) & ~15;
  2269. }
  2270. else {
  2271. alignedWidth = (pOpenParam->picWidth + 31) & ~31;
  2272. alignedHeight= (pOpenParam->picHeight+ 31) & ~31;
  2273. }
2274. if (((rotMirMode != 0) && !((pEncInfo->rotationAngle == 180) && (pEncInfo->mirrorDirection == MIRDIR_HOR_VER))) && ((pOpenParam->picWidth != alignedWidth) || (pOpenParam->picHeight != alignedHeight))) // rotation/mirroring enabled (except 180-degree rotation with H+V mirror) and picture size not 32-aligned (16-aligned for AVC): recalculate crop info
  2275. CalcEncCropInfo(instance, pParam, rotMirMode, pOpenParam->picWidth, pOpenParam->picHeight);
  2276. /* SET_PARAM + COMMON */
  2277. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_SET_PARAM_OPTION, OPT_COMMON);
  2278. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_SRC_SIZE, pOpenParam->picHeight<<16 | pOpenParam->picWidth);
  2279. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MAP_ENDIAN, VDI_LITTLE_ENDIAN);
  2280. if (instance->codecMode == W_SVAC_ENC) {
  2281. regVal = (pParam->profile<<0) |
  2282. (pParam->level<<3) |
  2283. (pParam->internalBitDepth<<14) |
  2284. (pParam->useLongTerm<<21) |
  2285. (pParam->saoEnable<<24) |
  2286. (pParam->svcEnable<<28) |
  2287. (pParam->svcMode<<29);
  2288. }
  2289. else if (instance->codecMode == W_AVC_ENC) {
  2290. regVal = (pParam->profile<<0) |
  2291. (pParam->level<<3) |
  2292. (pParam->internalBitDepth<<14) |
  2293. (pParam->useLongTerm<<21) |
  2294. (pParam->scalingListEnable<<22) ;
  2295. }
  2296. else { // HEVC enc
  2297. regVal = (pParam->profile<<0) |
  2298. (pParam->level<<3) |
  2299. (pParam->tier<<12) |
  2300. (pParam->internalBitDepth<<14) |
  2301. (pParam->useLongTerm<<21) |
  2302. (pParam->scalingListEnable<<22) |
  2303. (pParam->tmvpEnable<<23) |
  2304. (pParam->saoEnable<<24) |
  2305. (pParam->skipIntraTrans<<25) |
  2306. (pParam->strongIntraSmoothEnable<<27) |
  2307. (pParam->enStillPicture<<30);
  2308. }
  2309. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_SPS_PARAM, regVal);
  2310. if (instance->codecMode == W_SVAC_ENC) {
  2311. regVal = (pParam->disableDeblk<<5) |
  2312. ((pParam->chromaDcQpOffset&0x1F)<<14) |
  2313. ((pParam->chromaAcQpOffset&0x1F)<<19) |
  2314. ((pParam->lumaDcQpOffset&0x1F)<<24);
  2315. }
  2316. else {
  2317. regVal = (pParam->losslessEnable) |
  2318. (pParam->constIntraPredFlag<<1) |
  2319. (pParam->lfCrossSliceBoundaryEnable<<2) |
  2320. ((pParam->weightPredEnable&1)<<3) |
  2321. (pParam->wppEnable<<4) |
  2322. (pParam->disableDeblk<<5) |
  2323. ((pParam->betaOffsetDiv2&0xF)<<6) |
  2324. ((pParam->tcOffsetDiv2&0xF)<<10) |
  2325. ((pParam->chromaCbQpOffset&0x1F)<<14) |
  2326. ((pParam->chromaCrQpOffset&0x1F)<<19) |
  2327. (pParam->transform8x8Enable<<29) |
  2328. (pParam->entropyCodingMode<<30);
  2329. }
  2330. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_PPS_PARAM, regVal);
  2331. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_GOP_PARAM, pParam->gopPresetIdx);
  2332. if (instance->codecMode == W_AVC_ENC) {
  2333. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_INTRA_PARAM, (pParam->intraQP<<0) | ((pParam->intraPeriod&0x7ff)<<6) | ((pParam->avcIdrPeriod&0x7ff)<<17) | ((pParam->forcedIdrHeaderEnable&3)<<28));
  2334. }
  2335. else {
  2336. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_INTRA_PARAM, (pParam->decodingRefreshType<<0) | (pParam->intraQP<<3) | (pParam->forcedIdrHeaderEnable<<9) | (pParam->intraPeriod<<16));
  2337. }
  2338. regVal = (pParam->useRecommendEncParam) |
  2339. (pParam->rdoSkip<<2) |
  2340. (pParam->lambdaScalingEnable<<3) |
  2341. (pParam->coefClearDisable<<4) |
  2342. (pParam->cuSizeMode<<5) |
  2343. (pParam->intraNxNEnable<<8) |
  2344. (pParam->maxNumMerge<<18) |
  2345. (pParam->customMDEnable<<20) |
  2346. (pParam->customLambdaEnable<<21) |
  2347. (pParam->monochromeEnable<<22);
  2348. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RDO_PARAM, regVal);
  2349. if (instance->codecMode == W_AVC_ENC) {
  2350. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_INTRA_REFRESH, pParam->intraMbRefreshArg<<16 | pParam->intraMbRefreshMode);
  2351. }
  2352. else {
  2353. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_INTRA_REFRESH, pParam->intraRefreshArg<<16 | pParam->intraRefreshMode);
  2354. }
  2355. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_FRAME_RATE, pOpenParam->frameRateInfo);
  2356. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_TARGET_RATE, pOpenParam->bitRate);
  2357. if (instance->codecMode == W_AVC_ENC) {
  2358. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_PARAM, (pOpenParam->rcEnable<<0) |
  2359. (pParam->mbLevelRcEnable<<1) |
  2360. (pParam->hvsQPEnable<<2) |
  2361. (pParam->hvsQpScale<<4) |
  2362. (pParam->bitAllocMode<<8) |
  2363. (pParam->roiEnable<<13) |
  2364. ((pParam->initialRcQp&0x3F)<<14) |
  2365. (pOpenParam->vbvBufferSize<<20));
  2366. }
  2367. else {
  2368. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_PARAM, (pOpenParam->rcEnable<<0) |
  2369. (pParam->cuLevelRCEnable<<1) |
  2370. (pParam->hvsQPEnable<<2) |
  2371. (pParam->hvsQpScale<<4) |
  2372. (pParam->bitAllocMode<<8) |
  2373. (pParam->roiEnable<<13) |
  2374. ((pParam->initialRcQp&0x3F)<<14) |
  2375. (pOpenParam->vbvBufferSize<<20));
  2376. }
  2377. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_WEIGHT_PARAM, pParam->rcWeightBuf<<8 | pParam->rcWeightParam);
  2378. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_MIN_MAX_QP, (pParam->minQpI<<0) |
  2379. (pParam->maxQpI<<6) |
  2380. (pParam->hvsMaxDeltaQp<<12));
  2381. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_INTER_MIN_MAX_QP, (pParam->minQpP << 0) |
  2382. (pParam->maxQpP << 6) |
  2383. (pParam->minQpB << 12) |
  2384. (pParam->maxQpB << 18));
  2385. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_BIT_RATIO_LAYER_0_3, (pParam->fixedBitRatio[0]<<0) |
  2386. (pParam->fixedBitRatio[1]<<8) |
  2387. (pParam->fixedBitRatio[2]<<16) |
  2388. (pParam->fixedBitRatio[3]<<24));
  2389. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_BIT_RATIO_LAYER_4_7, (pParam->fixedBitRatio[4]<<0) |
  2390. (pParam->fixedBitRatio[5]<<8) |
  2391. (pParam->fixedBitRatio[6]<<16) |
  2392. (pParam->fixedBitRatio[7]<<24));
  2393. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_ROT_PARAM, rotMirMode);
  2394. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_BG_PARAM, (pParam->bgDetectEnable) |
  2395. (pParam->bgThrDiff<<1) |
  2396. (pParam->bgThrMeanDiff<<10) |
  2397. (pParam->bgLambdaQp<<18) |
  2398. ((pParam->bgDeltaQp&0x1F)<<24) |
  2399. (instance->codecMode == W_AVC_ENC ? pParam->s2fmeDisable<<29 : 0));
  2400. if (instance->codecMode == W_HEVC_ENC || instance->codecMode == W_AVC_ENC) {
  2401. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_LAMBDA_ADDR, pParam->customLambdaAddr);
  2402. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CONF_WIN_TOP_BOT, pParam->confWinBot<<16 | pParam->confWinTop);
  2403. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CONF_WIN_LEFT_RIGHT, pParam->confWinRight<<16 | pParam->confWinLeft);
  2404. if (instance->codecMode == W_AVC_ENC) {
  2405. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_INDEPENDENT_SLICE, pParam->avcSliceArg<<16 | pParam->avcSliceMode);
  2406. }
  2407. else {
  2408. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_INDEPENDENT_SLICE, pParam->independSliceModeArg<<16 | pParam->independSliceMode);
  2409. }
  2410. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_USER_SCALING_LIST_ADDR, pParam->userScalingListAddr);
  2411. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_NUM_UNITS_IN_TICK, pParam->numUnitsInTick);
  2412. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_TIME_SCALE, pParam->timeScale);
  2413. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_NUM_TICKS_POC_DIFF_ONE, pParam->numTicksPocDiffOne);
  2414. }
  2415. if (instance->codecMode == W_HEVC_ENC) {
2416. // The following parameters apply to HEVC only; the SVAC encoder cannot configure them.
  2417. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_PU04, (pParam->pu04DeltaRate&0xFF) |
  2418. ((pParam->pu04IntraPlanarDeltaRate&0xFF)<<8) |
  2419. ((pParam->pu04IntraDcDeltaRate&0xFF)<<16) |
  2420. ((pParam->pu04IntraAngleDeltaRate&0xFF)<<24));
  2421. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_PU08, (pParam->pu08DeltaRate&0xFF) |
  2422. ((pParam->pu08IntraPlanarDeltaRate&0xFF)<<8) |
  2423. ((pParam->pu08IntraDcDeltaRate&0xFF)<<16) |
  2424. ((pParam->pu08IntraAngleDeltaRate&0xFF)<<24));
  2425. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_PU16, (pParam->pu16DeltaRate&0xFF) |
  2426. ((pParam->pu16IntraPlanarDeltaRate&0xFF)<<8) |
  2427. ((pParam->pu16IntraDcDeltaRate&0xFF)<<16) |
  2428. ((pParam->pu16IntraAngleDeltaRate&0xFF)<<24));
  2429. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_PU32, (pParam->pu32DeltaRate&0xFF) |
  2430. ((pParam->pu32IntraPlanarDeltaRate&0xFF)<<8) |
  2431. ((pParam->pu32IntraDcDeltaRate&0xFF)<<16) |
  2432. ((pParam->pu32IntraAngleDeltaRate&0xFF)<<24));
  2433. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_CU08, (pParam->cu08IntraDeltaRate&0xFF) |
  2434. ((pParam->cu08InterDeltaRate&0xFF)<<8) |
  2435. ((pParam->cu08MergeDeltaRate&0xFF)<<16));
  2436. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_CU16, (pParam->cu16IntraDeltaRate&0xFF) |
  2437. ((pParam->cu16InterDeltaRate&0xFF)<<8) |
  2438. ((pParam->cu16MergeDeltaRate&0xFF)<<16));
  2439. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_CU32, (pParam->cu32IntraDeltaRate&0xFF) |
  2440. ((pParam->cu32InterDeltaRate&0xFF)<<8) |
  2441. ((pParam->cu32MergeDeltaRate&0xFF)<<16));
  2442. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_DEPENDENT_SLICE, pParam->dependSliceModeArg<<16 | pParam->dependSliceMode);
  2443. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_NR_PARAM, (pParam->nrYEnable<<0) |
  2444. (pParam->nrCbEnable<<1) |
  2445. (pParam->nrCrEnable<<2) |
  2446. (pParam->nrNoiseEstEnable<<3)|
  2447. (pParam->nrNoiseSigmaY<<4) |
  2448. (pParam->nrNoiseSigmaCb<<12) |
  2449. (pParam->nrNoiseSigmaCr<<20));
  2450. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_NR_WEIGHT, (pParam->nrIntraWeightY<<0) |
  2451. (pParam->nrIntraWeightCb<<5) |
  2452. (pParam->nrIntraWeightCr<<10)|
  2453. (pParam->nrInterWeightY<<15) |
  2454. (pParam->nrInterWeightCb<<20)|
  2455. (pParam->nrInterWeightCr<<25));
  2456. }
  2457. #ifdef SUPPORT_LOOK_AHEAD_RC
  2458. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_LOOK_AHEAD_RC, (pParam->larcEnable<<0) |
  2459. ((pParam->larcPass-1)<<1) |
  2460. (pParam->larcSize<<2) |
  2461. (pParam->larcWeight<<9));
  2462. #endif
  2463. Wave5BitIssueCommand(instance, W5_ENC_SET_PARAM);
  2464. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
  2465. if (instance->loggingEnable)
  2466. vdi_log(coreIdx, W5_ENC_SET_PARAM, 2);
  2467. return RETCODE_VPU_RESPONSE_TIMEOUT;
  2468. }
  2469. if (VpuReadReg(coreIdx, W5_RET_SUCCESS) == 0) {
  2470. regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
  2471. if (regVal != WAVE5_QUEUEING_FAIL)
  2472. VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);
  2473. if ( regVal == WAVE5_QUEUEING_FAIL)
  2474. return RETCODE_QUEUEING_FAILURE;
  2475. else if (regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
  2476. return RETCODE_MEMORY_ACCESS_VIOLATION;
  2477. else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
  2478. return RETCODE_VPU_RESPONSE_TIMEOUT;
  2479. else if (regVal == WAVE5_ERROR_FW_FATAL)
  2480. return RETCODE_ERROR_FW_FATAL;
  2481. else
  2482. return RETCODE_FAILURE;
  2483. }
  2484. return RETCODE_SUCCESS;
  2485. }
  2486. #ifdef SUPPORT_LOOK_AHEAD_RC
  2487. #define LARC_DATA_REG_CNT 16
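/*
 * Wave5VpuEncSetLarcData() - download look-ahead rate-control data to the
 * firmware: larcData entries are written LARC_DATA_REG_CNT (16) at a time
 * through the OPT_LOOKAHEAD_PARAM_* SET_PARAM options, and register slots past
 * the last valid entry are filled with -1.
 */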
  2488. RetCode Wave5VpuEncSetLarcData(CodecInst* instance, EncLarcInfo larcInfo)
  2489. {
  2490. Int32 coreIdx;
  2491. EncInfo* pEncInfo;
  2492. EncOpenParam* pOpenParam;
  2493. EncWaveParam* pParam;
  2494. Uint32 regVal = 0;
  2495. Uint32 i, addr, k, remain;
  2496. Uint32 quotient;
  2497. Uint32 larcSizeMinus1;
  2498. coreIdx = instance->coreIdx;
  2499. pEncInfo = &instance->CodecInfo->encInfo;
  2500. pOpenParam = &pEncInfo->openParam;
  2501. pParam = &pOpenParam->EncStdParam.waveParam;
  2502. EnterLock(coreIdx);
  2503. larcSizeMinus1 = pParam->larcSize-1;
  2504. quotient = VPU_ALIGN16(larcSizeMinus1)/LARC_DATA_REG_CNT;//16 registers
  2505. if (pParam->larcEnable == TRUE) {
  2506. remain = larcSizeMinus1;
  2507. for ( k=0, i = 0 ; i < quotient ; i++ ) {
  2508. VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_SET_PARAM_OPTION, OPT_LOOKAHEAD_PARAM_1+i);
  2509. for (addr = W5_CMD_ENC_SEQ_LOOK_AHEAD_RC_START ; addr < W5_CMD_ENC_SEQ_LOOK_AHEAD_RC_END ;addr+=12) {
  2510. if ( k < larcSizeMinus1) {
  2511. VpuWriteReg(coreIdx, addr, larcInfo.larcData[k][0]);
  2512. VpuWriteReg(coreIdx, addr+4, larcInfo.larcData[k][1]);
  2513. VpuWriteReg(coreIdx, addr+8, larcInfo.larcData[k][2]);
  2514. // VLOG(ERR, "PASS2 set (%d) = %x, %x, %x\n", k, larcInfo.larcData[k][0], larcInfo.larcData[k][1], larcInfo.larcData[k][2]);
  2515. }
  2516. else {
  2517. VpuWriteReg(coreIdx, addr, -1);
  2518. // VLOG(ERR, "PASS2 set (%d) = -1\n", k);
  2519. }
  2520. k++;
  2521. }
  2522. remain -= LARC_DATA_REG_CNT;
  2523. Wave5BitIssueCommand(instance, W5_ENC_SET_PARAM);
  2524. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
  2525. if (instance->loggingEnable)
  2526. vdi_log(coreIdx, W5_ENC_SET_PARAM, 2);
2527. LeaveLock(coreIdx); return RETCODE_VPU_RESPONSE_TIMEOUT; // release the lock taken by EnterLock() above before returning
  2528. }
  2529. if (VpuReadReg(coreIdx, W5_RET_SUCCESS) == 0) {
2530. regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON); LeaveLock(coreIdx); // release the lock before the early returns below
  2531. if ( regVal == WAVE5_QUEUEING_FAIL)
  2532. return RETCODE_QUEUEING_FAILURE;
  2533. else if (regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
  2534. return RETCODE_MEMORY_ACCESS_VIOLATION;
  2535. else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
  2536. return RETCODE_VPU_RESPONSE_TIMEOUT;
  2537. else
  2538. return RETCODE_FAILURE;
  2539. }
  2540. }
  2541. }
  2542. LeaveLock(coreIdx);
  2543. return RETCODE_SUCCESS;
  2544. }
  2545. #endif
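/*
 * Wave5VpuEncGetSeqInfo() - fetch the outcome of the INIT_SEQ stage via a
 * GET_RESULT query: required frame-buffer and source-buffer counts, maximum
 * picture latency, and the VLC/parameter buffer sizes later used to size the
 * task buffer in Wave5VpuEncRegisterFramebuffer().
 */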
  2546. RetCode Wave5VpuEncGetSeqInfo(CodecInst* instance, EncInitialInfo* info)
  2547. {
  2548. RetCode ret = RETCODE_SUCCESS;
  2549. Uint32 regVal;
  2550. EncInfo* pEncInfo;
  2551. pEncInfo = VPU_HANDLE_TO_ENCINFO(instance);
  2552. // Send QUERY cmd
  2553. ret = SendQuery(instance, GET_RESULT);
  2554. if (ret != RETCODE_SUCCESS) {
  2555. regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
  2556. if (regVal != WAVE5_QUEUEING_FAIL)
  2557. VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);
  2558. if (regVal == WAVE5_RESULT_NOT_READY)
  2559. return RETCODE_REPORT_NOT_READY;
  2560. else if(regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
  2561. return RETCODE_MEMORY_ACCESS_VIOLATION;
  2562. else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
  2563. return RETCODE_VPU_RESPONSE_TIMEOUT;
  2564. else if (regVal == WAVE5_ERROR_FW_FATAL)
  2565. return RETCODE_ERROR_FW_FATAL;
  2566. else
  2567. return RETCODE_QUERY_FAILURE;
  2568. }
  2569. if (instance->loggingEnable)
  2570. vdi_log(instance->coreIdx, W5_INIT_SEQ, 0);
  2571. regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_STATUS);
  2572. pEncInfo->instanceQueueCount = (regVal>>16)&0xff;
  2573. pEncInfo->reportQueueCount = (regVal & 0xffff);
  2574. if (VpuReadReg(instance->coreIdx, W5_RET_ENC_ENCODING_SUCCESS) != 1) {
  2575. info->seqInitErrReason = VpuReadReg(instance->coreIdx, W5_RET_ENC_ERR_INFO);
  2576. ret = RETCODE_FAILURE;
  2577. }
  2578. else {
  2579. info->warnInfo = VpuReadReg(instance->coreIdx, W5_RET_ENC_WARN_INFO);
  2580. }
  2581. info->minFrameBufferCount = VpuReadReg(instance->coreIdx, W5_RET_ENC_NUM_REQUIRED_FB);
  2582. info->minSrcFrameCount = VpuReadReg(instance->coreIdx, W5_RET_ENC_MIN_SRC_BUF_NUM);
  2583. info->maxLatencyPictures = VpuReadReg(instance->coreIdx, W5_RET_ENC_PIC_MAX_LATENCY_PICTURES);
  2584. info->vlcBufSize = VpuReadReg(instance->coreIdx, W5_RET_VLC_BUF_SIZE);
  2585. info->paramBufSize = VpuReadReg(instance->coreIdx, W5_RET_PARAM_BUF_SIZE);
  2586. pEncInfo->vlcBufSize = info->vlcBufSize;
  2587. pEncInfo->paramBufSize = info->paramBufSize;
  2588. return ret;
  2589. }
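/*
 * Wave5VpuEncRegisterFramebuffer() - register the reconstructed frame buffers.
 * For compressed maps it allocates the MV-col, FBC luma/chroma offset-table,
 * sub-sampled and task buffers, then hands the buffer addresses to the
 * firmware in groups of up to eight per W5_SET_FB command and finally
 * configures the secondary AXI.
 */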
  2590. RetCode Wave5VpuEncRegisterFramebuffer(CodecInst* inst, FrameBuffer* fbArr, TiledMapType mapType, Uint32 count)
  2591. {
  2592. RetCode ret = RETCODE_SUCCESS;
  2593. Int32 q, j, i, remain, idx, coreIdx, startNo, endNo, stride;
  2594. Uint32 regVal=0, picSize=0, mvColSize, fbcYTblSize, fbcCTblSize, subSampledSize=0;
  2595. Uint32 endian, nv21=0, cbcrInterleave = 0, lumaStride, chromaStride, bufHeight = 0, bufWidth = 0;
  2596. Uint32 addrY, addrCb, addrCr;
  2597. Uint32 svacMvColSize0 = 0, svacMvColSize1 = 0;
  2598. vpu_buffer_t vbMV = {0,};
  2599. vpu_buffer_t vbFbcYTbl = {0,};
  2600. vpu_buffer_t vbFbcCTbl = {0,};
  2601. vpu_buffer_t vbSubSamBuf = {0,};
  2602. vpu_buffer_t vbTask = {0,};
  2603. EncOpenParam* pOpenParam;
  2604. EncInfo* pEncInfo = &inst->CodecInfo->encInfo;
  2605. pOpenParam = &pEncInfo->openParam;
  2606. coreIdx = inst->coreIdx;
  2607. mvColSize = fbcYTblSize = fbcCTblSize = 0;
  2608. stride = pEncInfo->stride;
  2609. bufWidth = VPU_ALIGN8(pOpenParam->picWidth);
  2610. bufHeight = VPU_ALIGN8(pOpenParam->picHeight);
  2611. if (mapType == COMPRESSED_FRAME_MAP_SVAC_SVC_BL) {
  2612. bufWidth = pOpenParam->picWidthBL;
  2613. bufHeight = pOpenParam->picHeightBL;
  2614. }
  2615. if ((pEncInfo->rotationAngle != 0 || pEncInfo->mirrorDirection != 0) && !(pEncInfo->rotationAngle == 180 && pEncInfo->mirrorDirection == MIRDIR_HOR_VER)) {
  2616. bufWidth = VPU_ALIGN32(pOpenParam->picWidth);
  2617. bufHeight = VPU_ALIGN32(pOpenParam->picHeight);
  2618. }
  2619. if (pEncInfo->rotationAngle == 90 || pEncInfo->rotationAngle == 270) {
  2620. bufWidth = VPU_ALIGN32(pOpenParam->picHeight);
  2621. bufHeight = VPU_ALIGN32(pOpenParam->picWidth);
  2622. }
  2623. svacMvColSize0 = WAVE5_ENC_SVAC_MVCOL_0_BUF_SIZE(bufWidth, bufHeight);
  2624. svacMvColSize1 = WAVE5_ENC_SVAC_MVCOL_1_BUF_SIZE(bufWidth, bufHeight);
  2625. picSize = (bufWidth<<16) | bufHeight;
  2626. if (mapType >= COMPRESSED_FRAME_MAP) {
  2627. nv21 = 0;
  2628. cbcrInterleave = 0;
  2629. if (inst->codecMode == W_SVAC_ENC) {
  2630. mvColSize = svacMvColSize0 + svacMvColSize1;
  2631. vbMV.phys_addr = 0;
  2632. vbMV.size = ((mvColSize+4095)&~4095)+4096; /* 4096 is a margin */
  2633. }
  2634. else if (inst->codecMode == W_HEVC_ENC) {
  2635. mvColSize = WAVE5_ENC_HEVC_MVCOL_BUF_SIZE(bufWidth, bufHeight);
  2636. mvColSize = VPU_ALIGN16(mvColSize);
  2637. vbMV.phys_addr = 0;
  2638. vbMV.size = ((mvColSize*count+4095)&~4095)+4096; /* 4096 is a margin */
  2639. }
  2640. else if (inst->codecMode == W_AVC_ENC) {
  2641. mvColSize = WAVE5_ENC_AVC_MVCOL_BUF_SIZE(bufWidth, bufHeight);
  2642. vbMV.phys_addr = 0;
  2643. vbMV.size = ((mvColSize*count+4095)&~4095)+4096; /* 4096 is a margin */
  2644. }
  2645. if (vdi_allocate_dma_memory(inst->coreIdx, &vbMV, ENC_MV, inst->instIndex) < 0)
  2646. return RETCODE_INSUFFICIENT_RESOURCE;
  2647. if (mapType == COMPRESSED_FRAME_MAP_SVAC_SVC_BL)
  2648. pEncInfo->vbMVBL = vbMV;
  2649. else
  2650. pEncInfo->vbMV = vbMV;
  2651. if (pEncInfo->productCode == WAVE521C_DUAL_CODE) {
  2652. Uint32 bgs_width, ot_bg_width, comp_frm_width, ot_frm_width, ot_bg_height, bgs_height, comp_frm_height, ot_frm_height;
  2653. Uint32 frm_width, frm_height;
  2654. bgs_width = (pOpenParam->EncStdParam.waveParam.internalBitDepth >8 ? 256 : 512);
  2655. ot_bg_width = 1024;
  2656. frm_width = VPU_CEIL(bufWidth, 16);
  2657. frm_height = VPU_CEIL(bufHeight, 16);
  2658. comp_frm_width = VPU_CEIL(VPU_CEIL(frm_width , 16) + 16, 16); // valid_width = align(width, 16), comp_frm_width = align(valid_width+pad_x, 16)
  2659. ot_frm_width = VPU_CEIL(comp_frm_width, ot_bg_width); // 1024 = offset table BG width
  2660. // sizeof_offset_table()
  2661. ot_bg_height = 32;
  2662. bgs_height = (1<<14) / bgs_width / (pOpenParam->EncStdParam.waveParam.internalBitDepth>8 ? 2 : 1);
  2663. comp_frm_height = VPU_CEIL(VPU_CEIL(frm_height, 4) + 4, bgs_height);
  2664. ot_frm_height = VPU_CEIL(comp_frm_height, ot_bg_height);
  2665. fbcYTblSize = (ot_frm_width/16) * (ot_frm_height/4) *2;
  2666. }
  2667. else {
  2668. fbcYTblSize = WAVE5_FBC_LUMA_TABLE_SIZE(bufWidth, bufHeight);
  2669. fbcYTblSize = VPU_ALIGN16(fbcYTblSize);
  2670. }
  2671. vbFbcYTbl.phys_addr = 0;
  2672. vbFbcYTbl.size = ((fbcYTblSize*count+4095)&~4095)+4096;
  2673. if (vdi_allocate_dma_memory(inst->coreIdx, &vbFbcYTbl, ENC_FBCY_TBL, inst->instIndex) < 0)
  2674. return RETCODE_INSUFFICIENT_RESOURCE;
  2675. if (mapType == COMPRESSED_FRAME_MAP_SVAC_SVC_BL)
  2676. pEncInfo->vbFbcYTblBL = vbFbcYTbl;
  2677. else
  2678. pEncInfo->vbFbcYTbl = vbFbcYTbl;
  2679. if (pEncInfo->productCode == WAVE521C_DUAL_CODE) {
  2680. Uint32 bgs_width, ot_bg_width, comp_frm_width, ot_frm_width, ot_bg_height, bgs_height, comp_frm_height, ot_frm_height;
  2681. Uint32 frm_width, frm_height;
  2682. bgs_width = (pOpenParam->EncStdParam.waveParam.internalBitDepth >8 ? 256 : 512);
  2683. ot_bg_width = 1024;
  2684. frm_width = VPU_CEIL(bufWidth, 16);
  2685. frm_height = VPU_CEIL(bufHeight, 16);
  2686. comp_frm_width = VPU_CEIL(VPU_CEIL(frm_width/2 , 16) + 16, 16); // valid_width = align(width, 16), comp_frm_width = align(valid_width+pad_x, 16)
  2687. ot_frm_width = VPU_CEIL(comp_frm_width, ot_bg_width); // 1024 = offset table BG width
  2688. // sizeof_offset_table()
  2689. ot_bg_height = 32;
  2690. bgs_height = (1<<14) / bgs_width / (pOpenParam->EncStdParam.waveParam.internalBitDepth>8 ? 2 : 1);
  2691. comp_frm_height = VPU_CEIL(VPU_CEIL(frm_height, 4) + 4, bgs_height);
  2692. ot_frm_height = VPU_CEIL(comp_frm_height, ot_bg_height);
  2693. fbcCTblSize = (ot_frm_width/16) * (ot_frm_height/4) *2;
  2694. }
  2695. else {
  2696. fbcCTblSize = WAVE5_FBC_CHROMA_TABLE_SIZE(bufWidth, bufHeight);
  2697. fbcCTblSize = VPU_ALIGN16(fbcCTblSize);
  2698. }
  2699. vbFbcCTbl.phys_addr = 0;
  2700. vbFbcCTbl.size = ((fbcCTblSize*count+4095)&~4095)+4096;
  2701. if (vdi_allocate_dma_memory(inst->coreIdx, &vbFbcCTbl, ENC_FBCC_TBL, inst->instIndex) < 0)
  2702. return RETCODE_INSUFFICIENT_RESOURCE;
  2703. if (mapType == COMPRESSED_FRAME_MAP_SVAC_SVC_BL)
  2704. pEncInfo->vbFbcCTblBL = vbFbcCTbl;
  2705. else
  2706. pEncInfo->vbFbcCTbl = vbFbcCTbl;
  2707. if (pOpenParam->bitstreamFormat == STD_AVC) {
  2708. subSampledSize = WAVE5_AVC_SUBSAMPLED_ONE_SIZE(bufWidth, bufHeight);
  2709. }
  2710. else {
  2711. subSampledSize = WAVE5_SUBSAMPLED_ONE_SIZE(bufWidth, bufHeight);
  2712. }
  2713. vbSubSamBuf.size = ((subSampledSize*count+4095)&~4095)+4096;
  2714. vbSubSamBuf.phys_addr = 0;
  2715. if (vdi_allocate_dma_memory(coreIdx, &vbSubSamBuf, ENC_SUBSAMBUF, inst->instIndex) < 0)
  2716. return RETCODE_INSUFFICIENT_RESOURCE;
  2717. if (mapType == COMPRESSED_FRAME_MAP_SVAC_SVC_BL)
  2718. pEncInfo->vbSubSamBufBL = vbSubSamBuf;
  2719. else
  2720. pEncInfo->vbSubSamBuf = vbSubSamBuf;
  2721. vbTask.size = (Uint32)((pEncInfo->vlcBufSize * VLC_BUF_NUM) + (pEncInfo->paramBufSize * COMMAND_QUEUE_DEPTH));
  2722. vbTask.phys_addr = 0;
  2723. if (pEncInfo->vbTask.size == 0) {
  2724. if (vdi_allocate_dma_memory(coreIdx, &vbTask, ENC_TASK, inst->instIndex) < 0)
  2725. return RETCODE_INSUFFICIENT_RESOURCE;
  2726. pEncInfo->vbTask = vbTask;
  2727. VpuWriteReg(coreIdx, W5_CMD_SET_FB_ADDR_TASK_BUF, pEncInfo->vbTask.phys_addr);
  2728. VpuWriteReg(coreIdx, W5_CMD_SET_FB_TASK_BUF_SIZE, vbTask.size);
  2729. }
  2730. }
  2731. VpuWriteReg(coreIdx, W5_ADDR_SUB_SAMPLED_FB_BASE, vbSubSamBuf.phys_addr); // set sub-sampled buffer base addr
  2732. VpuWriteReg(coreIdx, W5_SUB_SAMPLED_ONE_FB_SIZE, subSampledSize); // set sub-sampled buffer size for one frame
  2733. endian = vdi_convert_endian(coreIdx, fbArr[0].endian);
  2734. VpuWriteReg(coreIdx, W5_PIC_SIZE, picSize);
  2735. // set stride of Luma/Chroma for compressed buffer
  2736. if ((pEncInfo->rotationAngle != 0 || pEncInfo->mirrorDirection != 0) && !(pEncInfo->rotationAngle == 180 && pEncInfo->mirrorDirection == MIRDIR_HOR_VER)){
  2737. lumaStride = VPU_ALIGN32(bufWidth)*(pOpenParam->EncStdParam.waveParam.internalBitDepth >8 ? 5 : 4);
  2738. lumaStride = VPU_ALIGN32(lumaStride);
  2739. chromaStride = VPU_ALIGN16(bufWidth/2)*(pOpenParam->EncStdParam.waveParam.internalBitDepth >8 ? 5 : 4);
  2740. chromaStride = VPU_ALIGN32(chromaStride);
  2741. }
  2742. else {
  2743. if (mapType == COMPRESSED_FRAME_MAP_SVAC_SVC_BL) {
  2744. lumaStride = VPU_ALIGN16(pOpenParam->picWidthBL)*(pOpenParam->EncStdParam.waveParam.internalBitDepth >8 ? 5 : 4);
  2745. lumaStride = VPU_ALIGN32(lumaStride);
  2746. chromaStride = VPU_ALIGN16(pOpenParam->picWidthBL/2)*(pOpenParam->EncStdParam.waveParam.internalBitDepth >8 ? 5 : 4);
  2747. chromaStride = VPU_ALIGN32(chromaStride);
  2748. }
  2749. else {
  2750. lumaStride = VPU_ALIGN16(pOpenParam->picWidth)*(pOpenParam->EncStdParam.waveParam.internalBitDepth >8 ? 5 : 4);
  2751. lumaStride = VPU_ALIGN32(lumaStride);
  2752. chromaStride = VPU_ALIGN16(pOpenParam->picWidth/2)*(pOpenParam->EncStdParam.waveParam.internalBitDepth >8 ? 5 : 4);
  2753. chromaStride = VPU_ALIGN32(chromaStride);
  2754. }
  2755. }
  2756. VpuWriteReg(coreIdx, W5_FBC_STRIDE, lumaStride<<16 | chromaStride);
  2757. cbcrInterleave = pOpenParam->cbcrInterleave;
  2758. stride = pEncInfo->stride;
  2759. if (mapType == COMPRESSED_FRAME_MAP_SVAC_SVC_BL) {
  2760. stride = VPU_ALIGN32(VPU_ALIGN32(VPU_ALIGN16(pOpenParam->picWidthBL)*5)/4);
  2761. }
  2762. regVal =(nv21 << 29) |
  2763. (cbcrInterleave << 16) |
  2764. (stride);
  2765. VpuWriteReg(coreIdx, W5_COMMON_PIC_INFO, regVal);
  2766. remain = count;
  2767. q = (remain+7)/8;
  2768. idx = 0;
  2769. for (j=0; j<q; j++) {
  2770. regVal = (endian<<16) | (j==q-1)<<4 | ((j==0)<<3);//lint !e514
  2771. regVal |= (pOpenParam->EncStdParam.waveParam.svcEnable == TRUE) ? (mapType == COMPRESSED_FRAME_MAP_SVAC_SVC_BL ? 0 : 1 << 27) : 0; // 0 = BL, 1 = EL
  2772. regVal |= (pOpenParam->enableNonRefFbcWrite<< 26);
  2773. VpuWriteReg(coreIdx, W5_SFB_OPTION, regVal);
  2774. startNo = j*8;
  2775. endNo = startNo + (remain>=8 ? 8 : remain) - 1;
  2776. VpuWriteReg(coreIdx, W5_SET_FB_NUM, (startNo<<8)|endNo);
  2777. for (i=0; i<8 && i<remain; i++) {
  2778. if (mapType == LINEAR_FRAME_MAP && pEncInfo->openParam.cbcrOrder == CBCR_ORDER_REVERSED) {
  2779. addrY = fbArr[i+startNo].bufY;
  2780. addrCb = fbArr[i+startNo].bufCr;
  2781. addrCr = fbArr[i+startNo].bufCb;
  2782. }
  2783. else {
  2784. addrY = fbArr[i+startNo].bufY;
  2785. addrCb = fbArr[i+startNo].bufCb;
  2786. addrCr = fbArr[i+startNo].bufCr;
  2787. }
  2788. VpuWriteReg(coreIdx, W5_ADDR_LUMA_BASE0 + (i<<4), addrY);
  2789. VpuWriteReg(coreIdx, W5_ADDR_CB_BASE0 + (i<<4), addrCb);
  2790. APIDPRINT("REGISTER FB[%02d] Y(0x%08x), Cb(0x%08x) ", i, addrY, addrCb);
  2791. if (mapType >= COMPRESSED_FRAME_MAP) {
  2792. VpuWriteReg(coreIdx, W5_ADDR_FBC_Y_OFFSET0 + (i<<4), vbFbcYTbl.phys_addr+idx*fbcYTblSize); /* Luma FBC offset table */
  2793. VpuWriteReg(coreIdx, W5_ADDR_FBC_C_OFFSET0 + (i<<4), vbFbcCTbl.phys_addr+idx*fbcCTblSize); /* Chroma FBC offset table */
  2794. if (inst->codecMode == W_SVAC_ENC) {
  2795. if (idx == 0) { // SVAC encoder needs only 2 mv-col buffers. (COL0 = for RDO, COL1 = for MVP)
  2796. VpuWriteReg(coreIdx, W5_ADDR_MV_COL0, vbMV.phys_addr);
  2797. VpuWriteReg(coreIdx, W5_ADDR_MV_COL1, vbMV.phys_addr + svacMvColSize0);
  2798. APIDPRINT("Yo(0x%08x) Co(0x%08x), Mv(0x%08x), Mv1(0x%8x)\n",
  2799. vbFbcYTbl.phys_addr, vbFbcCTbl.phys_addr,
  2800. vbMV.phys_addr, vbMV.phys_addr + svacMvColSize0);
  2801. }
  2802. else {
  2803. APIDPRINT("Yo(0x%08x) Co(0x%08x)\n", vbFbcYTbl.phys_addr+idx*fbcYTblSize, vbFbcCTbl.phys_addr+idx*fbcCTblSize);
  2804. }
  2805. }
  2806. else {
  2807. VpuWriteReg(coreIdx, W5_ADDR_MV_COL0 + (i<<2), vbMV.phys_addr+idx*mvColSize);
  2808. APIDPRINT("Yo(0x%08x) Co(0x%08x), Mv(0x%08x)\n",
  2809. vbFbcYTbl.phys_addr+idx*fbcYTblSize,
  2810. vbFbcCTbl.phys_addr+idx*fbcCTblSize,
  2811. vbMV.phys_addr+idx*mvColSize);
  2812. }
  2813. }
  2814. else {
  2815. VpuWriteReg(coreIdx, W5_ADDR_CR_BASE0 + (i<<4), addrCr);
  2816. VpuWriteReg(coreIdx, W5_ADDR_FBC_C_OFFSET0 + (i<<4), 0);
  2817. VpuWriteReg(coreIdx, W5_ADDR_MV_COL0 + (i<<2), 0);
  2818. APIDPRINT("Cr(0x%08x)\n", addrCr);
  2819. }
  2820. idx++;
  2821. }
  2822. remain -= i;
  2823. Wave5BitIssueCommand(inst, W5_SET_FB);
  2824. if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
  2825. return RETCODE_VPU_RESPONSE_TIMEOUT;
  2826. }
  2827. }
  2828. regVal = VpuReadReg(coreIdx, W5_RET_SUCCESS);
  2829. if (regVal == 0) {
  2830. return RETCODE_FAILURE;
  2831. }
  2832. if (ConfigSecAXIWave(coreIdx, inst->codecMode,
  2833. &pEncInfo->secAxiInfo, pOpenParam->picWidth, pOpenParam->picHeight,
  2834. pOpenParam->EncStdParam.waveParam.profile, pOpenParam->EncStdParam.waveParam.level) == 0) {
  2835. return RETCODE_INSUFFICIENT_RESOURCE;
  2836. }
  2837. return ret;
  2838. }
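/*
 * Wave5VpuEncode() - queue one picture for encoding (W5_ENC_PIC):
 * derives the source frame format and pixel justification from the open
 * parameters, programs the bitstream buffer, code options, per-picture
 * QP/type overrides, source addresses/strides and custom-map registers,
 * then issues the command and translates W5_RET_FAIL_REASON on queueing
 * failure.
 */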
  2839. RetCode Wave5VpuEncode(CodecInst* instance, EncParam* option)
  2840. {
  2841. Int32 coreIdx, srcFrameFormat, srcPixelFormat, packedFormat;
  2842. Uint32 regVal = 0, bsEndian;
  2843. Uint32 srcStrideC = 0;
  2844. EncInfo* pEncInfo;
  2845. FrameBuffer* pSrcFrame;
  2846. EncOpenParam* pOpenParam;
  2847. BOOL justified = WTL_RIGHT_JUSTIFIED;
  2848. Uint32 formatNo = WTL_PIXEL_8BIT;
  2849. coreIdx = instance->coreIdx;
  2850. pEncInfo = VPU_HANDLE_TO_ENCINFO(instance);
  2851. pOpenParam = &pEncInfo->openParam;
  2852. pSrcFrame = option->sourceFrame;
  2853. switch (pOpenParam->srcFormat) {
  2854. case FORMAT_420_P10_16BIT_MSB:
  2855. case FORMAT_YUYV_P10_16BIT_MSB:
  2856. case FORMAT_YVYU_P10_16BIT_MSB:
  2857. case FORMAT_UYVY_P10_16BIT_MSB:
  2858. case FORMAT_VYUY_P10_16BIT_MSB:
  2859. justified = WTL_RIGHT_JUSTIFIED;
  2860. formatNo = WTL_PIXEL_16BIT;
  2861. break;
  2862. case FORMAT_420_P10_16BIT_LSB:
  2863. case FORMAT_YUYV_P10_16BIT_LSB:
  2864. case FORMAT_YVYU_P10_16BIT_LSB:
  2865. case FORMAT_UYVY_P10_16BIT_LSB:
  2866. case FORMAT_VYUY_P10_16BIT_LSB:
  2867. justified = WTL_LEFT_JUSTIFIED;
  2868. formatNo = WTL_PIXEL_16BIT;
  2869. break;
  2870. case FORMAT_420_P10_32BIT_MSB:
  2871. case FORMAT_YUYV_P10_32BIT_MSB:
  2872. case FORMAT_YVYU_P10_32BIT_MSB:
  2873. case FORMAT_UYVY_P10_32BIT_MSB:
  2874. case FORMAT_VYUY_P10_32BIT_MSB:
  2875. justified = WTL_RIGHT_JUSTIFIED;
  2876. formatNo = WTL_PIXEL_32BIT;
  2877. break;
  2878. case FORMAT_420_P10_32BIT_LSB:
  2879. case FORMAT_YUYV_P10_32BIT_LSB:
  2880. case FORMAT_YVYU_P10_32BIT_LSB:
  2881. case FORMAT_UYVY_P10_32BIT_LSB:
  2882. case FORMAT_VYUY_P10_32BIT_LSB:
  2883. justified = WTL_LEFT_JUSTIFIED;
  2884. formatNo = WTL_PIXEL_32BIT;
  2885. break;
  2886. case FORMAT_420:
  2887. case FORMAT_YUYV:
  2888. case FORMAT_YVYU:
  2889. case FORMAT_UYVY:
  2890. case FORMAT_VYUY:
  2891. justified = WTL_LEFT_JUSTIFIED;
  2892. formatNo = WTL_PIXEL_8BIT;
  2893. break;
  2894. default:
  2895. return RETCODE_FAILURE;
  2896. }
  2897. packedFormat = (pOpenParam->packedFormat >= 1) ? 1 : 0;
  2898. srcFrameFormat = packedFormat<<2 |
  2899. pOpenParam->cbcrInterleave<<1 |
  2900. pOpenParam->nv21;
2901. switch (pOpenParam->packedFormat) { // additional packed formats (the cbcrInterleave and nv21 bits are reused to signal these modes)
  2902. case PACKED_YVYU:
  2903. srcFrameFormat = 0x5;
  2904. break;
  2905. case PACKED_UYVY:
  2906. srcFrameFormat = 0x6;
  2907. break;
  2908. case PACKED_VYUY:
  2909. srcFrameFormat = 0x7;
  2910. break;
  2911. default:
  2912. break;
  2913. }
  2914. srcPixelFormat = justified<<2 | formatNo;
  2915. VpuWriteReg(coreIdx, W5_CMD_ENC_BS_START_ADDR, option->picStreamBufferAddr);
  2916. VpuWriteReg(coreIdx, W5_CMD_ENC_BS_SIZE, option->picStreamBufferSize);
  2917. pEncInfo->streamBufStartAddr = option->picStreamBufferAddr;
  2918. pEncInfo->streamBufSize = option->picStreamBufferSize;
  2919. pEncInfo->streamBufEndAddr = option->picStreamBufferAddr + option->picStreamBufferSize;
  2920. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_SRC_AXI_SEL, DEFAULT_SRC_AXI);
  2921. /* Secondary AXI */
  2922. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_USE_SEC_AXI, (pEncInfo->secAxiInfo.u.wave.useEncRdoEnable<<11) | (pEncInfo->secAxiInfo.u.wave.useEncLfEnable<<15));
  2923. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_REPORT_ENDIAN, VDI_128BIT_LITTLE_ENDIAN);
  2924. if (option->codeOption.implicitHeaderEncode == 1) {
2925. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_CODE_OPTION, CODEOPT_ENC_HEADER_IMPLICIT | CODEOPT_ENC_VCL | // implicitly encode headers together with the bitstream (to encode headers only, use GiveCommand with ENC_PUT_VIDEO_HEADER)
  2926. (option->codeOption.encodeAUD<<5) |
  2927. (option->codeOption.encodeEOS<<6) |
  2928. (option->codeOption.encodeEOB<<7));
  2929. }
  2930. else {
  2931. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_CODE_OPTION, (option->codeOption.implicitHeaderEncode<<0) |
  2932. (option->codeOption.encodeVCL<<1) |
  2933. (option->codeOption.encodeVPS<<2) |
  2934. (option->codeOption.encodeSPS<<3) |
  2935. (option->codeOption.encodePPS<<4) |
  2936. (option->codeOption.encodeAUD<<5) |
  2937. (option->codeOption.encodeEOS<<6) |
  2938. (option->codeOption.encodeEOB<<7) |
  2939. (option->codeOption.encodeFiller<<8));
  2940. }
  2941. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_PIC_PARAM, (option->skipPicture<<0) |
  2942. (option->forcePicQpEnable<<1) |
  2943. (option->forcePicQpI<<2) |
  2944. (option->forcePicQpP<<8) |
  2945. (option->forcePicQpB<<14) |
  2946. (option->forcePicTypeEnable<<20) |
  2947. (option->forcePicType<<21) |
  2948. (option->forceAllCtuCoefDropEnable<<24) |
  2949. (option->svcLayerFlag<<25));
  2950. if (option->srcEndFlag == 1)
  2951. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_SRC_PIC_IDX, 0xFFFFFFFF); // no more source image.
  2952. else
  2953. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_SRC_PIC_IDX, option->srcIdx);
  2954. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_SRC_ADDR_Y, pSrcFrame->bufY);
  2955. if (pOpenParam->cbcrOrder == CBCR_ORDER_NORMAL) {
  2956. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_SRC_ADDR_U, pSrcFrame->bufCb);
  2957. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_SRC_ADDR_V, pSrcFrame->bufCr);
  2958. }
  2959. else {
  2960. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_SRC_ADDR_U, pSrcFrame->bufCr);
  2961. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_SRC_ADDR_V, pSrcFrame->bufCb);
  2962. }
  2963. if (formatNo == WTL_PIXEL_32BIT) {
  2964. srcStrideC = VPU_ALIGN16(pSrcFrame->stride/2)*(1<<pSrcFrame->cbcrInterleave);
  2965. if ( pSrcFrame->cbcrInterleave == 1)
  2966. srcStrideC = pSrcFrame->stride;
  2967. }
  2968. else {
  2969. srcStrideC = (pSrcFrame->cbcrInterleave == 1) ? pSrcFrame->stride : (pSrcFrame->stride>>1);
  2970. }
  2971. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_SRC_STRIDE, (pSrcFrame->stride<<16) | srcStrideC );
  2972. regVal = vdi_convert_endian(coreIdx, pOpenParam->sourceEndian);
  2973. bsEndian = (~regVal&VDI_128BIT_ENDIAN_MASK);
  2974. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_SRC_FORMAT, (srcFrameFormat<<0) |
  2975. (srcPixelFormat<<3) |
  2976. (bsEndian<<6));
  2977. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_CUSTOM_MAP_OPTION_ADDR, option->customMapOpt.addrCustomMap);
  2978. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_CUSTOM_MAP_OPTION_PARAM, (option->customMapOpt.customRoiMapEnable << 0) |
  2979. (option->customMapOpt.roiAvgQp << 1) |
  2980. (option->customMapOpt.customLambdaMapEnable<< 8) |
  2981. (option->customMapOpt.customModeMapEnable<< 9) |
  2982. (option->customMapOpt.customCoefDropEnable<< 10));
  2983. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_LONGTERM_PIC, (option->useCurSrcAsLongtermPic<<0) | (option->useLongtermRef<<1));
  2984. if (instance->codecMode == W_HEVC_ENC || instance->codecMode == W_AVC_ENC) {
  2985. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_WP_PIXEL_SIGMA_Y, option->wpPixSigmaY);
  2986. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_WP_PIXEL_SIGMA_C, (option->wpPixSigmaCr<<16) | option->wpPixSigmaCb);
  2987. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_WP_PIXEL_MEAN_Y, option->wpPixMeanY);
  2988. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_WP_PIXEL_MEAN_C, (option->wpPixMeanCr<<16) | (option->wpPixMeanCb));
  2989. }
  2990. else if (instance->codecMode == W_SVAC_ENC) {
  2991. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_LF_PARAM_0, ((option->lfRefDeltaIntra&0x7f)<<0) |
  2992. ((option->lfRefDeltaRef0&0x7f)<<7) |
  2993. ((option->lfRefDeltaRef1&0x7f)<<14) |
  2994. ((option->lfModeDelta&0x7f)<<21) |
  2995. (option->sharpLevel<<28));
  2996. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_LF_PARAM_1, (option->userFilterLevelEnable<<0) | ((option->lfFilterLevel&0x3f)<<1));
  2997. }
  2998. #ifdef SUPPORT_LOOK_AHEAD_RC
  2999. if (TRUE == pOpenParam->EncStdParam.waveParam.larcEnable && pOpenParam->EncStdParam.waveParam.larcPass == 2) {
  3000. VpuWriteReg(coreIdx, W5_CMD_ENC_LARC_RC_B, option->larcData[0]);
  3001. VpuWriteReg(coreIdx, W5_CMD_ENC_LARC_RC_BPP, option->larcData[1]);
  3002. VpuWriteReg(coreIdx, W5_CMD_ENC_LARC_RC_ENC_ORDER, option->larcData[2]);
  3003. }
  3004. #endif
  3005. Wave5BitIssueCommand(instance, W5_ENC_PIC);
  3006. if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) { // Check QUEUE_DONE
  3007. if (instance->loggingEnable)
  3008. vdi_log(instance->coreIdx, W5_ENC_PIC, 2);
  3009. return RETCODE_VPU_RESPONSE_TIMEOUT;
  3010. }
  3011. regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_STATUS);
  3012. pEncInfo->instanceQueueCount = (regVal>>16)&0xff;
  3013. pEncInfo->reportQueueCount = (regVal & 0xffff);
3014. if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == FALSE) { // failed to add the command to the VCPU queue
  3015. regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
  3016. if (regVal != WAVE5_QUEUEING_FAIL)
  3017. VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);
  3018. if (regVal == WAVE5_QUEUEING_FAIL)
  3019. return RETCODE_QUEUEING_FAILURE;
  3020. else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
  3021. return RETCODE_VPU_RESPONSE_TIMEOUT;
  3022. else if (regVal == WAVE5_ERROR_FW_FATAL)
  3023. return RETCODE_ERROR_FW_FATAL;
  3024. else
  3025. return RETCODE_FAILURE;
  3026. }
  3027. return RETCODE_SUCCESS;
  3028. }
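/*
 * Wave5VpuEncGetResult() - collect the result of a completed ENC_PIC via a
 * GET_RESULT query: picture type, reconstructed/used source indices,
 * bitstream size and rd/wr pointers, per-picture statistics, and cycle
 * counts derived from the host-command/prepare/processing/encode ticks.
 */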
  3029. RetCode Wave5VpuEncGetResult(CodecInst* instance, EncOutputInfo* result)
  3030. {
  3031. RetCode ret = RETCODE_SUCCESS;
  3032. Uint32 encodingSuccess;
  3033. Uint32 regVal;
  3034. Int32 coreIdx;
  3035. EncInfo* pEncInfo = VPU_HANDLE_TO_ENCINFO(instance);
  3036. vpu_instance_pool_t* instancePool = NULL;
  3037. coreIdx = instance->coreIdx;
  3038. ret = SendQuery(instance, GET_RESULT);
  3039. if (ret != RETCODE_SUCCESS) {
  3040. regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
  3041. if (regVal != WAVE5_QUEUEING_FAIL)
  3042. VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);
  3043. if (regVal == WAVE5_RESULT_NOT_READY)
  3044. return RETCODE_REPORT_NOT_READY;
  3045. else if(regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
  3046. return RETCODE_MEMORY_ACCESS_VIOLATION;
  3047. else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
  3048. return RETCODE_VPU_RESPONSE_TIMEOUT;
  3049. else if (regVal == WAVE5_ERROR_FW_FATAL)
  3050. return RETCODE_ERROR_FW_FATAL;
  3051. else
  3052. return RETCODE_QUERY_FAILURE;
  3053. }
  3054. if (instance->loggingEnable)
  3055. vdi_log(coreIdx, W5_ENC_PIC, 0);
  3056. regVal = VpuReadReg(coreIdx, W5_RET_QUEUE_STATUS);
  3057. pEncInfo->instanceQueueCount = (regVal>>16)&0xff;
  3058. pEncInfo->reportQueueCount = (regVal & 0xffff);
  3059. encodingSuccess = VpuReadReg(coreIdx, W5_RET_ENC_ENCODING_SUCCESS);
  3060. if (encodingSuccess == FALSE) {
  3061. result->errorReason = VpuReadReg(coreIdx, W5_RET_ENC_ERR_INFO);
  3062. if (result->errorReason == WAVE5_SYSERR_ENC_VLC_BUF_FULL) {
  3063. return RETCODE_VLC_BUF_FULL;
  3064. }
  3065. return RETCODE_FAILURE;
  3066. } else {
  3067. result->warnInfo = VpuReadReg(instance->coreIdx, W5_RET_ENC_WARN_INFO);
  3068. }
  3069. result->encPicCnt = VpuReadReg(coreIdx, W5_RET_ENC_PIC_NUM);
  3070. regVal= VpuReadReg(coreIdx, W5_RET_ENC_PIC_TYPE);
  3071. result->picType = regVal & 0xFFFF;
  3072. result->encVclNut = VpuReadReg(coreIdx, W5_RET_ENC_VCL_NUT);
  3073. result->reconFrameIndex = VpuReadReg(coreIdx, W5_RET_ENC_PIC_IDX);
  3074. if (result->reconFrameIndex >= 0)
  3075. result->reconFrame = pEncInfo->frameBufPool[result->reconFrameIndex];
  3076. result->isSvcLayerEL = VpuReadReg(coreIdx, W5_RET_ENC_SVC_LAYER);
  3077. if (pEncInfo->openParam.EncStdParam.waveParam.svcEnable == 1 && result->isSvcLayerEL == FALSE) {
  3078. if (result->reconFrameIndex >= 0)
  3079. result->reconFrame = pEncInfo->frameBufPool[result->reconFrameIndex+pEncInfo->numFrameBuffers];
  3080. }
  3081. result->numOfSlices = VpuReadReg(coreIdx, W5_RET_ENC_PIC_SLICE_NUM);
  3082. result->picSkipped = VpuReadReg(coreIdx, W5_RET_ENC_PIC_SKIP);
  3083. result->numOfIntra = VpuReadReg(coreIdx, W5_RET_ENC_PIC_NUM_INTRA);
  3084. result->numOfMerge = VpuReadReg(coreIdx, W5_RET_ENC_PIC_NUM_MERGE);
  3085. result->numOfSkipBlock = VpuReadReg(coreIdx, W5_RET_ENC_PIC_NUM_SKIP);
3086. result->bitstreamWrapAround = 0; // WAVE520 supports line-buffer mode only, so wrap-around does not occur.
  3087. result->avgCtuQp = VpuReadReg(coreIdx, W5_RET_ENC_PIC_AVG_CTU_QP);
  3088. result->encPicByte = VpuReadReg(coreIdx, W5_RET_ENC_PIC_BYTE);
  3089. result->encGopPicIdx = VpuReadReg(coreIdx, W5_RET_ENC_GOP_PIC_IDX);
  3090. result->encPicPoc = VpuReadReg(coreIdx, W5_RET_ENC_PIC_POC);
  3091. result->encSrcIdx = VpuReadReg(coreIdx, W5_RET_ENC_USED_SRC_IDX);
  3092. result->releaseSrcFlag = VpuReadReg(coreIdx, W5_RET_ENC_SRC_BUF_FLAG);
  3093. pEncInfo->streamWrPtr = VpuReadReg(coreIdx, pEncInfo->streamWrPtrRegAddr);
  3094. pEncInfo->streamRdPtr = VpuReadReg(coreIdx, pEncInfo->streamRdPtrRegAddr);
  3095. result->picDistortionLow = VpuReadReg(coreIdx, W5_RET_ENC_PIC_DIST_LOW);
  3096. result->picDistortionHigh = VpuReadReg(coreIdx, W5_RET_ENC_PIC_DIST_HIGH);
  3097. result->bitstreamBuffer = VpuReadReg(coreIdx, pEncInfo->streamRdPtrRegAddr);
  3098. result->rdPtr = pEncInfo->streamRdPtr;
  3099. result->wrPtr = pEncInfo->streamWrPtr;
3100. if (result->reconFrameIndex == RECON_IDX_FLAG_HEADER_ONLY) // result of header-only (no VCL) encoding
  3101. result->bitstreamSize = result->encPicByte;
  3102. else if (result->reconFrameIndex < 0)
  3103. result->bitstreamSize = 0;
  3104. else
  3105. result->bitstreamSize = result->encPicByte;
  3106. result->encHostCmdTick = VpuReadReg(coreIdx, W5_RET_ENC_HOST_CMD_TICK);
  3107. result->encPrepareStartTick = VpuReadReg(coreIdx, W5_RET_ENC_PREPARE_START_TICK);
  3108. result->encPrepareEndTick = VpuReadReg(coreIdx, W5_RET_ENC_PREPARE_END_TICK);
  3109. result->encProcessingStartTick = VpuReadReg(coreIdx, W5_RET_ENC_PROCESSING_START_TICK);
  3110. result->encProcessingEndTick = VpuReadReg(coreIdx, W5_RET_ENC_PROCESSING_END_TICK);
  3111. result->encEncodeStartTick = VpuReadReg(coreIdx, W5_RET_ENC_ENCODING_START_TICK);
  3112. result->encEncodeEndTick = VpuReadReg(coreIdx, W5_RET_ENC_ENCODING_END_TICK);
  3113. instancePool = vdi_get_instance_pool(instance->coreIdx);
  3114. if ( pEncInfo->firstCycleCheck == FALSE ) {
  3115. result->frameCycle = (result->encEncodeEndTick - result->encHostCmdTick) * pEncInfo->cyclePerTick;
  3116. pEncInfo->firstCycleCheck = TRUE;
  3117. }
  3118. else {
  3119. result->frameCycle = (result->encEncodeEndTick - instancePool->lastPerformanceCycles) * pEncInfo->cyclePerTick;
  3120. if (instancePool->lastPerformanceCycles < result->encHostCmdTick)
  3121. result->frameCycle = (result->encEncodeEndTick - result->encHostCmdTick) * pEncInfo->cyclePerTick;
  3122. }
  3123. instancePool->lastPerformanceCycles = result->encEncodeEndTick;
  3124. result->prepareCycle = (result->encPrepareEndTick - result->encPrepareStartTick) * pEncInfo->cyclePerTick;
  3125. result->processing = (result->encProcessingEndTick - result->encProcessingStartTick) * pEncInfo->cyclePerTick;
  3126. result->EncodedCycle = (result->encEncodeEndTick - result->encEncodeStartTick) * pEncInfo->cyclePerTick;
  3127. #ifdef SUPPORT_LOOK_AHEAD_RC
  3128. result->larcData[0] = VpuReadReg(coreIdx, W5_RET_ENC_LARC_RC_B);
  3129. result->larcData[1] = VpuReadReg(coreIdx, W5_RET_ENC_LARC_RC_BPP);
  3130. result->larcData[2] = VpuReadReg(coreIdx, W5_RET_ENC_LARC_RC_ENC_ORDER);
  3131. #endif
  3132. return RETCODE_SUCCESS;
  3133. }
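/*
 * Wave5VpuEncGetHeader() - encode only headers (the type requested in
 * encHeaderParam->headerType) into the caller-supplied buffer by issuing
 * W5_ENC_PIC with no source picture. Runs under EnterLock()/LeaveLock().
 */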
  3134. RetCode Wave5VpuEncGetHeader(EncHandle instance, EncHeaderParam * encHeaderParam)
  3135. {
  3136. Int32 coreIdx;
  3137. Uint32 regVal = 0;
  3138. EncInfo* pEncInfo = VPU_HANDLE_TO_ENCINFO(instance);
  3139. coreIdx = instance->coreIdx;
  3140. EnterLock(coreIdx);
  3141. VpuWriteReg(coreIdx, W5_CMD_ENC_BS_START_ADDR, encHeaderParam->buf);
  3142. VpuWriteReg(coreIdx, W5_CMD_ENC_BS_SIZE, encHeaderParam->size);
  3143. pEncInfo->streamRdPtr = encHeaderParam->buf;
  3144. pEncInfo->streamWrPtr = encHeaderParam->buf;
  3145. pEncInfo->streamBufStartAddr = encHeaderParam->buf;
  3146. pEncInfo->streamBufSize = encHeaderParam->size;
  3147. pEncInfo->streamBufEndAddr = encHeaderParam->buf + encHeaderParam->size;
  3148. /* Secondary AXI */
  3149. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_USE_SEC_AXI, (pEncInfo->secAxiInfo.u.wave.useEncImdEnable<<9) |
  3150. (pEncInfo->secAxiInfo.u.wave.useEncRdoEnable<<11) |
  3151. (pEncInfo->secAxiInfo.u.wave.useEncLfEnable<<15));
  3152. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_CODE_OPTION, encHeaderParam->headerType);
  3153. VpuWriteReg(coreIdx, W5_CMD_ENC_PIC_SRC_PIC_IDX, 0);
  3154. Wave5BitIssueCommand(instance, W5_ENC_PIC);
  3155. if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) { // Check QUEUE_DONE
  3156. if (instance->loggingEnable)
  3157. vdi_log(instance->coreIdx, W5_ENC_PIC, 2);
  3158. LeaveLock(coreIdx);
  3159. return RETCODE_VPU_RESPONSE_TIMEOUT;
  3160. }
  3161. regVal = VpuReadReg(instance->coreIdx, W5_RET_QUEUE_STATUS);
  3162. pEncInfo->instanceQueueCount = (regVal>>16)&0xff;
  3163. pEncInfo->reportQueueCount = (regVal & 0xffff);
3164. if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == FALSE) { // failed to add the command to the VCPU queue
  3165. regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
  3166. if (regVal != WAVE5_QUEUEING_FAIL)
  3167. VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);
  3168. LeaveLock(coreIdx);
  3169. if ( regVal == WAVE5_QUEUEING_FAIL)
  3170. return RETCODE_QUEUEING_FAILURE;
  3171. else if (regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
  3172. return RETCODE_MEMORY_ACCESS_VIOLATION;
  3173. else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
  3174. return RETCODE_VPU_RESPONSE_TIMEOUT;
  3175. else if (regVal == WAVE5_ERROR_FW_FATAL)
  3176. return RETCODE_ERROR_FW_FATAL;
  3177. else
  3178. return RETCODE_FAILURE;
  3179. }
  3180. LeaveLock(coreIdx);
  3181. return RETCODE_SUCCESS;
  3182. }
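/*
 * Wave5VpuEncFiniSeq() - destroy the encoder instance (W5_DESTROY_INSTANCE);
 * returns RETCODE_VPU_STILL_RUNNING when the firmware reports
 * WAVE5_VPU_STILL_RUNNING.
 */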
  3183. RetCode Wave5VpuEncFiniSeq(CodecInst* instance )
  3184. {
  3185. RetCode ret = RETCODE_SUCCESS;
  3186. Wave5BitIssueCommand(instance, W5_DESTROY_INSTANCE);
  3187. if (vdi_wait_vpu_busy(instance->coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1)
  3188. return RETCODE_VPU_RESPONSE_TIMEOUT;
  3189. if (VpuReadReg(instance->coreIdx, W5_RET_SUCCESS) == FALSE) {
  3190. if (VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON) == WAVE5_VPU_STILL_RUNNING)
  3191. ret = RETCODE_VPU_STILL_RUNNING;
  3192. else
  3193. ret = RETCODE_FAILURE;
  3194. }
  3195. return ret;
  3196. }
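/*
 * Wave5VpuEncParaChange() - change encoding parameters at runtime:
 * issues SET_PARAM with OPT_CHANGE_PARAM, where param->enable_option selects
 * which register groups (PPS, intra, rate control, RDO, slices, NR,
 * background, custom mode decision, custom lambda, ...) are reprogrammed.
 */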
  3197. RetCode Wave5VpuEncParaChange(EncHandle instance, EncChangeParam* param)
  3198. {
  3199. Int32 coreIdx;
    Uint32 regVal = 0;
    EncInfo* pEncInfo;

    coreIdx  = instance->coreIdx;
    pEncInfo = &instance->CodecInfo->encInfo;

    EnterLock(coreIdx);

    /* SET_PARAM + COMMON */
    VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_SET_PARAM_OPTION, OPT_CHANGE_PARAM);
    VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_SET_PARAM_ENABLE, param->enable_option);

    if (param->enable_option & ENC_SET_CHANGE_PARAM_PPS) {
        if (instance->codecMode == W_SVAC_ENC) {
            regVal = (param->disableDeblk<<5) |
                     ((param->chromaDcQpOffset&0x1F)<<14) |
                     ((param->chromaAcQpOffset&0x1F)<<19) |
                     ((param->lumaDcQpOffset&0x1F)<<24);
        }
        else {
            regVal = (param->constIntraPredFlag<<1) |
                     (param->lfCrossSliceBoundaryEnable<<2) |
                     ((param->weightPredEnable&1)<<3) |
                     (param->disableDeblk<<5) |
                     ((param->betaOffsetDiv2&0xF)<<6) |
                     ((param->tcOffsetDiv2&0xF)<<10) |
                     ((param->chromaCbQpOffset&0x1F)<<14) |
                     ((param->chromaCrQpOffset&0x1F)<<19) |
                     (param->transform8x8Enable<<29) |
                     (param->entropyCodingMode<<30);
        }
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_PPS_PARAM, regVal);
    }

    if (param->enable_option & ENC_SET_CHANGE_PARAM_INTRA_PARAM) {
        if (instance->codecMode == W_AVC_ENC) {
            VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_INTRA_PARAM, (param->intraQP<<0) | ((param->intraPeriod&0x7ff)<<6) | ((param->avcIdrPeriod&0x7ff)<<17) | ((param->forcedIdrHeaderEnable&3)<<28));
        }
        else {
            VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_INTRA_PARAM, (param->intraQP<<3) | (param->forcedIdrHeaderEnable<<9) | (param->intraPeriod<<16));
        }
    }

    if (param->enable_option & ENC_SET_CHANGE_PARAM_RC_TARGET_RATE) {
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_TARGET_RATE, param->bitRate);
    }

    if (param->enable_option & ENC_SET_CHANGE_PARAM_RC) {
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_PARAM, (param->hvsQPEnable<<2) |
                (param->hvsQpScale<<4) |
                (param->vbvBufferSize<<20));
    }

    if (param->enable_option & ENC_SET_CHANGE_PARAM_RC_MIN_MAX_QP) {
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_MIN_MAX_QP, (param->minQpI<<0) |
                (param->maxQpI<<6) |
                (param->hvsMaxDeltaQp<<12));
    }

    if (param->enable_option & ENC_SET_CHANGE_PARAM_RC_INTER_MIN_MAX_QP) {
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_INTER_MIN_MAX_QP, (param->minQpP<<0) |
                (param->maxQpP<<6) |
                (param->minQpB<<12) |
                (param->maxQpB<<18));
    }

    if (param->enable_option & ENC_SET_CHANGE_PARAM_RC_BIT_RATIO_LAYER) {
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_BIT_RATIO_LAYER_0_3, (param->fixedBitRatio[0]<<0) |
                (param->fixedBitRatio[1]<<8) |
                (param->fixedBitRatio[2]<<16) |
                (param->fixedBitRatio[3]<<24));
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_BIT_RATIO_LAYER_4_7, (param->fixedBitRatio[4]<<0) |
                (param->fixedBitRatio[5]<<8) |
                (param->fixedBitRatio[6]<<16) |
                (param->fixedBitRatio[7]<<24));
    }

    if (param->enable_option & ENC_SET_CHANGE_PARAM_RC_WEIGHT) {
        regVal = (param->rcWeightBuf<<8 | param->rcWeightParam);
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RC_WEIGHT_PARAM, regVal);
    }

    if (param->enable_option & ENC_SET_CHANGE_PARAM_RDO) {
        regVal = (param->rdoSkip<<2) |
                 (param->lambdaScalingEnable<<3) |
                 (param->coefClearDisable<<4) |
                 (param->intraNxNEnable<<8) |
                 (param->maxNumMerge<<18) |
                 (param->customMDEnable<<20) |
                 (param->customLambdaEnable<<21);
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_RDO_PARAM, regVal);
    }

    if (param->enable_option & ENC_SET_CHANGE_PARAM_INDEPEND_SLICE) {
        if (instance->codecMode == W_HEVC_ENC) {
            VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_INDEPENDENT_SLICE, param->independSliceModeArg<<16 | param->independSliceMode);
        }
        else if (instance->codecMode == W_AVC_ENC) {
            VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_INDEPENDENT_SLICE, param->avcSliceArg<<16 | param->avcSliceMode);
        }
    }

    if (instance->codecMode == W_HEVC_ENC && param->enable_option & ENC_SET_CHANGE_PARAM_DEPEND_SLICE) {
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_DEPENDENT_SLICE, param->dependSliceModeArg<<16 | param->dependSliceMode);
    }

    if (instance->codecMode == W_HEVC_ENC && param->enable_option & ENC_SET_CHANGE_PARAM_NR) {
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_NR_PARAM, (param->nrYEnable<<0) |
                (param->nrCbEnable<<1) |
                (param->nrCrEnable<<2) |
                (param->nrNoiseEstEnable<<3) |
                (param->nrNoiseSigmaY<<4) |
                (param->nrNoiseSigmaCb<<12) |
                (param->nrNoiseSigmaCr<<20));
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_NR_WEIGHT, (param->nrIntraWeightY<<0) |
                (param->nrIntraWeightCb<<5) |
                (param->nrIntraWeightCr<<10) |
                (param->nrInterWeightY<<15) |
                (param->nrInterWeightCb<<20) |
                (param->nrInterWeightCr<<25));
    }

    if (param->enable_option & ENC_SET_CHANGE_PARAM_BG) {
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_BG_PARAM, (param->bgThrDiff<<1) |
                (param->bgThrMeanDiff<<10) |
                (param->bgLambdaQp<<18) |
                ((param->bgDeltaQp&0x1F)<<24) |
                (instance->codecMode == W_AVC_ENC ? param->s2fmeDisable<<29 : 0));
    }

    if (instance->codecMode == W_HEVC_ENC && param->enable_option & ENC_SET_CHANGE_PARAM_CUSTOM_MD) {
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_PU04, (param->pu04DeltaRate&0xFF) |
                ((param->pu04IntraPlanarDeltaRate&0xFF)<<8) |
                ((param->pu04IntraDcDeltaRate&0xFF)<<16) |
                ((param->pu04IntraAngleDeltaRate&0xFF)<<24));
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_PU08, (param->pu08DeltaRate&0xFF) |
                ((param->pu08IntraPlanarDeltaRate&0xFF)<<8) |
                ((param->pu08IntraDcDeltaRate&0xFF)<<16) |
                ((param->pu08IntraAngleDeltaRate&0xFF)<<24));
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_PU16, (param->pu16DeltaRate&0xFF) |
                ((param->pu16IntraPlanarDeltaRate&0xFF)<<8) |
                ((param->pu16IntraDcDeltaRate&0xFF)<<16) |
                ((param->pu16IntraAngleDeltaRate&0xFF)<<24));
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_PU32, (param->pu32DeltaRate&0xFF) |
                ((param->pu32IntraPlanarDeltaRate&0xFF)<<8) |
                ((param->pu32IntraDcDeltaRate&0xFF)<<16) |
                ((param->pu32IntraAngleDeltaRate&0xFF)<<24));
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_CU08, (param->cu08IntraDeltaRate&0xFF) |
                ((param->cu08InterDeltaRate&0xFF)<<8) |
                ((param->cu08MergeDeltaRate&0xFF)<<16));
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_CU16, (param->cu16IntraDeltaRate&0xFF) |
                ((param->cu16InterDeltaRate&0xFF)<<8) |
                ((param->cu16MergeDeltaRate&0xFF)<<16));
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_MD_CU32, (param->cu32IntraDeltaRate&0xFF) |
                ((param->cu32InterDeltaRate&0xFF)<<8) |
                ((param->cu32MergeDeltaRate&0xFF)<<16));
    }

    if (instance->codecMode == W_HEVC_ENC && param->enable_option & ENC_SET_CHANGE_PARAM_CUSTOM_LAMBDA) {
        VpuWriteReg(coreIdx, W5_CMD_ENC_SEQ_CUSTOM_LAMBDA_ADDR, param->customLambdaAddr);
    }
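
    /* Issue W5_ENC_SET_PARAM, update the command/report queue counters, and map
     * any reported failure reason from W5_RET_FAIL_REASON to a RetCode. */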
    Wave5BitIssueCommand(instance, W5_ENC_SET_PARAM);

    if (vdi_wait_vpu_busy(coreIdx, __VPU_BUSY_TIMEOUT, W5_VPU_BUSY_STATUS) == -1) {
        if (instance->loggingEnable)
            vdi_log(coreIdx, W5_ENC_SET_PARAM, 2);
        LeaveLock(coreIdx);
        return RETCODE_VPU_RESPONSE_TIMEOUT;
    }

    regVal = VpuReadReg(coreIdx, W5_RET_QUEUE_STATUS);
    pEncInfo->instanceQueueCount = (regVal>>16) & 0xFF;
    pEncInfo->reportQueueCount   = (regVal & 0xFFFF);

    if (VpuReadReg(coreIdx, W5_RET_SUCCESS) == 0) {
        regVal = VpuReadReg(instance->coreIdx, W5_RET_FAIL_REASON);
        if (regVal != WAVE5_QUEUEING_FAIL)
            VLOG(ERR, "FAIL_REASON = 0x%x\n", regVal);

        LeaveLock(coreIdx);
        if (regVal == WAVE5_QUEUEING_FAIL)
            return RETCODE_QUEUEING_FAILURE;
        else if (regVal == WAVE5_SYSERR_ACCESS_VIOLATION_HW)
            return RETCODE_MEMORY_ACCESS_VIOLATION;
        else if (regVal == WAVE5_SYSERR_WATCHDOG_TIMEOUT)
            return RETCODE_VPU_RESPONSE_TIMEOUT;
        else if (regVal == WAVE5_ERROR_FW_FATAL)
            return RETCODE_ERROR_FW_FATAL;
        else
            return RETCODE_FAILURE;
    }

    LeaveLock(coreIdx);
    return RETCODE_SUCCESS;
}
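
/* Query the encoder instance for the source buffer flag (GET_SRC_BUF_FLAG)
 * and return the value read from W5_RET_ENC_SRC_BUF_FLAG through *flag. */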
RetCode Wave5VpuGetSrcBufFlag(CodecInst* instance, Uint32* flag)
{
    RetCode ret = RETCODE_SUCCESS;

    ret = SendQuery(instance, GET_SRC_BUF_FLAG);
    if (ret != RETCODE_SUCCESS)
        return RETCODE_QUERY_FAILURE;

    *flag = VpuReadReg(instance->coreIdx, W5_RET_ENC_SRC_BUF_FLAG);

    return RETCODE_SUCCESS;
}

//static RetCode CalcEncCropInfo(EncWaveParam* param, int rotMode, int srcWidth, int srcHeight);
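
/* GOP step size implied by each gopPresetIdx, used below to validate the
 * intra period; entry 0 (custom GOP) is not used. */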
static Uint32 presetGopSize[] = {
    1, /* Custom GOP, Not used */
    1, /* All Intra */
    1, /* IPP Cyclic GOP size 1 */
    1, /* IBB Cyclic GOP size 1 */
    2, /* IBP Cyclic GOP size 2 */
    4, /* IBBBP */
    4,
    4,
    8,
};
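
/* Validate the codec-independent encoder parameters: GOP structure vs. intra
 * period, slice options, conformance window alignment, CU size mode, lossless
 * constraints, and intra refresh. Every violation is logged and the function
 * returns RETCODE_FAILURE if any check fails. */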
RetCode CheckEncCommonParamValid(EncOpenParam* pop)
{
    RetCode ret = RETCODE_SUCCESS;
    Int32   low_delay = 0;
    Int32   intra_period_gop_step_size;
    Int32   i;
    EncWaveParam* param = &pop->EncStdParam.waveParam;

    // check low-delay gop structure
    if (param->gopPresetIdx == PRESET_IDX_CUSTOM_GOP) // common gop
    {
        Int32 minVal = 0;
        if (param->gopParam.customGopSize > 1)
        {
            minVal = param->gopParam.picParam[0].pocOffset;
            low_delay = 1;
            for (i = 1; i < param->gopParam.customGopSize; i++)
            {
                if (minVal > param->gopParam.picParam[i].pocOffset)
                {
                    low_delay = 0;
                    break;
                }
                else
                    minVal = param->gopParam.picParam[i].pocOffset;
            }
        }
    }
    else if (param->gopPresetIdx == PRESET_IDX_ALL_I ||
             param->gopPresetIdx == PRESET_IDX_IPP   ||
             param->gopPresetIdx == PRESET_IDX_IBBB  ||
             param->gopPresetIdx == PRESET_IDX_IPPPP ||
             param->gopPresetIdx == PRESET_IDX_IBBBB) // low-delay case (IPPP, IBBB)
        low_delay = 1;

    if (low_delay) {
        intra_period_gop_step_size = 1;
    }
    else {
        if (param->gopPresetIdx == PRESET_IDX_CUSTOM_GOP) {
            intra_period_gop_step_size = param->gopParam.customGopSize;
        }
        else {
            intra_period_gop_step_size = presetGopSize[param->gopPresetIdx];
        }
    }

    if (pop->bitstreamFormat == STD_HEVC) {
        if (!low_delay && (param->intraPeriod != 0) && (param->decodingRefreshType != 0) && (intra_period_gop_step_size != 0) &&
            (param->intraPeriod < intra_period_gop_step_size * 2)) {
            VLOG(ERR, "CFG FAIL : Not support intra period[%d] for the gop structure\n", param->intraPeriod);
            VLOG(ERR, "RECOMMEND CONFIG PARAMETER : Intra period >= %d\n", intra_period_gop_step_size * 2);
            ret = RETCODE_FAILURE;
        }
    }
    else if (pop->bitstreamFormat == STD_SVAC) {
        if (!low_delay && (param->intraPeriod != 0) && (intra_period_gop_step_size != 0) &&
            (param->intraPeriod % intra_period_gop_step_size) != 1) {
            VLOG(ERR, "CFG FAIL : Not support intra period[%d] for the gop structure\n", param->intraPeriod);
            VLOG(ERR, "RECOMMEND CONFIG PARAMETER : Intra period = %d\n", intra_period_gop_step_size * (param->intraPeriod / intra_period_gop_step_size));
            ret = RETCODE_FAILURE;
        }
    }

    if (!low_delay && (param->intraPeriod != 0) && (intra_period_gop_step_size != 0) && ((param->intraPeriod % intra_period_gop_step_size) == 1) && param->decodingRefreshType == 0)
    {
        VLOG(ERR, "CFG FAIL : Not support decoding refresh type[%d] for closed gop structure\n", param->decodingRefreshType);
        VLOG(ERR, "RECOMMEND CONFIG PARAMETER : Decoding refresh type = IDR\n");
        ret = RETCODE_FAILURE;
    }

    if (param->gopPresetIdx == PRESET_IDX_CUSTOM_GOP) {
        for (i = 0; i < param->gopParam.customGopSize; i++)
        {
            if (param->gopParam.picParam[i].temporalId >= MAX_NUM_TEMPORAL_LAYER)
            {
                VLOG(ERR, "CFG FAIL : temporalId %d exceeds MAX_NUM_TEMPORAL_LAYER\n", param->gopParam.picParam[i].temporalId);
                VLOG(ERR, "RECOMMEND CONFIG PARAMETER : Adjust temporal ID under MAX_NUM_TEMPORAL_LAYER(7) in GOP structure\n");
                ret = RETCODE_FAILURE;
            }
            if (param->gopParam.picParam[i].temporalId < 0)
            {
                VLOG(ERR, "CFG FAIL : Must be %d-th temporal_id >= 0\n", param->gopParam.picParam[i].temporalId);
                VLOG(ERR, "RECOMMEND CONFIG PARAMETER : Adjust temporal layer above '0' in GOP structure\n");
                ret = RETCODE_FAILURE;
            }
        }
    }

    if (param->useRecommendEncParam == 0)
    {
        // RDO
        {
            int align_32_width_flag  = pop->picWidth % 32;
            int align_16_width_flag  = pop->picWidth % 16;
            int align_8_width_flag   = pop->picWidth % 8;
            int align_32_height_flag = pop->picHeight % 32;
            int align_16_height_flag = pop->picHeight % 16;
            int align_8_height_flag  = pop->picHeight % 8;

            if (((param->cuSizeMode&0x1) == 0) && ((align_8_width_flag != 0) || (align_8_height_flag != 0)))
            {
                VLOG(ERR, "CFG FAIL : Picture width and height must be aligned with 8 pixels when enable CU8x8 of cuSizeMode \n");
                VLOG(ERR, "RECOMMEND CONFIG PARAMETER : cuSizeMode |= 0x1 (CU8x8)\n");
                ret = RETCODE_FAILURE;
            }
            else if (((param->cuSizeMode&0x1) == 0) && ((param->cuSizeMode&0x2) == 0) && ((align_16_width_flag != 0) || (align_16_height_flag != 0)))
            {
                VLOG(ERR, "CFG FAIL : Picture width and height must be aligned with 16 pixels when enable CU16x16 of cuSizeMode\n");
                VLOG(ERR, "RECOMMEND CONFIG PARAMETER : cuSizeMode |= 0x2 (CU16x16)\n");
                ret = RETCODE_FAILURE;
            }
            else if (((param->cuSizeMode&0x1) == 0) && ((param->cuSizeMode&0x2) == 0) && ((param->cuSizeMode&0x4) == 0) && ((align_32_width_flag != 0) || (align_32_height_flag != 0)))
            {
                VLOG(ERR, "CFG FAIL : Picture width and height must be aligned with 32 pixels when enable CU32x32 of cuSizeMode\n");
                VLOG(ERR, "RECOMMEND CONFIG PARAMETER : cuSizeMode |= 0x4 (CU32x32)\n");
                ret = RETCODE_FAILURE;
            }
        }
    }

    // Slice
    {
        if (param->wppEnable == 1 && param->independSliceMode == 1) {
            int num_ctb_in_width = VPU_ALIGN64(pop->picWidth)>>6;
            if (param->independSliceModeArg % num_ctb_in_width) {
                printf("CFG FAIL : If WaveFrontSynchro(WPP) '1', the number of IndeSliceArg(ctb_num) must be multiple of num_ctb_in_width\n");
                printf("RECOMMEND CONFIG PARAMETER : IndeSliceArg = num_ctb_in_width * a\n");
                ret = RETCODE_FAILURE;
            }
        }

        // multi-slice & wpp
        if (param->wppEnable == 1 && param->dependSliceMode != 0) {
            VLOG(ERR, "CFG FAIL : If WaveFrontSynchro(WPP) '1', the option of multi-slice must be '0'\n");
            VLOG(ERR, "RECOMMEND CONFIG PARAMETER : independSliceMode = 0, dependSliceMode = 0\n");
            ret = RETCODE_FAILURE;
        }

        if (param->independSliceMode == 0 && param->dependSliceMode != 0)
        {
            VLOG(ERR, "CFG FAIL : If independSliceMode is '0', dependSliceMode must be '0'\n");
            VLOG(ERR, "RECOMMEND CONFIG PARAMETER : independSliceMode = 1, independSliceModeArg = TotalCtuNum\n");
            ret = RETCODE_FAILURE;
        }
        else if ((param->independSliceMode == 1) && (param->dependSliceMode == 1))
        {
            if (param->independSliceModeArg < param->dependSliceModeArg)
            {
                VLOG(ERR, "CFG FAIL : If independSliceMode & dependSliceMode is both '1' (multi-slice with ctu count), must be independSliceModeArg >= dependSliceModeArg\n");
                VLOG(ERR, "RECOMMEND CONFIG PARAMETER : dependSliceMode = 0\n");
                ret = RETCODE_FAILURE;
            }
        }

        if (param->independSliceMode != 0)
        {
            if (param->independSliceModeArg > 65535)
            {
                VLOG(ERR, "CFG FAIL : If independSliceMode is not 0, must be independSliceModeArg <= 0xFFFF\n");
                ret = RETCODE_FAILURE;
            }
        }

        if (param->dependSliceMode != 0)
        {
            if (param->dependSliceModeArg > 65535)
            {
                VLOG(ERR, "CFG FAIL : If dependSliceMode is not 0, must be dependSliceModeArg <= 0xFFFF\n");
                ret = RETCODE_FAILURE;
            }
        }
    }

    if (param->confWinTop % 2) {
        VLOG(ERR, "CFG FAIL : conf_win_top : %d value is not available. The value should be equal to multiple of 2.\n", param->confWinTop);
        ret = RETCODE_FAILURE;
    }

    if (param->confWinBot % 2) {
        VLOG(ERR, "CFG FAIL : conf_win_bot : %d value is not available. The value should be equal to multiple of 2.\n", param->confWinBot);
        ret = RETCODE_FAILURE;
    }

    if (param->confWinLeft % 2) {
        VLOG(ERR, "CFG FAIL : conf_win_left : %d value is not available. The value should be equal to multiple of 2.\n", param->confWinLeft);
        ret = RETCODE_FAILURE;
    }

    if (param->confWinRight % 2) {
        VLOG(ERR, "CFG FAIL : conf_win_right : %d value is not available. The value should be equal to multiple of 2.\n", param->confWinRight);
        ret = RETCODE_FAILURE;
    }

    // RDO
    if (param->cuSizeMode == 0) {
        VLOG(ERR, "CFG FAIL : EnCu8x8, EnCu16x16, and EnCu32x32 must be equal to 1 respectively.\n");
        ret = RETCODE_FAILURE;
    }

    if (param->losslessEnable && (param->nrYEnable || param->nrCbEnable || param->nrCrEnable)) {
        VLOG(ERR, "CFG FAIL : LosslessCoding and NoiseReduction (EnNrY, EnNrCb, and EnNrCr) cannot be used simultaneously.\n");
        ret = RETCODE_FAILURE;
    }

    if (param->losslessEnable && param->bgDetectEnable) {
        VLOG(ERR, "CFG FAIL : LosslessCoding and BgDetect cannot be used simultaneously.\n");
        ret = RETCODE_FAILURE;
    }

    if (param->losslessEnable && pop->rcEnable) {
        VLOG(ERR, "CFG FAIL : LosslessCoding and RateControl cannot be used simultaneously.\n");
        ret = RETCODE_FAILURE;
    }

    if (param->losslessEnable && param->roiEnable) {
        VLOG(ERR, "CFG FAIL : LosslessCoding and Roi cannot be used simultaneously.\n");
        ret = RETCODE_FAILURE;
    }

    if (param->losslessEnable && !param->skipIntraTrans) {
        VLOG(ERR, "CFG FAIL : IntraTransSkip must be enabled when LosslessCoding is enabled.\n");
        ret = RETCODE_FAILURE;
    }

    // Intra refresh
    {
        Int32 num_ctu_row = (pop->picHeight + 64 - 1) / 64;
        Int32 num_ctu_col = (pop->picWidth + 64 - 1) / 64;

        if (param->intraRefreshMode && param->intraRefreshArg <= 0)
        {
            VLOG(ERR, "CFG FAIL : IntraCtuRefreshArg must be greater than 0 when IntraCtuRefreshMode is enabled.\n");
            ret = RETCODE_FAILURE;
        }
        if (param->intraRefreshMode == 1 && param->intraRefreshArg > num_ctu_row)
        {
            VLOG(ERR, "CFG FAIL : IntraCtuRefreshArg must be less than the number of CTUs in a row when IntraCtuRefreshMode is equal to 1.\n");
            ret = RETCODE_FAILURE;
        }
        if (param->intraRefreshMode == 2 && param->intraRefreshArg > num_ctu_col)
        {
            VLOG(ERR, "CFG FAIL : IntraCtuRefreshArg must be less than the number of CTUs in a column when IntraCtuRefreshMode is equal to 2.\n");
            ret = RETCODE_FAILURE;
        }
        if (param->intraRefreshMode == 3 && param->intraRefreshArg > num_ctu_row*num_ctu_col)
        {
            VLOG(ERR, "CFG FAIL : IntraCtuRefreshArg must be less than the number of CTUs in a picture when IntraCtuRefreshMode is equal to 3.\n");
            ret = RETCODE_FAILURE;
        }
        if (param->intraRefreshMode == 4 && param->intraRefreshArg > num_ctu_row*num_ctu_col)
        {
            VLOG(ERR, "CFG FAIL : IntraCtuRefreshArg must be less than the number of CTUs in a picture when IntraCtuRefreshMode is equal to 4.\n");
            ret = RETCODE_FAILURE;
        }
        if (param->intraRefreshMode == 4 && param->losslessEnable)
        {
            VLOG(ERR, "CFG FAIL : LosslessCoding and IntraCtuRefreshMode (4) cannot be used simultaneously.\n");
            ret = RETCODE_FAILURE;
        }
        if (param->intraRefreshMode == 4 && param->roiEnable)
        {
            VLOG(ERR, "CFG FAIL : Roi and IntraCtuRefreshMode (4) cannot be used simultaneously.\n");
            ret = RETCODE_FAILURE;
        }
    }
    return ret;
}
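
/* Validate the rate-control parameters: min/max QP ordering per picture type
 * and a target bitrate larger than the frame rate. Only checked when
 * rcEnable is set. */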
RetCode CheckEncRcParamValid(EncOpenParam* pop)
{
    RetCode ret = RETCODE_SUCCESS;
    EncWaveParam* param = &pop->EncStdParam.waveParam;

    if (pop->rcEnable == 1)
    {
        if ((param->minQpI > param->maxQpI) || (param->minQpP > param->maxQpP) || (param->minQpB > param->maxQpB))
        {
            VLOG(ERR, "CFG FAIL : Not allowed MinQP > MaxQP\n");
            VLOG(ERR, "RECOMMEND CONFIG PARAMETER : MinQP = MaxQP\n");
            ret = RETCODE_FAILURE;
        }
        if (pop->bitRate <= (int)pop->frameRateInfo)
        {
            VLOG(ERR, "CFG FAIL : Not allowed EncBitRate <= FrameRate\n");
            VLOG(ERR, "RECOMMEND CONFIG PARAMETER : EncBitRate = FrameRate * 10000\n");
            ret = RETCODE_FAILURE;
        }
    }
    return ret;
}
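
/* Validate a custom GOP description: the GOP is unrolled over two periods and
 * each picture's POC offset, temporal_id, and L0/L1 reference POCs are checked
 * against the pictures already encoded in display/encoding order. */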
RetCode CheckEncCustomGopParamValid(EncOpenParam* pop)
{
    RetCode ret = RETCODE_SUCCESS;
    CustomGopParam*    gopParam;
    CustomGopPicParam* gopPicParam;
    CustomGopPicParam  new_gop[MAX_GOP_NUM*2 + 1];
    Int32 curr_poc, i, ei, gi, gop_size;
    Int32 enc_tid[MAX_GOP_NUM*2 + 1];

    gopParam = &(pop->EncStdParam.waveParam.gopParam);
    gop_size = gopParam->customGopSize;

    new_gop[0].pocOffset   = 0;
    new_gop[0].temporalId  = 0;
    new_gop[0].picType     = PIC_TYPE_I;
    new_gop[0].numRefPicL0 = 0;
    enc_tid[0] = 0;

    for (i = 0; i < gop_size * 2; i++)
    {
        ei = i % gop_size;
        gi = i / gop_size;
        gopPicParam = &gopParam->picParam[ei];

        curr_poc = gi * gop_size + gopPicParam->pocOffset;
        new_gop[i + 1].pocOffset   = curr_poc;
        new_gop[i + 1].temporalId  = gopPicParam->temporalId;
        new_gop[i + 1].picType     = gopPicParam->picType;
        new_gop[i + 1].refPocL0    = gopPicParam->refPocL0 + gi * gop_size;
        new_gop[i + 1].refPocL1    = gopPicParam->refPocL1 + gi * gop_size;
        new_gop[i + 1].numRefPicL0 = gopPicParam->numRefPicL0;
        enc_tid[i + 1] = -1;
    }

    for (i = 0; i < gop_size; i++)
    {
        gopPicParam = &gopParam->picParam[i];

        if (gopPicParam->pocOffset <= 0)
        {
            VLOG(ERR, "CFG FAIL : the POC of the %d-th picture must be greater than 0\n", i+1);
            ret = RETCODE_FAILURE;
        }
        if (gopPicParam->pocOffset > gop_size)
        {
            VLOG(ERR, "CFG FAIL : the POC of the %d-th picture must be less than GopSize + 1\n", i+1);
            ret = RETCODE_FAILURE;
        }
        if (gopPicParam->temporalId < 0)
        {
            VLOG(ERR, "CFG FAIL : the temporal_id of the %d-th picture must be greater than -1\n", i+1);
            ret = RETCODE_FAILURE;
        }
    }

    for (ei = 1; ei < gop_size * 2 + 1; ei++)
    {
        CustomGopPicParam* cur_pic = &new_gop[ei];
        if (ei <= gop_size)
        {
            enc_tid[cur_pic->pocOffset] = cur_pic->temporalId;
            continue;
        }

        if (new_gop[ei].picType != PIC_TYPE_I)
        {
            Int32 ref_poc = cur_pic->refPocL0;
            if (enc_tid[ref_poc] < 0) // reference picture is not encoded yet
            {
                VLOG(ERR, "CFG FAIL : the 1st reference picture cannot be used as the reference of the picture (POC %d) in encoding order\n", cur_pic->pocOffset - gop_size);
                ret = RETCODE_FAILURE;
            }
            if (enc_tid[ref_poc] > cur_pic->temporalId)
            {
                VLOG(ERR, "CFG FAIL : the temporal_id of the picture (POC %d) is wrong\n", cur_pic->pocOffset - gop_size);
                ret = RETCODE_FAILURE;
            }
            if (ref_poc >= cur_pic->pocOffset)
            {
                VLOG(ERR, "CFG FAIL : the POC of the 1st reference picture of %d-th picture is wrong\n", cur_pic->pocOffset - gop_size);
                ret = RETCODE_FAILURE;
            }
        }
        if (new_gop[ei].picType != PIC_TYPE_P)
        {
            Int32 ref_poc = cur_pic->refPocL1;
            if (enc_tid[ref_poc] < 0) // reference picture is not encoded yet
            {
                VLOG(ERR, "CFG FAIL : the 2nd reference picture cannot be used as the reference of the picture (POC %d) in encoding order\n", cur_pic->pocOffset - gop_size);
                ret = RETCODE_FAILURE;
            }
            if (enc_tid[ref_poc] > cur_pic->temporalId)
            {
                VLOG(ERR, "CFG FAIL : the temporal_id of %d-th picture is wrong\n", cur_pic->pocOffset - gop_size);
                ret = RETCODE_FAILURE;
            }
            if (new_gop[ei].picType == PIC_TYPE_P && new_gop[ei].numRefPicL0 > 1)
            {
                if (ref_poc >= cur_pic->pocOffset)
                {
                    VLOG(ERR, "CFG FAIL : the POC of the 2nd reference picture of %d-th picture is wrong\n", cur_pic->pocOffset - gop_size);
                    ret = RETCODE_FAILURE;
                }
            }
            else // HOST_PIC_TYPE_B
            {
                if (ref_poc == cur_pic->pocOffset)
                {
                    VLOG(ERR, "CFG FAIL : the POC of the 2nd reference picture of %d-th picture is wrong\n", cur_pic->pocOffset - gop_size);
                    ret = RETCODE_FAILURE;
                }
            }
        }
        curr_poc = cur_pic->pocOffset;
        enc_tid[curr_poc] = cur_pic->temporalId;
    }
    return ret;
}