0001-add-b-v-extension-support.patch 206 KB

From e0e44ae667aea74a1cf3738cd7a6174b45897a82 Mon Sep 17 00:00:00 2001
From: "max.ma" <max.ma@starfivetech.com>
Date: Mon, 1 Nov 2021 18:13:28 -0700
Subject: [PATCH 1/1] add b & v extension support

---
 gas/config/tc-riscv.c      |  464 ++++++++
 include/opcode/riscv-opc.h | 2192 +++++++++++++++++++++++++++++++++++-
 include/opcode/riscv.h     |   88 +-
 opcodes/riscv-dis.c        |   92 ++
 opcodes/riscv-opc.c        | 1334 ++++++++++++++++++++++
 5 files changed, 4168 insertions(+), 2 deletions(-)

diff --git a/gas/config/tc-riscv.c b/gas/config/tc-riscv.c
index 9df6d3f415..45f5bda4a2 100644
--- a/gas/config/tc-riscv.c
+++ b/gas/config/tc-riscv.c
@@ -235,6 +235,25 @@ riscv_multi_subset_supports (enum riscv_insn_class insn_class)
     case INSN_CLASS_Q: return riscv_subset_supports ("q");
+    case INSN_CLASS_ZBB:
+      return riscv_subset_supports ("zbb");
+    case INSN_CLASS_ZBA:
+      return riscv_subset_supports ("zba");
+    case INSN_CLASS_ZBC:
+      return riscv_subset_supports ("zbc");
+    case INSN_CLASS_ZBS:
+      return riscv_subset_supports ("zbs");
+
+    case INSN_CLASS_V:
+      return riscv_subset_supports ("v");
+    case INSN_CLASS_V_AND_F:
+      return riscv_subset_supports ("v") && riscv_subset_supports ("f");
+    case INSN_CLASS_V_OR_ZVAMO:
+      return (riscv_subset_supports ("a")
+              && (riscv_subset_supports ("v")
+                  || riscv_subset_supports ("zvamo")));
+    case INSN_CLASS_V_OR_ZVLSSEG:
+      return riscv_subset_supports ("v") || riscv_subset_supports ("zvlsseg");
     default:
       as_fatal ("Unreachable");
       return FALSE;
@@ -599,6 +618,8 @@ enum reg_class
 {
   RCLASS_GPR,
   RCLASS_FPR,
+  RCLASS_VECR,
+  RCLASS_VECM,
   RCLASS_MAX,
   RCLASS_CSR
@@ -700,6 +721,12 @@ riscv_csr_address (const char *csr_name,
     case CSR_CLASS_DEBUG:
       need_check_version = FALSE;
       break;
+    case CSR_CLASS_V:
+      result = (riscv_subset_supports ("v")
+                || riscv_subset_supports ("zvamo")
+                || riscv_subset_supports ("zvlsseg"));
+      need_check_version = FALSE;
+      break;
     default:
       as_bad (_("internal: bad RISC-V CSR class (0x%x)"), csr_class);
     }
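With this hunk, vector CSR accesses are gated on any of v, zvamo, or zvlsseg. An illustrative sketch of what the assembler then accepts (this assumes vector CSRs such as vl and vtype are registered under CSR_CLASS_V elsewhere in the patch, which is not shown here):

    # requires -march=...v (or ..._zvamo / ..._zvlsseg)
    csrr t0, vtype    # read the current vector type configuration
    csrr t1, vl       # read the current vector length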
@@ -914,6 +941,8 @@ validate_riscv_insn (const struct riscv_opcode *opc, int length)
       case ')': break;
       case '<': USE_BITS (OP_MASK_SHAMTW, OP_SH_SHAMTW); break;
       case '>': USE_BITS (OP_MASK_SHAMT, OP_SH_SHAMT); break;
+      case '#': used_bits |= ENCODE_PREF_TIMM (-1U); break;
+      case '+': used_bits |= ENCODE_PREF_SIMM (-1U); break;
       case 'A': break;
       case 'D': USE_BITS (OP_MASK_RD, OP_SH_RD); break;
       case 'Z': USE_BITS (OP_MASK_RS1, OP_SH_RS1); break;
@@ -966,6 +995,33 @@ validate_riscv_insn (const struct riscv_opcode *opc, int length)
            return FALSE;
          }
        break;
+
+      case 'V': /* RVV */
+        switch (c = *p++)
+          {
+          case 'd':
+          case 'f': USE_BITS (OP_MASK_VD, OP_SH_VD); break;
+          case 'e': USE_BITS (OP_MASK_VWD, OP_SH_VWD); break;
+          case 's': USE_BITS (OP_MASK_VS1, OP_SH_VS1); break;
+          case 't': USE_BITS (OP_MASK_VS2, OP_SH_VS2); break;
+          case 'u': USE_BITS (OP_MASK_VS1, OP_SH_VS1);
+                    USE_BITS (OP_MASK_VS2, OP_SH_VS2); break;
+          case 'v': USE_BITS (OP_MASK_VD, OP_SH_VD);
+                    USE_BITS (OP_MASK_VS1, OP_SH_VS1);
+                    USE_BITS (OP_MASK_VS2, OP_SH_VS2); break;
+          case '0': break;
+          case 'b': used_bits |= ENCODE_RVV_VB_IMM (-1U); break;
+          case 'c': used_bits |= ENCODE_RVV_VC_IMM (-1U); break;
+          case 'i':
+          case 'j':
+          case 'k': USE_BITS (OP_MASK_VIMM, OP_SH_VIMM); break;
+          case 'm': USE_BITS (OP_MASK_VMASK, OP_SH_VMASK); break;
+          default:
+            as_bad (_("internal: bad RISC-V opcode (unknown operand type `V%c'): %s %s"),
+                    c, opc->name, opc->args);
+          }
+        break;
+
       default:
         as_bad (_("internal: bad RISC-V opcode "
                   "(unknown operand type `%c'): %s %s"),
@@ -1044,6 +1100,8 @@ md_begin (void)
   hash_reg_names (RCLASS_GPR, riscv_gpr_names_abi, NGPR);
   hash_reg_names (RCLASS_FPR, riscv_fpr_names_numeric, NFPR);
   hash_reg_names (RCLASS_FPR, riscv_fpr_names_abi, NFPR);
+  hash_reg_names (RCLASS_VECR, riscv_vecr_names_numeric, NVECR);
+  hash_reg_names (RCLASS_VECM, riscv_vecm_names_numeric, NVECM);
   /* Add "fp" as an alias for "s0".  */
   hash_reg_name (RCLASS_GPR, "fp", 8);
@@ -1202,6 +1260,41 @@ macro_build (expressionS *ep, const char *name, const char *fmt, ...)
          break;
        case ',':
          continue;
+
+       case 'V': /* RVV */
+         {
+           switch (*fmt++)
+             {
+             case 'd':
+               INSERT_OPERAND (VD, insn, va_arg (args, int));
+               continue;
+
+             case 's':
+               INSERT_OPERAND (VS1, insn, va_arg (args, int));
+               continue;
+
+             case 't':
+               INSERT_OPERAND (VS2, insn, va_arg (args, int));
+               continue;
+
+             case 'm':
+               {
+                 int reg = va_arg (args, int);
+                 if (reg == -1)
+                   {
+                     INSERT_OPERAND (VMASK, insn, 1);
+                     continue;
+                   }
+                 else if (reg == 0)
+                   {
+                     INSERT_OPERAND (VMASK, insn, 0);
+                     continue;
+                   }
+               }
+               /* fallthru */
+             }
+         }
+
       default:
         as_fatal (_("internal error: invalid macro"));
       }
@@ -1267,6 +1360,27 @@ check_absolute_expr (struct riscv_cl_insn *ip, expressionS *ex,
   normalize_constant_expr (ex);
 }
+/* The pref type should be one of the following:
+   0000 - scalar prefetch L1: fetch data as if for a normal scalar load,
+          and imply load into all lower cache destination levels.
+   0001 - scalar prefetch L2: fetch data and place the cache-line into L2,
+          and imply load into all lower cache destination levels.
+   0010 - scalar prefetch L3: fetch data and place the cache-line into L3,
+          and imply load into all lower cache destination levels.
+   0011 - scalar prefetch L4: fetch data and place the cache-line into L4,
+          and imply load into all lower cache destination levels
+          (if the L5 is in the cache hierarchy).
+   1000 - vector prefetch L1: fetch data as if for a normal scalar load,
+          but place the cache-line into the vector buffer (vector L1),
+          and imply load into all lower cache destination levels.  */
+
+static void
+check_pref_type (unsigned long type)
+{
+  if (type != 0 && type != 1 && type != 2 && type != 3 && type != 8)
+    as_bad (_("Improper pref type (%lu)"), type);
+}
+
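For illustration, the operand letters '#' (type) and '+' (offset) wired up later in this patch give pref a syntax along these lines (a sketch only: the pref opcode entry itself lives in opcodes/riscv-opc.c, and the exact operand order of type, offset(base) is an assumption here):

    pref 0, 0(a0)     # scalar prefetch into L1, offset 0
    pref 8, 64(a0)    # vector prefetch into L1, offset 64
    pref 4, 0(a0)     # rejected: "Improper pref type (4)"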
 static symbolS *
 make_internal_label (void)
 {
@@ -1366,6 +1480,113 @@ load_const (int reg, expressionS *ep)
     }
 }
+/* Expand RISC-V Vector macros into one or more instructions.  */
+
+static void
+vector_macro (struct riscv_cl_insn *ip)
+{
+  int vd = (ip->insn_opcode >> OP_SH_VD) & OP_MASK_VD;
+  int vs1 = (ip->insn_opcode >> OP_SH_VS1) & OP_MASK_VS1;
+  int vs2 = (ip->insn_opcode >> OP_SH_VS2) & OP_MASK_VS2;
+  int vm = (ip->insn_opcode >> OP_SH_VMASK) & OP_MASK_VMASK;
+  int vtemp = (ip->insn_opcode >> OP_SH_VFUNCT6) & OP_MASK_VFUNCT6;
+  int mask = ip->insn_mo->mask;
+
+  switch (mask)
+    {
+    case M_VMSGE:
+      if (vm)
+        {
+          /* Unmasked.  */
+          macro_build (NULL, "vmslt.vx", "Vd,Vt,sVm", vd, vs2, vs1, -1);
+          macro_build (NULL, "vmnand.mm", "Vd,Vt,Vs", vd, vd, vd);
+          break;
+        }
+      if (vtemp != 0)
+        {
+          /* Masked.  Have vtemp to avoid overlap constraints.  */
+          if (vd == vm)
+            {
+              macro_build (NULL, "vmslt.vx", "Vd,Vt,s", vtemp, vs2, vs1);
+              macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vd, vm, vtemp);
+            }
+          else
+            {
+              /* Preserve the value of vd where it is not updated by vm.  */
+              macro_build (NULL, "vmslt.vx", "Vd,Vt,s", vtemp, vs2, vs1);
+              macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vtemp, vm, vtemp);
+              macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vd, vd, vm);
+              macro_build (NULL, "vmor.mm", "Vd,Vt,Vs", vd, vtemp, vd);
+            }
+        }
+      else if (vd != vm)
+        {
+          /* Masked.  This may cause vd to overlap vs2 when LMUL > 1.  */
+          macro_build (NULL, "vmslt.vx", "Vd,Vt,sVm", vd, vs2, vs1, vm);
+          macro_build (NULL, "vmxor.mm", "Vd,Vt,Vs", vd, vd, vm);
+        }
+      else
+        as_bad (_("must provide temp if destination overlaps mask"));
+      break;
+
+    case M_VMSGEU:
+      if (vm)
+        {
+          /* Unmasked.  */
+          macro_build (NULL, "vmsltu.vx", "Vd,Vt,sVm", vd, vs2, vs1, -1);
+          macro_build (NULL, "vmnand.mm", "Vd,Vt,Vs", vd, vd, vd);
+          break;
+        }
+      if (vtemp != 0)
+        {
+          /* Masked.  Have vtemp to avoid overlap constraints.  */
+          if (vd == vm)
+            {
+              macro_build (NULL, "vmsltu.vx", "Vd,Vt,s", vtemp, vs2, vs1);
+              macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vd, vm, vtemp);
+            }
+          else
+            {
+              /* Preserve the value of vd where it is not updated by vm.  */
+              macro_build (NULL, "vmsltu.vx", "Vd,Vt,s", vtemp, vs2, vs1);
+              macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vtemp, vm, vtemp);
+              macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vd, vd, vm);
+              macro_build (NULL, "vmor.mm", "Vd,Vt,Vs", vd, vtemp, vd);
+            }
+        }
+      else if (vd != vm)
+        {
+          /* Masked.  This may cause vd to overlap vs2 when LMUL > 1.  */
+          macro_build (NULL, "vmsltu.vx", "Vd,Vt,sVm", vd, vs2, vs1, vm);
+          macro_build (NULL, "vmxor.mm", "Vd,Vt,Vs", vd, vd, vm);
+        }
+      else
+        as_bad (_("must provide temp if destination overlaps mask"));
+      break;
+
+    default:
+      as_bad (_("Macro %s not implemented"), ip->insn_mo->name);
+      break;
+    }
+}
+
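For reference, the expansions produced by the M_VMSGE arm above (vmsge.vx is a pseudo-instruction; the trailing temporary-register spelling follows the 'M' and 'T' operand letters added later in this patch):

    # vmsge.vx v4, v8, a0             (unmasked) expands to:
    vmslt.vx  v4, v8, a0              # v4 = (v8 < a0)
    vmnand.mm v4, v4, v4              # v4 = !(v8 < a0), i.e. v8 >= a0

    # vmsge.vx v4, v8, a0, v0.t, v12  (masked, temp v12, vd != v0) expands to:
    vmslt.vx    v12, v8, a0
    vmandnot.mm v12, v0, v12
    vmandnot.mm v4, v4, v0
    vmor.mm     v4, v12, v4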
+/* Zero extend and sign extend byte/half-word/word.  */
+
+static void
+riscv_ext (int destreg, int srcreg, unsigned shift, bool sign)
+{
+  if (sign)
+    {
+      md_assemblef ("slli x%d, x%d, 0x%x", destreg, srcreg, shift);
+      md_assemblef ("srai x%d, x%d, 0x%x", destreg, destreg, shift);
+    }
+  else
+    {
+      md_assemblef ("slli x%d, x%d, 0x%x", destreg, srcreg, shift);
+      md_assemblef ("srli x%d, x%d, 0x%x", destreg, destreg, shift);
+    }
+}
+
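riscv_ext gives the extension pseudo-instructions a two-shift expansion; for example on RV64, where a caller would pass shift = 64 - 8 for a byte (a sketch; the call sites are not part of this hunk):

    # sext.b a0, a1 (without Zbb) becomes:
    slli a0, a1, 0x38    # move bit 7 up to bit 63
    srai a0, a0, 0x38    # shift back, replicating the sign bit

    # zext.h a0, a1 (without Zbb) becomes:
    slli a0, a1, 0x30
    srli a0, a0, 0x30    # logical shift clears the upper bits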
 /* Expand RISC-V assembly macros into one or more instructions.  */
 static void
 macro (struct riscv_cl_insn *ip, expressionS *imm_expr,
@@ -1640,6 +1861,66 @@ my_getSmallExpression (expressionS *ep, bfd_reloc_code_real_type *reloc,
   return reloc_index;
 }
+/* Parse string STR as a vsetvli operand.  Store the expression in *EP.
+   On exit, EXPR_END points to the first character after the expression.  */
+
+static void
+my_getVsetvliExpression (expressionS *ep, char *str)
+{
+  unsigned int vsew_value = 0, vlmul_value = 0;
+  unsigned int vta_value = 0, vma_value = 0;
+  bfd_boolean vsew_found = FALSE, vlmul_found = FALSE;
+  bfd_boolean vta_found = FALSE, vma_found = FALSE;
+
+  if (arg_lookup (&str, riscv_vsew, ARRAY_SIZE (riscv_vsew), &vsew_value))
+    {
+      if (*str == ',')
+        ++str;
+      if (vsew_found)
+        as_bad (_("multiple vsew constants"));
+      vsew_found = TRUE;
+    }
+  if (arg_lookup (&str, riscv_vlmul, ARRAY_SIZE (riscv_vlmul), &vlmul_value))
+    {
+      if (*str == ',')
+        ++str;
+      if (vlmul_found)
+        as_bad (_("multiple vlmul constants"));
+      vlmul_found = TRUE;
+    }
+  if (arg_lookup (&str, riscv_vta, ARRAY_SIZE (riscv_vta), &vta_value))
+    {
+      if (*str == ',')
+        ++str;
+      if (vta_found)
+        as_bad (_("multiple vta constants"));
+      vta_found = TRUE;
+    }
+  if (arg_lookup (&str, riscv_vma, ARRAY_SIZE (riscv_vma), &vma_value))
+    {
+      if (*str == ',')
+        ++str;
+      if (vma_found)
+        as_bad (_("multiple vma constants"));
+      vma_found = TRUE;
+    }
+
+  if (vsew_found || vlmul_found || vta_found || vma_found)
+    {
+      ep->X_op = O_constant;
+      ep->X_add_number = (vlmul_value << OP_SH_VLMUL)
+                         | (vsew_value << OP_SH_VSEW)
+                         | (vta_value << OP_SH_VTA)
+                         | (vma_value << OP_SH_VMA);
+      expr_end = str;
+    }
+  else
+    {
+      my_getExpression (ep, str);
+      str = expr_end;
+    }
+}
+
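Taken together with the arg_lookup tables (riscv_vsew, riscv_vlmul, riscv_vta, riscv_vma, defined elsewhere in this patch), this lets vsetvli accept either symbolic vtype keywords or a raw constant; assuming the 1.0-draft vtype layout this patch targets (vlmul in bits 2:0, vsew in bits 5:3, vta bit 6, vma bit 7), the two forms below assemble identically:

    vsetvli t0, a0, e32,m1,ta,ma    # symbolic vtype operand
    vsetvli t0, a0, 0xd0            # same vtype value written as a constant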
 /* Parse an opcode name, which may be a mnemonic or a number.  */
 static size_t
 my_getOpcodeExpression (expressionS *ep, bfd_reloc_code_real_type *reloc,
@@ -2187,6 +2468,25 @@ riscv_ip (char *str, struct riscv_cl_insn *ip, expressionS *imm_expr,
        s = expr_end;
        continue;
+     case '#':
+       my_getExpression (imm_expr, s);
+       check_pref_type ((unsigned long) imm_expr->X_add_number);
+       INSERT_OPERAND (PREF_TYPE, *ip, imm_expr->X_add_number);
+       imm_expr->X_op = O_absent;
+       s = expr_end;
+       continue;
+
+     case '+':
+       my_getExpression (imm_expr, s);
+       check_absolute_expr (ip, imm_expr, FALSE);
+       if ((unsigned long) imm_expr->X_add_number > 0xff)
+         as_bad (_("Improper pref offset value (%lu)"),
+                 (unsigned long) imm_expr->X_add_number);
+       INSERT_OPERAND (PREF_OFFSET, *ip, imm_expr->X_add_number);
+       imm_expr->X_op = O_absent;
+       s = expr_end;
+       continue;
+
      case 'E': /* Control register.  */
        insn_with_csr = TRUE;
        explicit_priv_attr = TRUE;
@@ -2503,6 +2803,170 @@ riscv_ip (char *str, struct riscv_cl_insn *ip, expressionS *imm_expr,
        imm_expr->X_op = O_absent;
        continue;
+     case 'V': /* RVV */
+       switch (*++args)
+         {
+         case 'd': /* VD */
+           if (!reg_lookup (&s, RCLASS_VECR, &regno))
+             break;
+           INSERT_OPERAND (VD, *ip, regno);
+           continue;
+
+         case 'e': /* AMO VD */
+           if (reg_lookup (&s, RCLASS_GPR, &regno) && regno == 0)
+             INSERT_OPERAND (VWD, *ip, 0);
+           else if (reg_lookup (&s, RCLASS_VECR, &regno))
+             {
+               INSERT_OPERAND (VWD, *ip, 1);
+               INSERT_OPERAND (VD, *ip, regno);
+             }
+           else
+             break;
+           continue;
+
+         case 'f': /* AMO VS3 */
+           if (!reg_lookup (&s, RCLASS_VECR, &regno))
+             break;
+           if (!EXTRACT_OPERAND (VWD, ip->insn_opcode))
+             INSERT_OPERAND (VD, *ip, regno);
+           else
+             {
+               /* VS3 must match VD.  */
+               if (EXTRACT_OPERAND (VD, ip->insn_opcode) != regno)
+                 break;
+             }
+           continue;
+
+         case 's': /* VS1 */
+           if (!reg_lookup (&s, RCLASS_VECR, &regno))
+             break;
+           INSERT_OPERAND (VS1, *ip, regno);
+           continue;
+
+         case 't': /* VS2 */
+           if (!reg_lookup (&s, RCLASS_VECR, &regno))
+             break;
+           INSERT_OPERAND (VS2, *ip, regno);
+           continue;
+
+         case 'u': /* VS1 == VS2 */
+           if (!reg_lookup (&s, RCLASS_VECR, &regno))
+             break;
+           INSERT_OPERAND (VS1, *ip, regno);
+           INSERT_OPERAND (VS2, *ip, regno);
+           continue;
+
+         case 'v': /* VD == VS1 == VS2 */
+           if (!reg_lookup (&s, RCLASS_VECR, &regno))
+             break;
+           INSERT_OPERAND (VD, *ip, regno);
+           INSERT_OPERAND (VS1, *ip, regno);
+           INSERT_OPERAND (VS2, *ip, regno);
+           continue;
+
+         /* The `V0` is the carry-in register for v[m]adc and v[m]sbc,
+            and is used to choose vs1/rs1/frs1/imm or vs2 for
+            v[f]merge.  It uses the same encoding as the vector mask
+            register.  */
+         case '0':
+           if (reg_lookup (&s, RCLASS_VECR, &regno) && regno == 0)
+             continue;
+           break;
+
+         case 'b': /* vtypei for vsetivli */
+           my_getVsetvliExpression (imm_expr, s);
+           check_absolute_expr (ip, imm_expr, FALSE);
+           if (!VALID_RVV_VB_IMM (imm_expr->X_add_number))
+             as_bad (_("bad value for vsetivli immediate field, "
+                       "value must be 0..1023"));
+           ip->insn_opcode
+             |= ENCODE_RVV_VB_IMM (imm_expr->X_add_number);
+           imm_expr->X_op = O_absent;
+           s = expr_end;
+           continue;
+
+         case 'c': /* vtypei for vsetvli */
+           my_getVsetvliExpression (imm_expr, s);
+           check_absolute_expr (ip, imm_expr, FALSE);
+           if (!VALID_RVV_VC_IMM (imm_expr->X_add_number))
+             as_bad (_("bad value for vsetvli immediate field, "
+                       "value must be 0..2047"));
+           ip->insn_opcode
+             |= ENCODE_RVV_VC_IMM (imm_expr->X_add_number);
+           imm_expr->X_op = O_absent;
+           s = expr_end;
+           continue;
+
+         case 'i': /* vector arith signed immediate */
+           my_getExpression (imm_expr, s);
+           check_absolute_expr (ip, imm_expr, FALSE);
+           if (imm_expr->X_add_number > 15
+               || imm_expr->X_add_number < -16)
+             as_bad (_("bad value for vector immediate field, "
+                       "value must be -16...15"));
+           INSERT_OPERAND (VIMM, *ip, imm_expr->X_add_number);
+           imm_expr->X_op = O_absent;
+           s = expr_end;
+           continue;
+
+         case 'j': /* vector arith unsigned immediate */
+           my_getExpression (imm_expr, s);
+           check_absolute_expr (ip, imm_expr, FALSE);
+           if (imm_expr->X_add_number < 0
+               || imm_expr->X_add_number >= 32)
+             as_bad (_("bad value for vector immediate field, "
+                       "value must be 0...31"));
+           INSERT_OPERAND (VIMM, *ip, imm_expr->X_add_number);
+           imm_expr->X_op = O_absent;
+           s = expr_end;
+           continue;
+
+         case 'k': /* vector arith signed immediate, minus 1 */
+           my_getExpression (imm_expr, s);
+           check_absolute_expr (ip, imm_expr, FALSE);
+           if (imm_expr->X_add_number > 16
+               || imm_expr->X_add_number < -15)
+             as_bad (_("bad value for vector immediate field, "
+                       "value must be -15...16"));
+           INSERT_OPERAND (VIMM, *ip, imm_expr->X_add_number - 1);
+           imm_expr->X_op = O_absent;
+           s = expr_end;
+           continue;
+
+         case 'm': /* optional vector mask */
+           if (*s == '\0')
+             {
+               INSERT_OPERAND (VMASK, *ip, 1);
+               continue;
+             }
+           else if (*s == ',' && s++
+                    && reg_lookup (&s, RCLASS_VECM, &regno)
+                    && regno == 0)
+             {
+               INSERT_OPERAND (VMASK, *ip, 0);
+               continue;
+             }
+           break;
+
+         /* The following ones are only used in macros.  */
+         case 'M': /* required vector mask */
+           if (reg_lookup (&s, RCLASS_VECM, &regno) && regno == 0)
+             {
+               INSERT_OPERAND (VMASK, *ip, 0);
+               continue;
+             }
+           break;
+
+         case 'T': /* vector macro temporary register */
+           if (!reg_lookup (&s, RCLASS_VECR, &regno) || regno == 0)
+             break;
+           /* Store it in the FUNCT6 field as we don't have anyplace
+              else to store it.  */
+           INSERT_OPERAND (VFUNCT6, *ip, regno);
+           continue;
+         }
+       break;
+
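The 'Vm' operand makes masking optional at the end of the operand list: nothing after the last register sets the vm bit (unmasked), while a trailing ", v0.t" clears it (assuming riscv_vecm_names_numeric registers the spelling v0.t, the standard mask-operand syntax):

    vadd.vv v1, v2, v3          # unmasked: VMASK field = 1
    vadd.vv v1, v2, v3, v0.t    # masked by v0: VMASK field = 0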
      default:
        as_fatal (_("internal error: bad argument type %c"), *args);
      }
diff --git a/include/opcode/riscv-opc.h b/include/opcode/riscv-opc.h
index 158de32485..0c23fac724 100644
--- a/include/opcode/riscv-opc.h
+++ b/include/opcode/riscv-opc.h
@@ -113,6 +113,18 @@
 #define MASK_SRL 0xfe00707f
 #define MATCH_SRA 0x40005033
 #define MASK_SRA 0xfe00707f
+#define MATCH_SLO 0x20001033
+#define MASK_SLO 0xfe00707f
+#define MATCH_SLOI 0x20001013
+#define MASK_SLOI 0xfc00707f
+#define MATCH_SLOW 0x2000103b
+#define MASK_SLOW 0xfe00707f
+#define MATCH_SRO 0x20005033
+#define MASK_SRO 0xfe00707f
+#define MATCH_SROI 0x20005013
+#define MASK_SROI 0xfc00707f
+#define MATCH_SROW 0x2000503b
+#define MASK_SROW 0xfe00707f
 #define MATCH_OR 0x6033
 #define MASK_OR 0xfe00707f
 #define MATCH_AND 0x7033
@@ -157,6 +169,8 @@
 #define MASK_SW 0x707f
 #define MATCH_SD 0x3023
 #define MASK_SD 0x707f
+#define MATCH_PAUSE 0x0100000f
+#define MASK_PAUSE 0xffffffff
 #define MATCH_FENCE 0xf
 #define MASK_FENCE 0x707f
 #define MATCH_FENCE_I 0x100f
@@ -253,6 +267,16 @@
 #define MASK_SFENCE_VMA 0xfe007fff
 #define MATCH_WFI 0x10500073
 #define MASK_WFI 0xffffffff
+/* Custom CSR instructions.  */
+#define MATCH_CFLUSH_D_L1 0xfc000073
+#define MASK_CFLUSH_D_L1 0xfff07fff
+#define MATCH_CDISCARD_D_L1 0xfc200073
+#define MASK_CDISCARD_D_L1 0xfff07fff
+#define MATCH_CFLUSH_D_L2 0xfc400073
+#define MASK_CFLUSH_D_L2 0xfff07fff
+#define MATCH_CDISCARD_D_L2 0xfc600073
+#define MASK_CDISCARD_D_L2 0xfff07fff
+#define MASK_PREF 0x7fff
 #define MATCH_CSRRW 0x1073
 #define MASK_CSRRW 0x707f
 #define MATCH_CSRRS 0x2073
@@ -421,6 +445,200 @@
 #define MASK_FCVT_Q_LU 0xfff0007f
 #define MATCH_FMV_Q_X 0xf6000053
 #define MASK_FMV_Q_X 0xfff0707f
+#define MATCH_CLZ 0x60001013
+#define MASK_CLZ 0xfff0707f
+#define MATCH_CTZ 0x60101013
+#define MASK_CTZ 0xfff0707f
+#define MATCH_CPOP 0x60201013
+#define MASK_CPOP 0xfff0707f
+#define MATCH_BMATFLIP 0x60301013
+#define MASK_BMATFLIP 0xfff0707f
+#define MATCH_CRC32_B 0x61001013
+#define MASK_CRC32_B 0xfff0707f
+#define MATCH_CRC32_H 0x61101013
+#define MASK_CRC32_H 0xfff0707f
+#define MATCH_CRC32_W 0x61201013
+#define MASK_CRC32_W 0xfff0707f
+#define MATCH_CRC32_D 0x61301013
+#define MASK_CRC32_D 0xfff0707f
+#define MATCH_CRC32C_B 0x61801013
+#define MASK_CRC32C_B 0xfff0707f
+#define MATCH_CRC32C_H 0x61901013
+#define MASK_CRC32C_H 0xfff0707f
+#define MATCH_CRC32C_W 0x61a01013
+#define MASK_CRC32C_W 0xfff0707f
+#define MATCH_CRC32C_D 0x61b01013
+#define MASK_CRC32C_D 0xfff0707f
+#define MATCH_MIN 0xa004033
+#define MASK_MIN 0xfe00707f
+#define MATCH_MINU 0xa005033
+#define MASK_MINU 0xfe00707f
+#define MATCH_MAX 0xa006033
+#define MASK_MAX 0xfe00707f
+#define MATCH_MAXU 0xa007033
+#define MASK_MAXU 0xfe00707f
+#define MATCH_SHFL 0x8001033
+#define MASK_SHFL 0xfe00707f
+#define MATCH_SHFLI 0x8001013
+#define MASK_SHFLI 0xfc00707f
+#define MATCH_UNSHFL 0x8005033
+#define MASK_UNSHFL 0xfe00707f
+#define MATCH_UNSHFLI 0x8005013
+#define MASK_UNSHFLI 0xfc00707f
+#define MATCH_BCOMPRESS 0x8006033
+#define MASK_BCOMPRESS 0xfe00707f
+#define MATCH_BDECOMPRESS 0x48006033
+#define MASK_BDECOMPRESS 0xfe00707f
+#define MATCH_SEXT_B 0x60401013
+#define MASK_SEXT_B 0xfff0707f
+#define MATCH_SEXT_H 0x60501013
+#define MASK_SEXT_H 0xfff0707f
+#define MATCH_PACK 0x8004033
+#define MASK_PACK 0xfe00707f
+#define MATCH_PACKU 0x48004033
+#define MASK_PACKU 0xfe00707f
+#define MATCH_BMATOR 0x8003033
+#define MASK_BMATOR 0xfe00707f
+#define MATCH_BMATXOR 0x48003033
+#define MASK_BMATXOR 0xfe00707f
+#define MATCH_PACKH 0x8007033
+#define MASK_PACKH 0xfe00707f
+#define MATCH_BFP 0x48007033
+#define MASK_BFP 0xfe00707f
+#define MATCH_PACKW 0x800403b
+#define MASK_PACKW 0xfe00707f
+#define MATCH_PACKUW 0x4800403b
+#define MASK_PACKUW 0xfe00707f
+#define MATCH_ANDN 0x40007033
+#define MASK_ANDN 0xfe00707f
+#define MATCH_ORN 0x40006033
+#define MASK_ORN 0xfe00707f
+#define MATCH_XNOR 0x40004033
+#define MASK_XNOR 0xfe00707f
+#define MATCH_ROL 0x60001033
+#define MASK_ROL 0xfe00707f
+#define MATCH_ROR 0x60005033
+#define MASK_ROR 0xfe00707f
+#define MATCH_RORI 0x60005013
+#define MASK_RORI 0xfc00707f
+#define MATCH_BCLR 0x48001033
+#define MASK_BCLR 0xfe00707f
+#define MATCH_BCLRI 0x48001013
+#define MASK_BCLRI 0xfc00707f
+#define MATCH_BSET 0x28001033
+#define MASK_BSET 0xfe00707f
+#define MATCH_BSETI 0x28001013
+#define MASK_BSETI 0xfc00707f
+#define MATCH_BINV 0x68001033
+#define MASK_BINV 0xfe00707f
+#define MATCH_BINVI 0x68001013
+#define MASK_BINVI 0xfc00707f
+#define MATCH_BEXT 0x48005033
+#define MASK_BEXT 0xfe00707f
+#define MATCH_BEXTI 0x48005013
+#define MASK_BEXTI 0xfc00707f
+#define MATCH_GREV 0x68005033
+#define MASK_GREV 0xfe00707f
+#define MATCH_GREVI 0x68005013
+#define MASK_GREVI 0xfc00707f
+#define MATCH_GORC 0x28005033
+#define MASK_GORC 0xfe00707f
+#define MATCH_GORCI 0x28005013
+#define MASK_GORCI 0xfc00707f
+#define MATCH_CMIX 0x6001033
+#define MASK_CMIX 0x600707f
+#define MATCH_CMOV 0x6005033
+#define MASK_CMOV 0x600707f
+#define MATCH_FSL 0x4001033
+#define MASK_FSL 0x600707f
+#define MATCH_FSR 0x4005033
+#define MASK_FSR 0x600707f
+#define MATCH_FSRI 0x4005013
+#define MASK_FSRI 0x400707f
+#define MATCH_CLZW 0x6000101b
+#define MASK_CLZW 0xfff0707f
+#define MATCH_CTZW 0x6010101b
+#define MASK_CTZW 0xfff0707f
+#define MATCH_CPOPW 0x6020101b
+#define MASK_CPOPW 0xfff0707f
+#define MATCH_ROLW 0x6000103b
+#define MASK_ROLW 0xfe00707f
+#define MATCH_RORW 0x6000503b
+#define MASK_RORW 0xfe00707f
+#define MATCH_RORIW 0x6000501b
+#define MASK_RORIW 0xfe00707f
+#define MATCH_SH1ADD 0x20002033
+#define MASK_SH1ADD 0xfe00707f
+#define MATCH_SH2ADD 0x20004033
+#define MASK_SH2ADD 0xfe00707f
+#define MATCH_SH3ADD 0x20006033
+#define MASK_SH3ADD 0xfe00707f
+#define MATCH_SH1ADD_UW 0x2000203b
+#define MASK_SH1ADD_UW 0xfe00707f
+#define MATCH_SH2ADD_UW 0x2000403b
+#define MASK_SH2ADD_UW 0xfe00707f
+#define MATCH_SH3ADD_UW 0x2000603b
+#define MASK_SH3ADD_UW 0xfe00707f
+#define MATCH_BCLRW 0x4800103b
+#define MASK_BCLRW 0xfe00707f
+#define MATCH_BSETW 0x2800103b
+#define MASK_BSETW 0xfe00707f
+#define MATCH_BINVW 0x6800103b
+#define MASK_BINVW 0xfe00707f
+#define MATCH_BEXTW 0x4800503b
+#define MASK_BEXTW 0xfe00707f
+#define MATCH_GORCW 0x2800503b
+#define MASK_GORCW 0xfe00707f
+#define MATCH_GREVW 0x6800503b
+#define MASK_GREVW 0xfe00707f
+#define MATCH_SLOIW 0x2000101b
+#define MASK_SLOIW 0xfe00707f
+#define MATCH_SROIW 0x2000501b
+#define MASK_SROIW 0xfe00707f
+#define MATCH_BCLRIW 0x4800101b
+#define MASK_BCLRIW 0xfe00707f
+#define MATCH_BSETIW 0x2800101b
+#define MASK_BSETIW 0xfe00707f
+#define MATCH_BINVIW 0x6800101b
+#define MASK_BINVIW 0xfe00707f
+#define MATCH_GORCIW 0x2800501b
+#define MASK_GORCIW 0xfe00707f
+#define MATCH_GREVIW 0x6800501b
+#define MASK_GREVIW 0xfe00707f
+#define MATCH_FSLW 0x400103b
+#define MASK_FSLW 0x600707f
+#define MATCH_FSRW 0x400503b
+#define MASK_FSRW 0x600707f
+#define MATCH_FSRIW 0x400501b
+#define MASK_FSRIW 0x600707f
+#define MATCH_SHFLW 0x800103b
+#define MASK_SHFLW 0xfe00707f
+#define MATCH_UNSHFLW 0x800503b
+#define MASK_UNSHFLW 0xfe00707f
+#define MATCH_BCOMPRESSW 0x800603b
+#define MASK_BCOMPRESSW 0xfe00707f
+#define MATCH_BDECOMPRESSW 0x4800603b
+#define MASK_BDECOMPRESSW 0xfe00707f
+#define MATCH_BFPW 0x4800703b
+#define MASK_BFPW 0xfe00707f
+#define MATCH_XPERM_N 0x28002033
+#define MASK_XPERM_N 0xfe00707f
+#define MATCH_XPERM_B 0x28004033
+#define MASK_XPERM_B 0xfe00707f
+#define MATCH_XPERM_H 0x28006033
+#define MASK_XPERM_H 0xfe00707f
+#define MATCH_XPERM_W 0x28000033
+#define MASK_XPERM_W 0xfe00707f
+#define MATCH_ADD_UW 0x800003b
+#define MASK_ADD_UW 0xfe00707f
+#define MATCH_SLLI_UW 0x800101b
+#define MASK_SLLI_UW 0xfc00707f
+#define MATCH_CLMUL 0xa001033
+#define MASK_CLMUL 0xfe00707f
+#define MATCH_CLMULH 0xa003033
+#define MASK_CLMULH 0xfe00707f
+#define MATCH_CLMULR 0xa002033
+#define MASK_CLMULR 0xfe00707f
 #define MATCH_FLW 0x2007
 #define MASK_FLW 0x707f
 #define MATCH_FLD 0x3007
@@ -547,6 +765,1854 @@
 #define MASK_C_LDSP 0xe003
 #define MATCH_C_SDSP 0xe002
 #define MASK_C_SDSP 0xe003
+
+/* RVV */
+/* Version 1.0-draft-20210130.  */
+
+/* Temporary configuration-setting encoding info
+
+   `-` means zimm
+
+   31 30 zimm  RS2   RS1/uimm funct3 RD    opcode
+   1  0  00000 xxxxx xxxxx    111    xxxxx 1010111 vsetvl
+   1  1  ----- ----- xxxxx    111    xxxxx 1010111 vsetivli
+   0  -  ----- ----- xxxxx    111    xxxxx 1010111 vsetvli
+*/
+
+#define MATCH_VSETVL 0x80007057
+#define MASK_VSETVL 0xfe00707f
+#define MATCH_VSETIVLI 0xc0007057
+#define MASK_VSETIVLI 0xc000707f
+#define MATCH_VSETVLI 0x00007057
+#define MASK_VSETVLI 0x8000707f
+
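As a worked check of the vsetvli encoding against the table above (rd in bits 11:7, rs1 in bits 19:15, zimm in bits 30:20, bit 31 = 0), assuming the vtype layout used by this draft (vlmul bits 2:0, vsew bits 5:3, vta bit 6, vma bit 7):

    # vsetvli a0, a1, e32,m1,ta,ma
    # zimm = vma<<7 | vta<<6 | vsew<<3 | vlmul
    #      = 0x80 | 0x40 | 0x10 | 0x0 = 0xd0
    # insn = MATCH_VSETVLI | rd<<7 | rs1<<15 | zimm<<20
    #      = 0x00007057 | 10<<7 | 11<<15 | 0xd0<<20 = 0x0d05f557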
+/* Temporary Load/store encoding info
+
+   MOP load
+   00 unit-stride        VLE<EEW>, VLE<EEW>FF, VL<nf>RE<EEW> (nf = 1, 2, 4, 8)
+   01 indexed-unordered  VLUXEI<EEW>
+   10 strided            VLSE<EEW>
+   11 indexed-ordered    VLOXEI<EEW>
+
+   MOP store
+   00 unit-stride        VSE<EEW>, VS<nf>R (nf = 1, 2, 4, 8)
+   01 indexed-unordered  VSUXEI<EEW>
+   10 strided            VSSE<EEW>
+   11 indexed-ordered    VSOXEI<EEW>
+
+   VM 0 masked
+   VM 1 unmasked
+
+   LUMOP
+   00000 unit-stride load
+   01000 unit-stride, whole registers load
+   01011 unit-stride, mask load, EEW = 1
+   10000 unit-stride first-fault
+   xxxxx other encodings reserved, x != 0
+
+   SUMOP
+   00000 unit-stride store
+   01000 unit-stride, whole registers store
+   01011 unit-stride, mask store, EEW = 1
+   0xxxx other encodings reserved, x != 0
+
+   `-` means EEW:
+   MEW WIDTH
+   x   001   FLH/FSH
+   x   010   FLW/FSW
+   x   011   FLD/FSD
+   x   100   FLQ/FSQ
+   0   000   VLxE8/VSxE8, VLxEI8/VSxEI8, VL<nf>RE8, VS<nf>R
+   0   101   VLxE16/VSxE16, VLxEI16/VSxEI16, VL<nf>RE16
+   0   110   VLxE32/VSxE32, VLxEI32/VSxEI32, VL<nf>RE32
+   0   111   VLxE64/VSxE64, VLxEI64/VSxEI64, VL<nf>RE64
+   1   000   Reserved (VLxE128/VSxE128, VL<nf>RE128)
+   1   101   Reserved (VLxE256/VSxE256, VL<nf>RE256)
+   1   110   Reserved (VLxE512/VSxE512, VL<nf>RE512)
+   1   111   Reserved (VLxE1024/VSxE1024, VL<nf>RE1024)
+
+   NF  MEW MOP VM LUMOP/RS2 RS1   WIDTH VD    opcode
+   000 -   00  x  00000     xxxxx ---   xxxxx 0000111 VLE<EEW>
+   000 -   00  x  00000     xxxxx ---   xxxxx 0100111 VSE<EEW>
+   000 -   00  1  01011     xxxxx ---   xxxxx 0000111 VLE, EEW = 1
+   000 -   00  1  01011     xxxxx ---   xxxxx 0100111 VSE, EEW = 1
+   000 -   10  x  xxxxx     xxxxx ---   xxxxx 0000111 VLSE<EEW>
+   000 -   10  x  xxxxx     xxxxx ---   xxxxx 0100111 VSSE<EEW>
+   000 0   11  x  xxxxx     xxxxx ---   xxxxx 0000111 VLOXE<EEW>I
+   000 0   11  x  xxxxx     xxxxx ---   xxxxx 0100111 VSOXE<EEW>I
+   000 0   01  x  xxxxx     xxxxx ---   xxxxx 0000111 VLUXE<EEW>I
+   000 0   01  x  xxxxx     xxxxx ---   xxxxx 0100111 VSUXE<EEW>I
+   000 -   00  x  10000     xxxxx ---   xxxxx 0000111 VLE<EEW>FF
+   xxx -   00  1  01000     xxxxx ---   xxxxx 0000111 VL<nf>RE<EEW>, nf = 1,2,4,8
+   xxx 0   00  1  01000     xxxxx 000   xxxxx 0100111 VS<nf>R, nf = 1,2,4,8
+
+   xxx -   00  x  00000     xxxxx ---   xxxxx 0000111 VLSEG<nf>E<EEW>
+   xxx -   00  x  00000     xxxxx ---   xxxxx 0100111 VSSEG<nf>E<EEW>
+   xxx -   10  x  00000     xxxxx ---   xxxxx 0000111 VLSSEG<nf>E<EEW>
+   xxx -   10  x  00000     xxxxx ---   xxxxx 0100111 VSSSEG<nf>E<EEW>
+   xxx -   11  x  00000     xxxxx ---   xxxxx 0000111 VLOXSEG<nf>E<EEW>I
+   xxx -   11  x  00000     xxxxx ---   xxxxx 0100111 VSOXSEG<nf>E<EEW>I
+   xxx -   01  x  00000     xxxxx ---   xxxxx 0000111 VLUXSEG<nf>E<EEW>I
+   xxx -   01  x  00000     xxxxx ---   xxxxx 0100111 VSUXSEG<nf>E<EEW>I
+   xxx -   00  x  10000     xxxxx ---   xxxxx 0000111 VLSEG<nf>E<EEW>FF
+*/
+
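The MATCH/MASK pairs below follow directly from that field layout; decomposing one entry as a sketch:

    # MATCH_VLE32V = 0x00006007
    #   opcode = 0000111 (LOAD-FP), width = 110 (EEW = 32),
    #   mew = 0, mop = 00 (unit-stride), lumop = 00000, nf = 000
    # MASK_VLE32V = 0xfdf0707f leaves vd, rs1, and vm unchecked, so
    #   vle32.v vd, (rs1) and vle32.v vd, (rs1), v0.t share one entry.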
+#define MATCH_VLE1V 0x02b00007
+#define MASK_VLE1V 0xfff0707f
+#define MATCH_VSE1V 0x02b00027
+#define MASK_VSE1V 0xfff0707f
+
+#define MATCH_VLE8V 0x00000007
+#define MASK_VLE8V 0xfdf0707f
+#define MATCH_VLE16V 0x00005007
+#define MASK_VLE16V 0xfdf0707f
+#define MATCH_VLE32V 0x00006007
+#define MASK_VLE32V 0xfdf0707f
+#define MATCH_VLE64V 0x00007007
+#define MASK_VLE64V 0xfdf0707f
+
+#define MATCH_VSE8V 0x00000027
+#define MASK_VSE8V 0xfdf0707f
+#define MATCH_VSE16V 0x00005027
+#define MASK_VSE16V 0xfdf0707f
+#define MATCH_VSE32V 0x00006027
+#define MASK_VSE32V 0xfdf0707f
+#define MATCH_VSE64V 0x00007027
+#define MASK_VSE64V 0xfdf0707f
+
+#define MATCH_VLSE8V 0x08000007
+#define MASK_VLSE8V 0xfc00707f
+#define MATCH_VLSE16V 0x08005007
+#define MASK_VLSE16V 0xfc00707f
+#define MATCH_VLSE32V 0x08006007
+#define MASK_VLSE32V 0xfc00707f
+#define MATCH_VLSE64V 0x08007007
+#define MASK_VLSE64V 0xfc00707f
+
+#define MATCH_VSSE8V 0x08000027
+#define MASK_VSSE8V 0xfc00707f
+#define MATCH_VSSE16V 0x08005027
+#define MASK_VSSE16V 0xfc00707f
+#define MATCH_VSSE32V 0x08006027
+#define MASK_VSSE32V 0xfc00707f
+#define MATCH_VSSE64V 0x08007027
+#define MASK_VSSE64V 0xfc00707f
+
+#define MATCH_VLOXEI8V 0x0c000007
+#define MASK_VLOXEI8V 0xfc00707f
+#define MATCH_VLOXEI16V 0x0c005007
+#define MASK_VLOXEI16V 0xfc00707f
+#define MATCH_VLOXEI32V 0x0c006007
+#define MASK_VLOXEI32V 0xfc00707f
+#define MATCH_VLOXEI64V 0x0c007007
+#define MASK_VLOXEI64V 0xfc00707f
+
+#define MATCH_VSOXEI8V 0x0c000027
+#define MASK_VSOXEI8V 0xfc00707f
+#define MATCH_VSOXEI16V 0x0c005027
+#define MASK_VSOXEI16V 0xfc00707f
+#define MATCH_VSOXEI32V 0x0c006027
+#define MASK_VSOXEI32V 0xfc00707f
+#define MATCH_VSOXEI64V 0x0c007027
+#define MASK_VSOXEI64V 0xfc00707f
+
+#define MATCH_VLUXEI8V 0x04000007
+#define MASK_VLUXEI8V 0xfc00707f
+#define MATCH_VLUXEI16V 0x04005007
+#define MASK_VLUXEI16V 0xfc00707f
+#define MATCH_VLUXEI32V 0x04006007
+#define MASK_VLUXEI32V 0xfc00707f
+#define MATCH_VLUXEI64V 0x04007007
+#define MASK_VLUXEI64V 0xfc00707f
+
+#define MATCH_VSUXEI8V 0x04000027
+#define MASK_VSUXEI8V 0xfc00707f
+#define MATCH_VSUXEI16V 0x04005027
+#define MASK_VSUXEI16V 0xfc00707f
+#define MATCH_VSUXEI32V 0x04006027
+#define MASK_VSUXEI32V 0xfc00707f
+#define MATCH_VSUXEI64V 0x04007027
+#define MASK_VSUXEI64V 0xfc00707f
+
+#define MATCH_VLE8FFV 0x01000007
+#define MASK_VLE8FFV 0xfdf0707f
+#define MATCH_VLE16FFV 0x01005007
+#define MASK_VLE16FFV 0xfdf0707f
+#define MATCH_VLE32FFV 0x01006007
+#define MASK_VLE32FFV 0xfdf0707f
+#define MATCH_VLE64FFV 0x01007007
+#define MASK_VLE64FFV 0xfdf0707f
+
+#define MATCH_VLSEG2E8V 0x20000007
+#define MASK_VLSEG2E8V 0xfdf0707f
+#define MATCH_VSSEG2E8V 0x20000027
+#define MASK_VSSEG2E8V 0xfdf0707f
+#define MATCH_VLSEG3E8V 0x40000007
+#define MASK_VLSEG3E8V 0xfdf0707f
+#define MATCH_VSSEG3E8V 0x40000027
+#define MASK_VSSEG3E8V 0xfdf0707f
+#define MATCH_VLSEG4E8V 0x60000007
+#define MASK_VLSEG4E8V 0xfdf0707f
+#define MATCH_VSSEG4E8V 0x60000027
+#define MASK_VSSEG4E8V 0xfdf0707f
+#define MATCH_VLSEG5E8V 0x80000007
+#define MASK_VLSEG5E8V 0xfdf0707f
+#define MATCH_VSSEG5E8V 0x80000027
+#define MASK_VSSEG5E8V 0xfdf0707f
+#define MATCH_VLSEG6E8V 0xa0000007
+#define MASK_VLSEG6E8V 0xfdf0707f
+#define MATCH_VSSEG6E8V 0xa0000027
+#define MASK_VSSEG6E8V 0xfdf0707f
+#define MATCH_VLSEG7E8V 0xc0000007
+#define MASK_VLSEG7E8V 0xfdf0707f
+#define MATCH_VSSEG7E8V 0xc0000027
+#define MASK_VSSEG7E8V 0xfdf0707f
+#define MATCH_VLSEG8E8V 0xe0000007
+#define MASK_VLSEG8E8V 0xfdf0707f
+#define MATCH_VSSEG8E8V 0xe0000027
+#define MASK_VSSEG8E8V 0xfdf0707f
+
+#define MATCH_VLSEG2E16V 0x20005007
+#define MASK_VLSEG2E16V 0xfdf0707f
+#define MATCH_VSSEG2E16V 0x20005027
+#define MASK_VSSEG2E16V 0xfdf0707f
+#define MATCH_VLSEG3E16V 0x40005007
+#define MASK_VLSEG3E16V 0xfdf0707f
+#define MATCH_VSSEG3E16V 0x40005027
+#define MASK_VSSEG3E16V 0xfdf0707f
+#define MATCH_VLSEG4E16V 0x60005007
+#define MASK_VLSEG4E16V 0xfdf0707f
+#define MATCH_VSSEG4E16V 0x60005027
+#define MASK_VSSEG4E16V 0xfdf0707f
+#define MATCH_VLSEG5E16V 0x80005007
+#define MASK_VLSEG5E16V 0xfdf0707f
+#define MATCH_VSSEG5E16V 0x80005027
+#define MASK_VSSEG5E16V 0xfdf0707f
+#define MATCH_VLSEG6E16V 0xa0005007
+#define MASK_VLSEG6E16V 0xfdf0707f
+#define MATCH_VSSEG6E16V 0xa0005027
+#define MASK_VSSEG6E16V 0xfdf0707f
+#define MATCH_VLSEG7E16V 0xc0005007
+#define MASK_VLSEG7E16V 0xfdf0707f
+#define MATCH_VSSEG7E16V 0xc0005027
+#define MASK_VSSEG7E16V 0xfdf0707f
+#define MATCH_VLSEG8E16V 0xe0005007
+#define MASK_VLSEG8E16V 0xfdf0707f
+#define MATCH_VSSEG8E16V 0xe0005027
+#define MASK_VSSEG8E16V 0xfdf0707f
+
+#define MATCH_VLSEG2E32V 0x20006007
+#define MASK_VLSEG2E32V 0xfdf0707f
+#define MATCH_VSSEG2E32V 0x20006027
+#define MASK_VSSEG2E32V 0xfdf0707f
+#define MATCH_VLSEG3E32V 0x40006007
+#define MASK_VLSEG3E32V 0xfdf0707f
+#define MATCH_VSSEG3E32V 0x40006027
+#define MASK_VSSEG3E32V 0xfdf0707f
+#define MATCH_VLSEG4E32V 0x60006007
+#define MASK_VLSEG4E32V 0xfdf0707f
+#define MATCH_VSSEG4E32V 0x60006027
+#define MASK_VSSEG4E32V 0xfdf0707f
+#define MATCH_VLSEG5E32V 0x80006007
+#define MASK_VLSEG5E32V 0xfdf0707f
+#define MATCH_VSSEG5E32V 0x80006027
+#define MASK_VSSEG5E32V 0xfdf0707f
+#define MATCH_VLSEG6E32V 0xa0006007
+#define MASK_VLSEG6E32V 0xfdf0707f
+#define MATCH_VSSEG6E32V 0xa0006027
+#define MASK_VSSEG6E32V 0xfdf0707f
+#define MATCH_VLSEG7E32V 0xc0006007
+#define MASK_VLSEG7E32V 0xfdf0707f
+#define MATCH_VSSEG7E32V 0xc0006027
+#define MASK_VSSEG7E32V 0xfdf0707f
+#define MATCH_VLSEG8E32V 0xe0006007
+#define MASK_VLSEG8E32V 0xfdf0707f
+#define MATCH_VSSEG8E32V 0xe0006027
+#define MASK_VSSEG8E32V 0xfdf0707f
+
+#define MATCH_VLSEG2E64V 0x20007007
+#define MASK_VLSEG2E64V 0xfdf0707f
+#define MATCH_VSSEG2E64V 0x20007027
+#define MASK_VSSEG2E64V 0xfdf0707f
+#define MATCH_VLSEG3E64V 0x40007007
+#define MASK_VLSEG3E64V 0xfdf0707f
+#define MATCH_VSSEG3E64V 0x40007027
+#define MASK_VSSEG3E64V 0xfdf0707f
+#define MATCH_VLSEG4E64V 0x60007007
+#define MASK_VLSEG4E64V 0xfdf0707f
+#define MATCH_VSSEG4E64V 0x60007027
+#define MASK_VSSEG4E64V 0xfdf0707f
+#define MATCH_VLSEG5E64V 0x80007007
+#define MASK_VLSEG5E64V 0xfdf0707f
+#define MATCH_VSSEG5E64V 0x80007027
+#define MASK_VSSEG5E64V 0xfdf0707f
+#define MATCH_VLSEG6E64V 0xa0007007
+#define MASK_VLSEG6E64V 0xfdf0707f
+#define MATCH_VSSEG6E64V 0xa0007027
+#define MASK_VSSEG6E64V 0xfdf0707f
+#define MATCH_VLSEG7E64V 0xc0007007
+#define MASK_VLSEG7E64V 0xfdf0707f
+#define MATCH_VSSEG7E64V 0xc0007027
+#define MASK_VSSEG7E64V 0xfdf0707f
+#define MATCH_VLSEG8E64V 0xe0007007
+#define MASK_VLSEG8E64V 0xfdf0707f
+#define MATCH_VSSEG8E64V 0xe0007027
+#define MASK_VSSEG8E64V 0xfdf0707f
+
+#define MATCH_VLSSEG2E8V 0x28000007
+#define MASK_VLSSEG2E8V 0xfc00707f
+#define MATCH_VSSSEG2E8V 0x28000027
+#define MASK_VSSSEG2E8V 0xfc00707f
+#define MATCH_VLSSEG3E8V 0x48000007
+#define MASK_VLSSEG3E8V 0xfc00707f
+#define MATCH_VSSSEG3E8V 0x48000027
+#define MASK_VSSSEG3E8V 0xfc00707f
+#define MATCH_VLSSEG4E8V 0x68000007
+#define MASK_VLSSEG4E8V 0xfc00707f
+#define MATCH_VSSSEG4E8V 0x68000027
+#define MASK_VSSSEG4E8V 0xfc00707f
+#define MATCH_VLSSEG5E8V 0x88000007
+#define MASK_VLSSEG5E8V 0xfc00707f
+#define MATCH_VSSSEG5E8V 0x88000027
+#define MASK_VSSSEG5E8V 0xfc00707f
+#define MATCH_VLSSEG6E8V 0xa8000007
+#define MASK_VLSSEG6E8V 0xfc00707f
+#define MATCH_VSSSEG6E8V 0xa8000027
+#define MASK_VSSSEG6E8V 0xfc00707f
+#define MATCH_VLSSEG7E8V 0xc8000007
+#define MASK_VLSSEG7E8V 0xfc00707f
+#define MATCH_VSSSEG7E8V 0xc8000027
+#define MASK_VSSSEG7E8V 0xfc00707f
+#define MATCH_VLSSEG8E8V 0xe8000007
+#define MASK_VLSSEG8E8V 0xfc00707f
+#define MATCH_VSSSEG8E8V 0xe8000027
+#define MASK_VSSSEG8E8V 0xfc00707f
+
+#define MATCH_VLSSEG2E16V 0x28005007
+#define MASK_VLSSEG2E16V 0xfc00707f
+#define MATCH_VSSSEG2E16V 0x28005027
+#define MASK_VSSSEG2E16V 0xfc00707f
+#define MATCH_VLSSEG3E16V 0x48005007
+#define MASK_VLSSEG3E16V 0xfc00707f
+#define MATCH_VSSSEG3E16V 0x48005027
+#define MASK_VSSSEG3E16V 0xfc00707f
+#define MATCH_VLSSEG4E16V 0x68005007
+#define MASK_VLSSEG4E16V 0xfc00707f
+#define MATCH_VSSSEG4E16V 0x68005027
+#define MASK_VSSSEG4E16V 0xfc00707f
+#define MATCH_VLSSEG5E16V 0x88005007
+#define MASK_VLSSEG5E16V 0xfc00707f
+#define MATCH_VSSSEG5E16V 0x88005027
+#define MASK_VSSSEG5E16V 0xfc00707f
  1148. +#define MATCH_VLSSEG6E16V 0xa8005007
  1149. +#define MASK_VLSSEG6E16V 0xfc00707f
  1150. +#define MATCH_VSSSEG6E16V 0xa8005027
  1151. +#define MASK_VSSSEG6E16V 0xfc00707f
  1152. +#define MATCH_VLSSEG7E16V 0xc8005007
  1153. +#define MASK_VLSSEG7E16V 0xfc00707f
  1154. +#define MATCH_VSSSEG7E16V 0xc8005027
  1155. +#define MASK_VSSSEG7E16V 0xfc00707f
  1156. +#define MATCH_VLSSEG8E16V 0xe8005007
  1157. +#define MASK_VLSSEG8E16V 0xfc00707f
  1158. +#define MATCH_VSSSEG8E16V 0xe8005027
  1159. +#define MASK_VSSSEG8E16V 0xfc00707f
  1160. +
  1161. +#define MATCH_VLSSEG2E32V 0x28006007
  1162. +#define MASK_VLSSEG2E32V 0xfc00707f
  1163. +#define MATCH_VSSSEG2E32V 0x28006027
  1164. +#define MASK_VSSSEG2E32V 0xfc00707f
  1165. +#define MATCH_VLSSEG3E32V 0x48006007
  1166. +#define MASK_VLSSEG3E32V 0xfc00707f
  1167. +#define MATCH_VSSSEG3E32V 0x48006027
  1168. +#define MASK_VSSSEG3E32V 0xfc00707f
  1169. +#define MATCH_VLSSEG4E32V 0x68006007
  1170. +#define MASK_VLSSEG4E32V 0xfc00707f
  1171. +#define MATCH_VSSSEG4E32V 0x68006027
  1172. +#define MASK_VSSSEG4E32V 0xfc00707f
  1173. +#define MATCH_VLSSEG5E32V 0x88006007
  1174. +#define MASK_VLSSEG5E32V 0xfc00707f
  1175. +#define MATCH_VSSSEG5E32V 0x88006027
  1176. +#define MASK_VSSSEG5E32V 0xfc00707f
  1177. +#define MATCH_VLSSEG6E32V 0xa8006007
  1178. +#define MASK_VLSSEG6E32V 0xfc00707f
  1179. +#define MATCH_VSSSEG6E32V 0xa8006027
  1180. +#define MASK_VSSSEG6E32V 0xfc00707f
  1181. +#define MATCH_VLSSEG7E32V 0xc8006007
  1182. +#define MASK_VLSSEG7E32V 0xfc00707f
  1183. +#define MATCH_VSSSEG7E32V 0xc8006027
  1184. +#define MASK_VSSSEG7E32V 0xfc00707f
  1185. +#define MATCH_VLSSEG8E32V 0xe8006007
  1186. +#define MASK_VLSSEG8E32V 0xfc00707f
  1187. +#define MATCH_VSSSEG8E32V 0xe8006027
  1188. +#define MASK_VSSSEG8E32V 0xfc00707f
  1189. +
  1190. +#define MATCH_VLSSEG2E64V 0x28007007
  1191. +#define MASK_VLSSEG2E64V 0xfc00707f
  1192. +#define MATCH_VSSSEG2E64V 0x28007027
  1193. +#define MASK_VSSSEG2E64V 0xfc00707f
  1194. +#define MATCH_VLSSEG3E64V 0x48007007
  1195. +#define MASK_VLSSEG3E64V 0xfc00707f
  1196. +#define MATCH_VSSSEG3E64V 0x48007027
  1197. +#define MASK_VSSSEG3E64V 0xfc00707f
  1198. +#define MATCH_VLSSEG4E64V 0x68007007
  1199. +#define MASK_VLSSEG4E64V 0xfc00707f
  1200. +#define MATCH_VSSSEG4E64V 0x68007027
  1201. +#define MASK_VSSSEG4E64V 0xfc00707f
  1202. +#define MATCH_VLSSEG5E64V 0x88007007
  1203. +#define MASK_VLSSEG5E64V 0xfc00707f
  1204. +#define MATCH_VSSSEG5E64V 0x88007027
  1205. +#define MASK_VSSSEG5E64V 0xfc00707f
  1206. +#define MATCH_VLSSEG6E64V 0xa8007007
  1207. +#define MASK_VLSSEG6E64V 0xfc00707f
  1208. +#define MATCH_VSSSEG6E64V 0xa8007027
  1209. +#define MASK_VSSSEG6E64V 0xfc00707f
  1210. +#define MATCH_VLSSEG7E64V 0xc8007007
  1211. +#define MASK_VLSSEG7E64V 0xfc00707f
  1212. +#define MATCH_VSSSEG7E64V 0xc8007027
  1213. +#define MASK_VSSSEG7E64V 0xfc00707f
  1214. +#define MATCH_VLSSEG8E64V 0xe8007007
  1215. +#define MASK_VLSSEG8E64V 0xfc00707f
  1216. +#define MATCH_VSSSEG8E64V 0xe8007027
  1217. +#define MASK_VSSSEG8E64V 0xfc00707f
  1218. +
  1219. +#define MATCH_VLOXSEG2EI8V 0x2c000007
  1220. +#define MASK_VLOXSEG2EI8V 0xfc00707f
  1221. +#define MATCH_VSOXSEG2EI8V 0x2c000027
  1222. +#define MASK_VSOXSEG2EI8V 0xfc00707f
  1223. +#define MATCH_VLOXSEG3EI8V 0x4c000007
  1224. +#define MASK_VLOXSEG3EI8V 0xfc00707f
  1225. +#define MATCH_VSOXSEG3EI8V 0x4c000027
  1226. +#define MASK_VSOXSEG3EI8V 0xfc00707f
  1227. +#define MATCH_VLOXSEG4EI8V 0x6c000007
  1228. +#define MASK_VLOXSEG4EI8V 0xfc00707f
  1229. +#define MATCH_VSOXSEG4EI8V 0x6c000027
  1230. +#define MASK_VSOXSEG4EI8V 0xfc00707f
  1231. +#define MATCH_VLOXSEG5EI8V 0x8c000007
  1232. +#define MASK_VLOXSEG5EI8V 0xfc00707f
  1233. +#define MATCH_VSOXSEG5EI8V 0x8c000027
  1234. +#define MASK_VSOXSEG5EI8V 0xfc00707f
  1235. +#define MATCH_VLOXSEG6EI8V 0xac000007
  1236. +#define MASK_VLOXSEG6EI8V 0xfc00707f
  1237. +#define MATCH_VSOXSEG6EI8V 0xac000027
  1238. +#define MASK_VSOXSEG6EI8V 0xfc00707f
  1239. +#define MATCH_VLOXSEG7EI8V 0xcc000007
  1240. +#define MASK_VLOXSEG7EI8V 0xfc00707f
  1241. +#define MATCH_VSOXSEG7EI8V 0xcc000027
  1242. +#define MASK_VSOXSEG7EI8V 0xfc00707f
  1243. +#define MATCH_VLOXSEG8EI8V 0xec000007
  1244. +#define MASK_VLOXSEG8EI8V 0xfc00707f
  1245. +#define MATCH_VSOXSEG8EI8V 0xec000027
  1246. +#define MASK_VSOXSEG8EI8V 0xfc00707f
  1247. +
  1248. +#define MATCH_VLUXSEG2EI8V 0x24000007
  1249. +#define MASK_VLUXSEG2EI8V 0xfc00707f
  1250. +#define MATCH_VSUXSEG2EI8V 0x24000027
  1251. +#define MASK_VSUXSEG2EI8V 0xfc00707f
  1252. +#define MATCH_VLUXSEG3EI8V 0x44000007
  1253. +#define MASK_VLUXSEG3EI8V 0xfc00707f
  1254. +#define MATCH_VSUXSEG3EI8V 0x44000027
  1255. +#define MASK_VSUXSEG3EI8V 0xfc00707f
  1256. +#define MATCH_VLUXSEG4EI8V 0x64000007
  1257. +#define MASK_VLUXSEG4EI8V 0xfc00707f
  1258. +#define MATCH_VSUXSEG4EI8V 0x64000027
  1259. +#define MASK_VSUXSEG4EI8V 0xfc00707f
  1260. +#define MATCH_VLUXSEG5EI8V 0x84000007
  1261. +#define MASK_VLUXSEG5EI8V 0xfc00707f
  1262. +#define MATCH_VSUXSEG5EI8V 0x84000027
  1263. +#define MASK_VSUXSEG5EI8V 0xfc00707f
  1264. +#define MATCH_VLUXSEG6EI8V 0xa4000007
  1265. +#define MASK_VLUXSEG6EI8V 0xfc00707f
  1266. +#define MATCH_VSUXSEG6EI8V 0xa4000027
  1267. +#define MASK_VSUXSEG6EI8V 0xfc00707f
  1268. +#define MATCH_VLUXSEG7EI8V 0xc4000007
  1269. +#define MASK_VLUXSEG7EI8V 0xfc00707f
  1270. +#define MATCH_VSUXSEG7EI8V 0xc4000027
  1271. +#define MASK_VSUXSEG7EI8V 0xfc00707f
  1272. +#define MATCH_VLUXSEG8EI8V 0xe4000007
  1273. +#define MASK_VLUXSEG8EI8V 0xfc00707f
  1274. +#define MATCH_VSUXSEG8EI8V 0xe4000027
  1275. +#define MASK_VSUXSEG8EI8V 0xfc00707f
  1276. +
  1277. +#define MATCH_VLOXSEG2EI16V 0x2c005007
  1278. +#define MASK_VLOXSEG2EI16V 0xfc00707f
  1279. +#define MATCH_VSOXSEG2EI16V 0x2c005027
  1280. +#define MASK_VSOXSEG2EI16V 0xfc00707f
  1281. +#define MATCH_VLOXSEG3EI16V 0x4c005007
  1282. +#define MASK_VLOXSEG3EI16V 0xfc00707f
  1283. +#define MATCH_VSOXSEG3EI16V 0x4c005027
  1284. +#define MASK_VSOXSEG3EI16V 0xfc00707f
  1285. +#define MATCH_VLOXSEG4EI16V 0x6c005007
  1286. +#define MASK_VLOXSEG4EI16V 0xfc00707f
  1287. +#define MATCH_VSOXSEG4EI16V 0x6c005027
  1288. +#define MASK_VSOXSEG4EI16V 0xfc00707f
  1289. +#define MATCH_VLOXSEG5EI16V 0x8c005007
  1290. +#define MASK_VLOXSEG5EI16V 0xfc00707f
  1291. +#define MATCH_VSOXSEG5EI16V 0x8c005027
  1292. +#define MASK_VSOXSEG5EI16V 0xfc00707f
  1293. +#define MATCH_VLOXSEG6EI16V 0xac005007
  1294. +#define MASK_VLOXSEG6EI16V 0xfc00707f
  1295. +#define MATCH_VSOXSEG6EI16V 0xac005027
  1296. +#define MASK_VSOXSEG6EI16V 0xfc00707f
  1297. +#define MATCH_VLOXSEG7EI16V 0xcc005007
  1298. +#define MASK_VLOXSEG7EI16V 0xfc00707f
  1299. +#define MATCH_VSOXSEG7EI16V 0xcc005027
  1300. +#define MASK_VSOXSEG7EI16V 0xfc00707f
  1301. +#define MATCH_VLOXSEG8EI16V 0xec005007
  1302. +#define MASK_VLOXSEG8EI16V 0xfc00707f
  1303. +#define MATCH_VSOXSEG8EI16V 0xec005027
  1304. +#define MASK_VSOXSEG8EI16V 0xfc00707f
  1305. +
  1306. +#define MATCH_VLUXSEG2EI16V 0x24005007
  1307. +#define MASK_VLUXSEG2EI16V 0xfc00707f
  1308. +#define MATCH_VSUXSEG2EI16V 0x24005027
  1309. +#define MASK_VSUXSEG2EI16V 0xfc00707f
  1310. +#define MATCH_VLUXSEG3EI16V 0x44005007
  1311. +#define MASK_VLUXSEG3EI16V 0xfc00707f
  1312. +#define MATCH_VSUXSEG3EI16V 0x44005027
  1313. +#define MASK_VSUXSEG3EI16V 0xfc00707f
  1314. +#define MATCH_VLUXSEG4EI16V 0x64005007
  1315. +#define MASK_VLUXSEG4EI16V 0xfc00707f
  1316. +#define MATCH_VSUXSEG4EI16V 0x64005027
  1317. +#define MASK_VSUXSEG4EI16V 0xfc00707f
  1318. +#define MATCH_VLUXSEG5EI16V 0x84005007
  1319. +#define MASK_VLUXSEG5EI16V 0xfc00707f
  1320. +#define MATCH_VSUXSEG5EI16V 0x84005027
  1321. +#define MASK_VSUXSEG5EI16V 0xfc00707f
  1322. +#define MATCH_VLUXSEG6EI16V 0xa4005007
  1323. +#define MASK_VLUXSEG6EI16V 0xfc00707f
  1324. +#define MATCH_VSUXSEG6EI16V 0xa4005027
  1325. +#define MASK_VSUXSEG6EI16V 0xfc00707f
  1326. +#define MATCH_VLUXSEG7EI16V 0xc4005007
  1327. +#define MASK_VLUXSEG7EI16V 0xfc00707f
  1328. +#define MATCH_VSUXSEG7EI16V 0xc4005027
  1329. +#define MASK_VSUXSEG7EI16V 0xfc00707f
  1330. +#define MATCH_VLUXSEG8EI16V 0xe4005007
  1331. +#define MASK_VLUXSEG8EI16V 0xfc00707f
  1332. +#define MATCH_VSUXSEG8EI16V 0xe4005027
  1333. +#define MASK_VSUXSEG8EI16V 0xfc00707f
  1334. +
  1335. +#define MATCH_VLOXSEG2EI32V 0x2c006007
  1336. +#define MASK_VLOXSEG2EI32V 0xfc00707f
  1337. +#define MATCH_VSOXSEG2EI32V 0x2c006027
  1338. +#define MASK_VSOXSEG2EI32V 0xfc00707f
  1339. +#define MATCH_VLOXSEG3EI32V 0x4c006007
  1340. +#define MASK_VLOXSEG3EI32V 0xfc00707f
  1341. +#define MATCH_VSOXSEG3EI32V 0x4c006027
  1342. +#define MASK_VSOXSEG3EI32V 0xfc00707f
  1343. +#define MATCH_VLOXSEG4EI32V 0x6c006007
  1344. +#define MASK_VLOXSEG4EI32V 0xfc00707f
  1345. +#define MATCH_VSOXSEG4EI32V 0x6c006027
  1346. +#define MASK_VSOXSEG4EI32V 0xfc00707f
  1347. +#define MATCH_VLOXSEG5EI32V 0x8c006007
  1348. +#define MASK_VLOXSEG5EI32V 0xfc00707f
  1349. +#define MATCH_VSOXSEG5EI32V 0x8c006027
  1350. +#define MASK_VSOXSEG5EI32V 0xfc00707f
  1351. +#define MATCH_VLOXSEG6EI32V 0xac006007
  1352. +#define MASK_VLOXSEG6EI32V 0xfc00707f
  1353. +#define MATCH_VSOXSEG6EI32V 0xac006027
  1354. +#define MASK_VSOXSEG6EI32V 0xfc00707f
  1355. +#define MATCH_VLOXSEG7EI32V 0xcc006007
  1356. +#define MASK_VLOXSEG7EI32V 0xfc00707f
  1357. +#define MATCH_VSOXSEG7EI32V 0xcc006027
  1358. +#define MASK_VSOXSEG7EI32V 0xfc00707f
  1359. +#define MATCH_VLOXSEG8EI32V 0xec006007
  1360. +#define MASK_VLOXSEG8EI32V 0xfc00707f
  1361. +#define MATCH_VSOXSEG8EI32V 0xec006027
  1362. +#define MASK_VSOXSEG8EI32V 0xfc00707f
  1363. +
  1364. +#define MATCH_VLUXSEG2EI32V 0x24006007
  1365. +#define MASK_VLUXSEG2EI32V 0xfc00707f
  1366. +#define MATCH_VSUXSEG2EI32V 0x24006027
  1367. +#define MASK_VSUXSEG2EI32V 0xfc00707f
  1368. +#define MATCH_VLUXSEG3EI32V 0x44006007
  1369. +#define MASK_VLUXSEG3EI32V 0xfc00707f
  1370. +#define MATCH_VSUXSEG3EI32V 0x44006027
  1371. +#define MASK_VSUXSEG3EI32V 0xfc00707f
  1372. +#define MATCH_VLUXSEG4EI32V 0x64006007
  1373. +#define MASK_VLUXSEG4EI32V 0xfc00707f
  1374. +#define MATCH_VSUXSEG4EI32V 0x64006027
  1375. +#define MASK_VSUXSEG4EI32V 0xfc00707f
  1376. +#define MATCH_VLUXSEG5EI32V 0x84006007
  1377. +#define MASK_VLUXSEG5EI32V 0xfc00707f
  1378. +#define MATCH_VSUXSEG5EI32V 0x84006027
  1379. +#define MASK_VSUXSEG5EI32V 0xfc00707f
  1380. +#define MATCH_VLUXSEG6EI32V 0xa4006007
  1381. +#define MASK_VLUXSEG6EI32V 0xfc00707f
  1382. +#define MATCH_VSUXSEG6EI32V 0xa4006027
  1383. +#define MASK_VSUXSEG6EI32V 0xfc00707f
  1384. +#define MATCH_VLUXSEG7EI32V 0xc4006007
  1385. +#define MASK_VLUXSEG7EI32V 0xfc00707f
  1386. +#define MATCH_VSUXSEG7EI32V 0xc4006027
  1387. +#define MASK_VSUXSEG7EI32V 0xfc00707f
  1388. +#define MATCH_VLUXSEG8EI32V 0xe4006007
  1389. +#define MASK_VLUXSEG8EI32V 0xfc00707f
  1390. +#define MATCH_VSUXSEG8EI32V 0xe4006027
  1391. +#define MASK_VSUXSEG8EI32V 0xfc00707f
  1392. +
  1393. +#define MATCH_VLOXSEG2EI64V 0x2c007007
  1394. +#define MASK_VLOXSEG2EI64V 0xfc00707f
  1395. +#define MATCH_VSOXSEG2EI64V 0x2c007027
  1396. +#define MASK_VSOXSEG2EI64V 0xfc00707f
  1397. +#define MATCH_VLOXSEG3EI64V 0x4c007007
  1398. +#define MASK_VLOXSEG3EI64V 0xfc00707f
  1399. +#define MATCH_VSOXSEG3EI64V 0x4c007027
  1400. +#define MASK_VSOXSEG3EI64V 0xfc00707f
  1401. +#define MATCH_VLOXSEG4EI64V 0x6c007007
  1402. +#define MASK_VLOXSEG4EI64V 0xfc00707f
  1403. +#define MATCH_VSOXSEG4EI64V 0x6c007027
  1404. +#define MASK_VSOXSEG4EI64V 0xfc00707f
  1405. +#define MATCH_VLOXSEG5EI64V 0x8c007007
  1406. +#define MASK_VLOXSEG5EI64V 0xfc00707f
  1407. +#define MATCH_VSOXSEG5EI64V 0x8c007027
  1408. +#define MASK_VSOXSEG5EI64V 0xfc00707f
  1409. +#define MATCH_VLOXSEG6EI64V 0xac007007
  1410. +#define MASK_VLOXSEG6EI64V 0xfc00707f
  1411. +#define MATCH_VSOXSEG6EI64V 0xac007027
  1412. +#define MASK_VSOXSEG6EI64V 0xfc00707f
  1413. +#define MATCH_VLOXSEG7EI64V 0xcc007007
  1414. +#define MASK_VLOXSEG7EI64V 0xfc00707f
  1415. +#define MATCH_VSOXSEG7EI64V 0xcc007027
  1416. +#define MASK_VSOXSEG7EI64V 0xfc00707f
  1417. +#define MATCH_VLOXSEG8EI64V 0xec007007
  1418. +#define MASK_VLOXSEG8EI64V 0xfc00707f
  1419. +#define MATCH_VSOXSEG8EI64V 0xec007027
  1420. +#define MASK_VSOXSEG8EI64V 0xfc00707f
  1421. +
  1422. +#define MATCH_VLUXSEG2EI64V 0x24007007
  1423. +#define MASK_VLUXSEG2EI64V 0xfc00707f
  1424. +#define MATCH_VSUXSEG2EI64V 0x24007027
  1425. +#define MASK_VSUXSEG2EI64V 0xfc00707f
  1426. +#define MATCH_VLUXSEG3EI64V 0x44007007
  1427. +#define MASK_VLUXSEG3EI64V 0xfc00707f
  1428. +#define MATCH_VSUXSEG3EI64V 0x44007027
  1429. +#define MASK_VSUXSEG3EI64V 0xfc00707f
  1430. +#define MATCH_VLUXSEG4EI64V 0x64007007
  1431. +#define MASK_VLUXSEG4EI64V 0xfc00707f
  1432. +#define MATCH_VSUXSEG4EI64V 0x64007027
  1433. +#define MASK_VSUXSEG4EI64V 0xfc00707f
  1434. +#define MATCH_VLUXSEG5EI64V 0x84007007
  1435. +#define MASK_VLUXSEG5EI64V 0xfc00707f
  1436. +#define MATCH_VSUXSEG5EI64V 0x84007027
  1437. +#define MASK_VSUXSEG5EI64V 0xfc00707f
  1438. +#define MATCH_VLUXSEG6EI64V 0xa4007007
  1439. +#define MASK_VLUXSEG6EI64V 0xfc00707f
  1440. +#define MATCH_VSUXSEG6EI64V 0xa4007027
  1441. +#define MASK_VSUXSEG6EI64V 0xfc00707f
  1442. +#define MATCH_VLUXSEG7EI64V 0xc4007007
  1443. +#define MASK_VLUXSEG7EI64V 0xfc00707f
  1444. +#define MATCH_VSUXSEG7EI64V 0xc4007027
  1445. +#define MASK_VSUXSEG7EI64V 0xfc00707f
  1446. +#define MATCH_VLUXSEG8EI64V 0xe4007007
  1447. +#define MASK_VLUXSEG8EI64V 0xfc00707f
  1448. +#define MATCH_VSUXSEG8EI64V 0xe4007027
  1449. +#define MASK_VSUXSEG8EI64V 0xfc00707f
  1450. +
  1451. +#define MATCH_VLSEG2E8FFV 0x21000007
  1452. +#define MASK_VLSEG2E8FFV 0xfdf0707f
  1453. +#define MATCH_VLSEG3E8FFV 0x41000007
  1454. +#define MASK_VLSEG3E8FFV 0xfdf0707f
  1455. +#define MATCH_VLSEG4E8FFV 0x61000007
  1456. +#define MASK_VLSEG4E8FFV 0xfdf0707f
  1457. +#define MATCH_VLSEG5E8FFV 0x81000007
  1458. +#define MASK_VLSEG5E8FFV 0xfdf0707f
  1459. +#define MATCH_VLSEG6E8FFV 0xa1000007
  1460. +#define MASK_VLSEG6E8FFV 0xfdf0707f
  1461. +#define MATCH_VLSEG7E8FFV 0xc1000007
  1462. +#define MASK_VLSEG7E8FFV 0xfdf0707f
  1463. +#define MATCH_VLSEG8E8FFV 0xe1000007
  1464. +#define MASK_VLSEG8E8FFV 0xfdf0707f
  1465. +
  1466. +#define MATCH_VLSEG2E16FFV 0x21005007
  1467. +#define MASK_VLSEG2E16FFV 0xfdf0707f
  1468. +#define MATCH_VLSEG3E16FFV 0x41005007
  1469. +#define MASK_VLSEG3E16FFV 0xfdf0707f
  1470. +#define MATCH_VLSEG4E16FFV 0x61005007
  1471. +#define MASK_VLSEG4E16FFV 0xfdf0707f
  1472. +#define MATCH_VLSEG5E16FFV 0x81005007
  1473. +#define MASK_VLSEG5E16FFV 0xfdf0707f
  1474. +#define MATCH_VLSEG6E16FFV 0xa1005007
  1475. +#define MASK_VLSEG6E16FFV 0xfdf0707f
  1476. +#define MATCH_VLSEG7E16FFV 0xc1005007
  1477. +#define MASK_VLSEG7E16FFV 0xfdf0707f
  1478. +#define MATCH_VLSEG8E16FFV 0xe1005007
  1479. +#define MASK_VLSEG8E16FFV 0xfdf0707f
  1480. +
  1481. +#define MATCH_VLSEG2E32FFV 0x21006007
  1482. +#define MASK_VLSEG2E32FFV 0xfdf0707f
  1483. +#define MATCH_VLSEG3E32FFV 0x41006007
  1484. +#define MASK_VLSEG3E32FFV 0xfdf0707f
  1485. +#define MATCH_VLSEG4E32FFV 0x61006007
  1486. +#define MASK_VLSEG4E32FFV 0xfdf0707f
  1487. +#define MATCH_VLSEG5E32FFV 0x81006007
  1488. +#define MASK_VLSEG5E32FFV 0xfdf0707f
  1489. +#define MATCH_VLSEG6E32FFV 0xa1006007
  1490. +#define MASK_VLSEG6E32FFV 0xfdf0707f
  1491. +#define MATCH_VLSEG7E32FFV 0xc1006007
  1492. +#define MASK_VLSEG7E32FFV 0xfdf0707f
  1493. +#define MATCH_VLSEG8E32FFV 0xe1006007
  1494. +#define MASK_VLSEG8E32FFV 0xfdf0707f
  1495. +
  1496. +#define MATCH_VLSEG2E64FFV 0x21007007
  1497. +#define MASK_VLSEG2E64FFV 0xfdf0707f
  1498. +#define MATCH_VLSEG3E64FFV 0x41007007
  1499. +#define MASK_VLSEG3E64FFV 0xfdf0707f
  1500. +#define MATCH_VLSEG4E64FFV 0x61007007
  1501. +#define MASK_VLSEG4E64FFV 0xfdf0707f
  1502. +#define MATCH_VLSEG5E64FFV 0x81007007
  1503. +#define MASK_VLSEG5E64FFV 0xfdf0707f
  1504. +#define MATCH_VLSEG6E64FFV 0xa1007007
  1505. +#define MASK_VLSEG6E64FFV 0xfdf0707f
  1506. +#define MATCH_VLSEG7E64FFV 0xc1007007
  1507. +#define MASK_VLSEG7E64FFV 0xfdf0707f
  1508. +#define MATCH_VLSEG8E64FFV 0xe1007007
  1509. +#define MASK_VLSEG8E64FFV 0xfdf0707f
  1510. +
  1511. +#define MATCH_VL1RE8V 0x02800007
  1512. +#define MASK_VL1RE8V 0xfff0707f
  1513. +#define MATCH_VL1RE16V 0x02805007
  1514. +#define MASK_VL1RE16V 0xfff0707f
  1515. +#define MATCH_VL1RE32V 0x02806007
  1516. +#define MASK_VL1RE32V 0xfff0707f
  1517. +#define MATCH_VL1RE64V 0x02807007
  1518. +#define MASK_VL1RE64V 0xfff0707f
  1519. +
  1520. +#define MATCH_VL2RE8V 0x22800007
  1521. +#define MASK_VL2RE8V 0xfff0707f
  1522. +#define MATCH_VL2RE16V 0x22805007
  1523. +#define MASK_VL2RE16V 0xfff0707f
  1524. +#define MATCH_VL2RE32V 0x22806007
  1525. +#define MASK_VL2RE32V 0xfff0707f
  1526. +#define MATCH_VL2RE64V 0x22807007
  1527. +#define MASK_VL2RE64V 0xfff0707f
  1528. +
  1529. +#define MATCH_VL4RE8V 0x62800007
  1530. +#define MASK_VL4RE8V 0xfff0707f
  1531. +#define MATCH_VL4RE16V 0x62805007
  1532. +#define MASK_VL4RE16V 0xfff0707f
  1533. +#define MATCH_VL4RE32V 0x62806007
  1534. +#define MASK_VL4RE32V 0xfff0707f
  1535. +#define MATCH_VL4RE64V 0x62807007
  1536. +#define MASK_VL4RE64V 0xfff0707f
  1537. +
  1538. +#define MATCH_VL8RE8V 0xe2800007
  1539. +#define MASK_VL8RE8V 0xfff0707f
  1540. +#define MATCH_VL8RE16V 0xe2805007
  1541. +#define MASK_VL8RE16V 0xfff0707f
  1542. +#define MATCH_VL8RE32V 0xe2806007
  1543. +#define MASK_VL8RE32V 0xfff0707f
  1544. +#define MATCH_VL8RE64V 0xe2807007
  1545. +#define MASK_VL8RE64V 0xfff0707f
  1546. +
  1547. +#define MATCH_VS1RV 0x02800027
  1548. +#define MASK_VS1RV 0xfff0707f
  1549. +#define MATCH_VS2RV 0x22800027
  1550. +#define MASK_VS2RV 0xfff0707f
  1551. +#define MATCH_VS4RV 0x62800027
  1552. +#define MASK_VS4RV 0xfff0707f
  1553. +#define MATCH_VS8RV 0xe2800027
  1554. +#define MASK_VS8RV 0xfff0707f
  1555. +
  1556. +/* Temporary AMO encoding info
  1557. +
  1558. +width
  1559. +010 AMO*.W
  1560. +011 AMO*.D
  1561. +100 AMO*.Q
  1562. +000 VAMO*EI8.V
  1563. +101 VAMO*EI16.V
  1564. +110 VAMO*EI32.V
  1565. +111 VAMO*EI64.V
  1566. +
  1567. +amoop
  1568. +00001 vamoswap
  1569. +00000 vamoadd
  1570. +00100 vamoxor
  1571. +01100 vamoand
  1572. +01000 vamoor
  1573. +10000 vamomin
  1574. +10100 vamomax
  1575. +11000 vamominu
  1576. +11100 vamomaxu
  1577. +
  1578. + 31-27 26 25 24-20 19-15 14-12 11-7 6-0
  1579. + amoop wd vm vs2 rs1 width vs3/vd opcode
  1580. + 00001 x 1 xxxxx xxxxx 110 xxxxx 0101111
  1581. + 0000 1x1x xxxx xxxx x110 xxxx x010 1111
  1582. + 1111 1010 0000 0000 0111 0000 0111 1111 */
  1583. +
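To sanity-check the layout in the note above, a couple of field extractors are enough; the macros below are an illustrative sketch against that layout, not part of the patch:

#include <stdint.h>

/* VAMO layout from the note above:
   amoop[31:27] wd[26] vm[25] vs2[24:20] rs1[19:15]
   width[14:12] vs3/vd[11:7] opcode[6:0].  */
#define VAMO_AMOOP(x)  (((uint32_t)(x) >> 27) & 0x1f)
#define VAMO_WD(x)     (((uint32_t)(x) >> 26) & 0x1)
#define VAMO_VM(x)     (((uint32_t)(x) >> 25) & 0x1)
#define VAMO_WIDTH(x)  (((uint32_t)(x) >> 12) & 0x7)

/* E.g. VAMO_AMOOP (MATCH_VAMOSWAPEI32V) == 0x01 (vamoswap) and
   VAMO_WIDTH (MATCH_VAMOSWAPEI32V) == 0x6 (110, EI32), exactly as
   the amoop and width tables above predict.  */
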
  1584. +#define MATCH_VAMOADDEI8V 0x0000002f
  1585. +#define MASK_VAMOADDEI8V 0xf800707f
  1586. +#define MATCH_VAMOSWAPEI8V 0x0800002f
  1587. +#define MASK_VAMOSWAPEI8V 0xf800707f
  1588. +#define MATCH_VAMOXOREI8V 0x2000002f
  1589. +#define MASK_VAMOXOREI8V 0xf800707f
  1590. +#define MATCH_VAMOANDEI8V 0x6000002f
  1591. +#define MASK_VAMOANDEI8V 0xf800707f
  1592. +#define MATCH_VAMOOREI8V 0x4000002f
  1593. +#define MASK_VAMOOREI8V 0xf800707f
  1594. +#define MATCH_VAMOMINEI8V 0x8000002f
  1595. +#define MASK_VAMOMINEI8V 0xf800707f
  1596. +#define MATCH_VAMOMAXEI8V 0xa000002f
  1597. +#define MASK_VAMOMAXEI8V 0xf800707f
  1598. +#define MATCH_VAMOMINUEI8V 0xc000002f
  1599. +#define MASK_VAMOMINUEI8V 0xf800707f
  1600. +#define MATCH_VAMOMAXUEI8V 0xe000002f
  1601. +#define MASK_VAMOMAXUEI8V 0xf800707f
  1602. +
  1603. +#define MATCH_VAMOADDEI16V 0x0000502f
  1604. +#define MASK_VAMOADDEI16V 0xf800707f
  1605. +#define MATCH_VAMOSWAPEI16V 0x0800502f
  1606. +#define MASK_VAMOSWAPEI16V 0xf800707f
  1607. +#define MATCH_VAMOXOREI16V 0x2000502f
  1608. +#define MASK_VAMOXOREI16V 0xf800707f
  1609. +#define MATCH_VAMOANDEI16V 0x6000502f
  1610. +#define MASK_VAMOANDEI16V 0xf800707f
  1611. +#define MATCH_VAMOOREI16V 0x4000502f
  1612. +#define MASK_VAMOOREI16V 0xf800707f
  1613. +#define MATCH_VAMOMINEI16V 0x8000502f
  1614. +#define MASK_VAMOMINEI16V 0xf800707f
  1615. +#define MATCH_VAMOMAXEI16V 0xa000502f
  1616. +#define MASK_VAMOMAXEI16V 0xf800707f
  1617. +#define MATCH_VAMOMINUEI16V 0xc000502f
  1618. +#define MASK_VAMOMINUEI16V 0xf800707f
  1619. +#define MATCH_VAMOMAXUEI16V 0xe000502f
  1620. +#define MASK_VAMOMAXUEI16V 0xf800707f
  1621. +
  1622. +#define MATCH_VAMOADDEI32V 0x0000602f
  1623. +#define MASK_VAMOADDEI32V 0xf800707f
  1624. +#define MATCH_VAMOSWAPEI32V 0x0800602f
  1625. +#define MASK_VAMOSWAPEI32V 0xf800707f
  1626. +#define MATCH_VAMOXOREI32V 0x2000602f
  1627. +#define MASK_VAMOXOREI32V 0xf800707f
  1628. +#define MATCH_VAMOANDEI32V 0x6000602f
  1629. +#define MASK_VAMOANDEI32V 0xf800707f
  1630. +#define MATCH_VAMOOREI32V 0x4000602f
  1631. +#define MASK_VAMOOREI32V 0xf800707f
  1632. +#define MATCH_VAMOMINEI32V 0x8000602f
  1633. +#define MASK_VAMOMINEI32V 0xf800707f
  1634. +#define MATCH_VAMOMAXEI32V 0xa000602f
  1635. +#define MASK_VAMOMAXEI32V 0xf800707f
  1636. +#define MATCH_VAMOMINUEI32V 0xc000602f
  1637. +#define MASK_VAMOMINUEI32V 0xf800707f
  1638. +#define MATCH_VAMOMAXUEI32V 0xe000602f
  1639. +#define MASK_VAMOMAXUEI32V 0xf800707f
  1640. +
  1641. +#define MATCH_VAMOADDEI64V 0x0000702f
  1642. +#define MASK_VAMOADDEI64V 0xf800707f
  1643. +#define MATCH_VAMOSWAPEI64V 0x0800702f
  1644. +#define MASK_VAMOSWAPEI64V 0xf800707f
  1645. +#define MATCH_VAMOXOREI64V 0x2000702f
  1646. +#define MASK_VAMOXOREI64V 0xf800707f
  1647. +#define MATCH_VAMOANDEI64V 0x6000702f
  1648. +#define MASK_VAMOANDEI64V 0xf800707f
  1649. +#define MATCH_VAMOOREI64V 0x4000702f
  1650. +#define MASK_VAMOOREI64V 0xf800707f
  1651. +#define MATCH_VAMOMINEI64V 0x8000702f
  1652. +#define MASK_VAMOMINEI64V 0xf800707f
  1653. +#define MATCH_VAMOMAXEI64V 0xa000702f
  1654. +#define MASK_VAMOMAXEI64V 0xf800707f
  1655. +#define MATCH_VAMOMINUEI64V 0xc000702f
  1656. +#define MASK_VAMOMINUEI64V 0xf800707f
  1657. +#define MATCH_VAMOMAXUEI64V 0xe000702f
  1658. +#define MASK_VAMOMAXUEI64V 0xf800707f
  1659. +
  1660. +/* Temporary ALU encoding info
  1661. +
  1662. +funct3
  1663. +000 OPIVV vv
  1664. +001 OPFVV vv
  1665. +010 OPMVV vv
  1666. +011 OPIVI vi simm[4:0]
  1667. +100 OPIVX vx GPR x-reg rs1
  1668. +101 OPFVF vf FP f-reg rs1
  1669. +110 OPMVX vx GPR x-reg rs1
  1670. +111 OPCFG si GPR x-reg rs1 & rs2/imm
  1671. +
  1672. +INT OPI
  1673. +funct6
  1674. +000000 vadd
  1675. +000001
  1676. +000010 vsub
  1677. +000011 vrsub
  1678. +000100 vminu
  1679. +000101 vmin
  1680. +000110 vmaxu
  1681. +000111 vmax
  1682. +001000
  1683. +001001 vand
  1684. +001010 vor
  1685. +001011 vxor
  1686. +001100 vrgather
  1687. +001101
  1688. +001110 vslideup, vrgatherei16
  1689. +001111 vslidedown
  1690. +010000 vadc
  1691. +010001 vmadc
  1692. +010010 vsbc
  1693. +010011 vmsbc
  1694. +010100
  1695. +010101
  1696. +010110
  1697. +010111 vmerge/vmv
  1698. +011000 vmseq
  1699. +011001 vmsne
  1700. +011010 vmsltu
  1701. +011011 vmslt
  1702. +011100 vmsleu
  1703. +011101 vmsle
  1704. +011110 vmsgtu
  1705. +011111 vmsgt
  1706. +100000 vsaddu
  1707. +100001 vsadd
  1708. +100010 vssubu
  1709. +100011 vssub
  1710. +100100
  1711. +100101 vsll
  1712. +100110
  1713. +100111 vmv<nf>r (nf = 1, 2, 4, 8)
  1714. +101000 vsrl
  1715. +101001 vsra
  1716. +101010 vssrl
  1717. +101011 vssra
  1718. +101100 vnsrl
  1719. +101101 vnsra
  1720. +101110 vnclipu
  1721. +101111 vnclip
  1722. +110000 vwredsumu
  1723. +110001 vwredsum
  1724. +110010
  1725. +110011
  1726. +110100
  1727. +110101
  1728. +110110
  1729. +110111
  1730. +111000 vdotu **
  1731. +111001 vdot **
  1732. +111010
  1733. +111011
  1734. +111100 vqmaccu
  1735. +111101 vqmacc
  1736. +111110 vqmaccus
  1737. +111111 vqmaccsu
  1738. +
  1739. +INT OPM
  1740. +funct6
  1741. +000000 vredsum
  1742. +000001 vredand
  1743. +000010 vredor
  1744. +000011 vredxor
  1745. +000100 vredminu
  1746. +000101 vredmin
  1747. +000110 vredmaxu
  1748. +000111 vredmax
  1749. +001000 vaaddu
  1750. +001001 vaadd
  1751. +001010 vasubu
  1752. +001011 vasub
  1753. +001100
  1754. +001101
  1755. +001110 vslide1up
  1756. +001111 vslide1down
  1757. +010000 VRXUNARY0/VWXUNARY0
  1758. +010001
  1759. +010010 VXUNARY0
  1760. +010011
  1761. +010100 VMUNARY0
  1762. +010101
  1763. +010110
  1764. +010111 vcompress
  1765. +011000 vmandnot
  1766. +011001 vmand
  1767. +011010 vmor
  1768. +011011 vmxor
  1769. +011100 vmornot
  1770. +011101 vmnand
  1771. +011110 vmnor
  1772. +011111 vmxnor
  1773. +100000 vdivu
  1774. +100001 vdiv
  1775. +100010 vremu
  1776. +100011 vrem
  1777. +100100 vmulhu
  1778. +100101 vmul
  1779. +100110 vmulhsu
  1780. +100111 vmulh
  1781. +101000
  1782. +101001 vmadd
  1783. +101010
  1784. +101011 vnmsub
  1785. +101100
  1786. +101101 vmacc
  1787. +101110
  1788. +101111 vnmsac
  1789. +110000 vwaddu
  1790. +110001 vwadd
  1791. +110010 vwsubu
  1792. +110011 vwsub
  1793. +110100 vwaddu.w
  1794. +110101 vwadd.w
  1795. +110110 vwsubu.w
  1796. +110111 vwsub.w
  1797. +111000 vwmulu
  1798. +111001
  1799. +111010 vwmulsu
  1800. +111011 vwmul
  1801. +111100 vwmaccu
  1802. +111101 vwmacc
  1803. +111110 vwmaccus
  1804. +111111 vwmaccsu
  1805. +
  1806. +VRXUNARY0
  1807. +vs2, funct3=X
  1808. +00000 vmv.s.x
  1809. +
  1810. +VWXUNARY0
  1811. +vs1, funct3=V
  1812. +00000 vmv.x.s
  1813. +10000 vpopc
  1814. +10001 vfirst
  1815. +
  1816. +VXUNARY0
  1817. +vs1, funct3=V
  1818. +00010 vzext.vf8
  1819. +00011 vsext.vf8
  1820. +00100 vzext.vf4
  1821. +00101 vsext.vf4
  1822. +00110 vzext.vf2
  1823. +00111 vsext.vf2
  1824. +
  1825. +VMUNARY0
  1826. +rs1
  1827. +00001 vmsbf
  1828. +00010 vmsof
  1829. +00011 vmsif
  1830. +10000 viota
  1831. +10001 vid
  1832. +
  1833. +VFLOAT
  1834. +funct6
  1835. +000000 vfadd
  1836. +000001 vfredsum
  1837. +000010 vfsub
  1838. +000011 vfredosum
  1839. +000100 vfmin
  1840. +000101 vfredmin
  1841. +000110 vfmax
  1842. +000111 vfredmax
  1843. +001000 vfsgnj
1844. +001001 vfsgnjn
1845. +001010 vfsgnjx
  1846. +001011
  1847. +001100
  1848. +001101
  1849. +001110 vfslide1up
  1850. +001111 vfslide1down
  1851. +010000 VRFUNARY0/VWFUNARY0
  1852. +010001
  1853. +010010 VFUNARY0
  1854. +010011 VFUNARY1
  1855. +010100
  1856. +010101
  1857. +010110
  1858. +010111 vfmerge/vfmv
  1859. +011000 vmfeq
  1860. +011001 vmfle
  1861. +011010
  1862. +011011 vmflt
  1863. +011100 vmfne
  1864. +011101 vmfgt
  1865. +011110
  1866. +011111 vmfge
  1867. +100000 vfdiv
  1868. +100001 vfrdiv
  1869. +100010
  1870. +100011
  1871. +100100 vfmul
  1872. +100101
  1873. +100110
  1874. +100111 vfrsub
  1875. +101000 vfmadd
  1876. +101001 vfnmadd
  1877. +101010 vfmsub
  1878. +101011 vfnmsub
  1879. +101100 vfmacc
  1880. +101101 vfnmacc
  1881. +101110 vfmsac
  1882. +101111 vfnmsac
  1883. +110000 vfwadd
  1884. +110001 vfwredsum
  1885. +110010 vfwsub
  1886. +110011 vfwredosum
  1887. +110100 vfwadd.w
  1888. +110101
  1889. +110110 vfwsub.w
  1890. +110111
  1891. +111000 vfwmul
  1892. +111001 vfdot
  1893. +111010
  1894. +111011
  1895. +111100 vfwmacc
  1896. +111101 vfwnmacc
  1897. +111110 vfwmsac
  1898. +111111 vfwnmsac
  1899. +
  1900. +VRFUNARY0
  1901. +vs2, funct3=F
  1902. +00000 vfmv.s.f
  1903. +
  1904. +VWFUNARY0
  1905. +vs1, funct3=V
  1906. +00000 vfmv.f.s
  1907. +
  1908. +VFUNARY0
  1909. +vs1
  1910. +00000 vfcvt.xu.f.v
  1911. +00001 vfcvt.x.f.v
  1912. +00010 vfcvt.f.xu.v
  1913. +00011 vfcvt.f.x.v
  1914. +00110 vfcvt.rtz.xu.f.v
  1915. +00111 vfcvt.rtz.x.f.v
  1916. +
  1917. +01000 vfwcvt.xu.f.v
  1918. +01001 vfwcvt.x.f.v
  1919. +01010 vfwcvt.f.xu.v
  1920. +01011 vfwcvt.f.x.v
  1921. +01100 vfwcvt.f.f.v
  1922. +01110 vfwcvt.rtz.xu.f.v
  1923. +01111 vfwcvt.rtz.x.f.v
  1924. +
  1925. +10000 vfncvt.xu.f.w
  1926. +10001 vfncvt.x.f.w
  1927. +10010 vfncvt.f.xu.w
  1928. +10011 vfncvt.f.x.w
  1929. +10100 vfncvt.f.f.w
  1930. +10101 vfncvt.rod.f.f.w
1931. +10110 vfncvt.rtz.xu.f.w
1932. +10111 vfncvt.rtz.x.f.w
  1933. +
  1934. +VFUNARY1
  1935. +vs1
  1936. +00000 vfsqrt.v
1937. +00100 vfrsqrt7.v
1938. +00101 vfrec7.v
  1939. +10000 vfclass.v
  1940. +
  1941. +31-26 25 24-20 19-15 14-12 11-7 6-0
  1942. +funct6 VM VS2 VS1/RS1/IMM funct3 VD opcode
  1943. +010000 x xxxxx 00000 001 xxxxx 1010111
  1944. +0100 00xx xxxx 0000 0001 xxxx x101 0111
  1945. +*/
  1946. +
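The ALU note can be cross-checked the same way; again an illustrative sketch, not patch code:

#include <stdint.h>

/* OP-V layout from the note above:
   funct6[31:26] vm[25] vs2[24:20] vs1/rs1/imm[19:15]
   funct3[14:12] vd[11:7] opcode[6:0].  */
#define OPV_FUNCT6(x)  (((uint32_t)(x) >> 26) & 0x3f)
#define OPV_FUNCT3(x)  (((uint32_t)(x) >> 12) & 0x7)

/* MATCH_VADDVV (0x00000057): funct6 000000, funct3 000 (OPIVV) --
   the OPI table's vadd.  MATCH_VMULVV (0x94002057): funct6 100101,
   funct3 010 (OPMVV) -- the OPM table's vmul.  */
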
  1947. +#define MATCH_VADDVV 0x00000057
  1948. +#define MASK_VADDVV 0xfc00707f
  1949. +#define MATCH_VADDVX 0x00004057
  1950. +#define MASK_VADDVX 0xfc00707f
  1951. +#define MATCH_VADDVI 0x00003057
  1952. +#define MASK_VADDVI 0xfc00707f
  1953. +#define MATCH_VSUBVV 0x08000057
  1954. +#define MASK_VSUBVV 0xfc00707f
  1955. +#define MATCH_VSUBVX 0x08004057
  1956. +#define MASK_VSUBVX 0xfc00707f
  1957. +#define MATCH_VRSUBVX 0x0c004057
  1958. +#define MASK_VRSUBVX 0xfc00707f
  1959. +#define MATCH_VRSUBVI 0x0c003057
  1960. +#define MASK_VRSUBVI 0xfc00707f
  1961. +
  1962. +#define MATCH_VWCVTXXV 0xc4006057
  1963. +#define MASK_VWCVTXXV 0xfc0ff07f
  1964. +#define MATCH_VWCVTUXXV 0xc0006057
  1965. +#define MASK_VWCVTUXXV 0xfc0ff07f
  1966. +
  1967. +#define MATCH_VWADDVV 0xc4002057
  1968. +#define MASK_VWADDVV 0xfc00707f
  1969. +#define MATCH_VWADDVX 0xc4006057
  1970. +#define MASK_VWADDVX 0xfc00707f
  1971. +#define MATCH_VWSUBVV 0xcc002057
  1972. +#define MASK_VWSUBVV 0xfc00707f
  1973. +#define MATCH_VWSUBVX 0xcc006057
  1974. +#define MASK_VWSUBVX 0xfc00707f
  1975. +#define MATCH_VWADDWV 0xd4002057
  1976. +#define MASK_VWADDWV 0xfc00707f
  1977. +#define MATCH_VWADDWX 0xd4006057
  1978. +#define MASK_VWADDWX 0xfc00707f
  1979. +#define MATCH_VWSUBWV 0xdc002057
  1980. +#define MASK_VWSUBWV 0xfc00707f
  1981. +#define MATCH_VWSUBWX 0xdc006057
  1982. +#define MASK_VWSUBWX 0xfc00707f
  1983. +#define MATCH_VWADDUVV 0xc0002057
  1984. +#define MASK_VWADDUVV 0xfc00707f
  1985. +#define MATCH_VWADDUVX 0xc0006057
  1986. +#define MASK_VWADDUVX 0xfc00707f
  1987. +#define MATCH_VWSUBUVV 0xc8002057
  1988. +#define MASK_VWSUBUVV 0xfc00707f
  1989. +#define MATCH_VWSUBUVX 0xc8006057
  1990. +#define MASK_VWSUBUVX 0xfc00707f
  1991. +#define MATCH_VWADDUWV 0xd0002057
  1992. +#define MASK_VWADDUWV 0xfc00707f
  1993. +#define MATCH_VWADDUWX 0xd0006057
  1994. +#define MASK_VWADDUWX 0xfc00707f
  1995. +#define MATCH_VWSUBUWV 0xd8002057
  1996. +#define MASK_VWSUBUWV 0xfc00707f
  1997. +#define MATCH_VWSUBUWX 0xd8006057
  1998. +#define MASK_VWSUBUWX 0xfc00707f
  1999. +
  2000. +#define MATCH_VZEXT_VF8 0x48012057
  2001. +#define MASK_VZEXT_VF8 0xfc0ff07f
  2002. +#define MATCH_VSEXT_VF8 0x4801a057
  2003. +#define MASK_VSEXT_VF8 0xfc0ff07f
  2004. +#define MATCH_VZEXT_VF4 0x48022057
  2005. +#define MASK_VZEXT_VF4 0xfc0ff07f
  2006. +#define MATCH_VSEXT_VF4 0x4802a057
  2007. +#define MASK_VSEXT_VF4 0xfc0ff07f
  2008. +#define MATCH_VZEXT_VF2 0x48032057
  2009. +#define MASK_VZEXT_VF2 0xfc0ff07f
  2010. +#define MATCH_VSEXT_VF2 0x4803a057
  2011. +#define MASK_VSEXT_VF2 0xfc0ff07f
  2012. +
  2013. +#define MATCH_VADCVVM 0x40000057
  2014. +#define MASK_VADCVVM 0xfe00707f
  2015. +#define MATCH_VADCVXM 0x40004057
  2016. +#define MASK_VADCVXM 0xfe00707f
  2017. +#define MATCH_VADCVIM 0x40003057
  2018. +#define MASK_VADCVIM 0xfe00707f
  2019. +#define MATCH_VMADCVVM 0x44000057
  2020. +#define MASK_VMADCVVM 0xfe00707f
  2021. +#define MATCH_VMADCVXM 0x44004057
  2022. +#define MASK_VMADCVXM 0xfe00707f
  2023. +#define MATCH_VMADCVIM 0x44003057
  2024. +#define MASK_VMADCVIM 0xfe00707f
  2025. +#define MATCH_VMADCVV 0x46000057
  2026. +#define MASK_VMADCVV 0xfe00707f
  2027. +#define MATCH_VMADCVX 0x46004057
  2028. +#define MASK_VMADCVX 0xfe00707f
  2029. +#define MATCH_VMADCVI 0x46003057
  2030. +#define MASK_VMADCVI 0xfe00707f
  2031. +#define MATCH_VSBCVVM 0x48000057
  2032. +#define MASK_VSBCVVM 0xfe00707f
  2033. +#define MATCH_VSBCVXM 0x48004057
  2034. +#define MASK_VSBCVXM 0xfe00707f
  2035. +#define MATCH_VMSBCVVM 0x4c000057
  2036. +#define MASK_VMSBCVVM 0xfe00707f
  2037. +#define MATCH_VMSBCVXM 0x4c004057
  2038. +#define MASK_VMSBCVXM 0xfe00707f
  2039. +#define MATCH_VMSBCVV 0x4e000057
  2040. +#define MASK_VMSBCVV 0xfe00707f
  2041. +#define MATCH_VMSBCVX 0x4e004057
  2042. +#define MASK_VMSBCVX 0xfe00707f
  2043. +
  2044. +#define MATCH_VNOTV 0x2c0fb057
  2045. +#define MASK_VNOTV 0xfc0ff07f
  2046. +
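vnot.v is the usual pseudo-instruction for vxor.vi vd, vs2, -1, and the constants bear that out: MATCH_VXORVI (0x2c003057, defined just below) with imm[19:15] fixed to 11111 is 0x2c003057 | 0x000f8000 = 0x2c0fb057 = MATCH_VNOTV, and the mask grows from 0xfc00707f to 0xfc0ff07f so the immediate field becomes part of the match.
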
  2047. +#define MATCH_VANDVV 0x24000057
  2048. +#define MASK_VANDVV 0xfc00707f
  2049. +#define MATCH_VANDVX 0x24004057
  2050. +#define MASK_VANDVX 0xfc00707f
  2051. +#define MATCH_VANDVI 0x24003057
  2052. +#define MASK_VANDVI 0xfc00707f
  2053. +#define MATCH_VORVV 0x28000057
  2054. +#define MASK_VORVV 0xfc00707f
  2055. +#define MATCH_VORVX 0x28004057
  2056. +#define MASK_VORVX 0xfc00707f
  2057. +#define MATCH_VORVI 0x28003057
  2058. +#define MASK_VORVI 0xfc00707f
  2059. +#define MATCH_VXORVV 0x2c000057
  2060. +#define MASK_VXORVV 0xfc00707f
  2061. +#define MATCH_VXORVX 0x2c004057
  2062. +#define MASK_VXORVX 0xfc00707f
  2063. +#define MATCH_VXORVI 0x2c003057
  2064. +#define MASK_VXORVI 0xfc00707f
  2065. +
  2066. +#define MATCH_VSLLVV 0x94000057
  2067. +#define MASK_VSLLVV 0xfc00707f
  2068. +#define MATCH_VSLLVX 0x94004057
  2069. +#define MASK_VSLLVX 0xfc00707f
  2070. +#define MATCH_VSLLVI 0x94003057
  2071. +#define MASK_VSLLVI 0xfc00707f
  2072. +#define MATCH_VSRLVV 0xa0000057
  2073. +#define MASK_VSRLVV 0xfc00707f
  2074. +#define MATCH_VSRLVX 0xa0004057
  2075. +#define MASK_VSRLVX 0xfc00707f
  2076. +#define MATCH_VSRLVI 0xa0003057
  2077. +#define MASK_VSRLVI 0xfc00707f
  2078. +#define MATCH_VSRAVV 0xa4000057
  2079. +#define MASK_VSRAVV 0xfc00707f
  2080. +#define MATCH_VSRAVX 0xa4004057
  2081. +#define MASK_VSRAVX 0xfc00707f
  2082. +#define MATCH_VSRAVI 0xa4003057
  2083. +#define MASK_VSRAVI 0xfc00707f
  2084. +
  2085. +#define MATCH_VNCVTXXW 0xb0004057
  2086. +#define MASK_VNCVTXXW 0xfc0ff07f
  2087. +
  2088. +#define MATCH_VNSRLWV 0xb0000057
  2089. +#define MASK_VNSRLWV 0xfc00707f
  2090. +#define MATCH_VNSRLWX 0xb0004057
  2091. +#define MASK_VNSRLWX 0xfc00707f
  2092. +#define MATCH_VNSRLWI 0xb0003057
  2093. +#define MASK_VNSRLWI 0xfc00707f
  2094. +#define MATCH_VNSRAWV 0xb4000057
  2095. +#define MASK_VNSRAWV 0xfc00707f
  2096. +#define MATCH_VNSRAWX 0xb4004057
  2097. +#define MASK_VNSRAWX 0xfc00707f
  2098. +#define MATCH_VNSRAWI 0xb4003057
  2099. +#define MASK_VNSRAWI 0xfc00707f
  2100. +
  2101. +#define MATCH_VMSEQVV 0x60000057
  2102. +#define MASK_VMSEQVV 0xfc00707f
  2103. +#define MATCH_VMSEQVX 0x60004057
  2104. +#define MASK_VMSEQVX 0xfc00707f
  2105. +#define MATCH_VMSEQVI 0x60003057
  2106. +#define MASK_VMSEQVI 0xfc00707f
  2107. +#define MATCH_VMSNEVV 0x64000057
  2108. +#define MASK_VMSNEVV 0xfc00707f
  2109. +#define MATCH_VMSNEVX 0x64004057
  2110. +#define MASK_VMSNEVX 0xfc00707f
  2111. +#define MATCH_VMSNEVI 0x64003057
  2112. +#define MASK_VMSNEVI 0xfc00707f
  2113. +#define MATCH_VMSLTVV 0x6c000057
  2114. +#define MASK_VMSLTVV 0xfc00707f
  2115. +#define MATCH_VMSLTVX 0x6c004057
  2116. +#define MASK_VMSLTVX 0xfc00707f
  2117. +#define MATCH_VMSLTUVV 0x68000057
  2118. +#define MASK_VMSLTUVV 0xfc00707f
  2119. +#define MATCH_VMSLTUVX 0x68004057
  2120. +#define MASK_VMSLTUVX 0xfc00707f
  2121. +#define MATCH_VMSLEVV 0x74000057
  2122. +#define MASK_VMSLEVV 0xfc00707f
  2123. +#define MATCH_VMSLEVX 0x74004057
  2124. +#define MASK_VMSLEVX 0xfc00707f
  2125. +#define MATCH_VMSLEVI 0x74003057
  2126. +#define MASK_VMSLEVI 0xfc00707f
  2127. +#define MATCH_VMSLEUVV 0x70000057
  2128. +#define MASK_VMSLEUVV 0xfc00707f
  2129. +#define MATCH_VMSLEUVX 0x70004057
  2130. +#define MASK_VMSLEUVX 0xfc00707f
  2131. +#define MATCH_VMSLEUVI 0x70003057
  2132. +#define MASK_VMSLEUVI 0xfc00707f
  2133. +#define MATCH_VMSGTVX 0x7c004057
  2134. +#define MASK_VMSGTVX 0xfc00707f
  2135. +#define MATCH_VMSGTVI 0x7c003057
  2136. +#define MASK_VMSGTVI 0xfc00707f
  2137. +#define MATCH_VMSGTUVX 0x78004057
  2138. +#define MASK_VMSGTUVX 0xfc00707f
  2139. +#define MATCH_VMSGTUVI 0x78003057
  2140. +#define MASK_VMSGTUVI 0xfc00707f
  2141. +
  2142. +#define MATCH_VMINVV 0x14000057
  2143. +#define MASK_VMINVV 0xfc00707f
  2144. +#define MATCH_VMINVX 0x14004057
  2145. +#define MASK_VMINVX 0xfc00707f
  2146. +#define MATCH_VMAXVV 0x1c000057
  2147. +#define MASK_VMAXVV 0xfc00707f
  2148. +#define MATCH_VMAXVX 0x1c004057
  2149. +#define MASK_VMAXVX 0xfc00707f
  2150. +#define MATCH_VMINUVV 0x10000057
  2151. +#define MASK_VMINUVV 0xfc00707f
  2152. +#define MATCH_VMINUVX 0x10004057
  2153. +#define MASK_VMINUVX 0xfc00707f
  2154. +#define MATCH_VMAXUVV 0x18000057
  2155. +#define MASK_VMAXUVV 0xfc00707f
  2156. +#define MATCH_VMAXUVX 0x18004057
  2157. +#define MASK_VMAXUVX 0xfc00707f
  2158. +
  2159. +#define MATCH_VMULVV 0x94002057
  2160. +#define MASK_VMULVV 0xfc00707f
  2161. +#define MATCH_VMULVX 0x94006057
  2162. +#define MASK_VMULVX 0xfc00707f
  2163. +#define MATCH_VMULHVV 0x9c002057
  2164. +#define MASK_VMULHVV 0xfc00707f
  2165. +#define MATCH_VMULHVX 0x9c006057
  2166. +#define MASK_VMULHVX 0xfc00707f
  2167. +#define MATCH_VMULHUVV 0x90002057
  2168. +#define MASK_VMULHUVV 0xfc00707f
  2169. +#define MATCH_VMULHUVX 0x90006057
  2170. +#define MASK_VMULHUVX 0xfc00707f
  2171. +#define MATCH_VMULHSUVV 0x98002057
  2172. +#define MASK_VMULHSUVV 0xfc00707f
  2173. +#define MATCH_VMULHSUVX 0x98006057
  2174. +#define MASK_VMULHSUVX 0xfc00707f
  2175. +
  2176. +#define MATCH_VWMULVV 0xec002057
  2177. +#define MASK_VWMULVV 0xfc00707f
  2178. +#define MATCH_VWMULVX 0xec006057
  2179. +#define MASK_VWMULVX 0xfc00707f
  2180. +#define MATCH_VWMULUVV 0xe0002057
  2181. +#define MASK_VWMULUVV 0xfc00707f
  2182. +#define MATCH_VWMULUVX 0xe0006057
  2183. +#define MASK_VWMULUVX 0xfc00707f
  2184. +#define MATCH_VWMULSUVV 0xe8002057
  2185. +#define MASK_VWMULSUVV 0xfc00707f
  2186. +#define MATCH_VWMULSUVX 0xe8006057
  2187. +#define MASK_VWMULSUVX 0xfc00707f
  2188. +
  2189. +#define MATCH_VMACCVV 0xb4002057
  2190. +#define MASK_VMACCVV 0xfc00707f
  2191. +#define MATCH_VMACCVX 0xb4006057
  2192. +#define MASK_VMACCVX 0xfc00707f
  2193. +#define MATCH_VNMSACVV 0xbc002057
  2194. +#define MASK_VNMSACVV 0xfc00707f
  2195. +#define MATCH_VNMSACVX 0xbc006057
  2196. +#define MASK_VNMSACVX 0xfc00707f
  2197. +#define MATCH_VMADDVV 0xa4002057
  2198. +#define MASK_VMADDVV 0xfc00707f
  2199. +#define MATCH_VMADDVX 0xa4006057
  2200. +#define MASK_VMADDVX 0xfc00707f
  2201. +#define MATCH_VNMSUBVV 0xac002057
  2202. +#define MASK_VNMSUBVV 0xfc00707f
  2203. +#define MATCH_VNMSUBVX 0xac006057
  2204. +#define MASK_VNMSUBVX 0xfc00707f
  2205. +
  2206. +#define MATCH_VWMACCUVV 0xf0002057
  2207. +#define MASK_VWMACCUVV 0xfc00707f
  2208. +#define MATCH_VWMACCUVX 0xf0006057
  2209. +#define MASK_VWMACCUVX 0xfc00707f
  2210. +#define MATCH_VWMACCVV 0xf4002057
  2211. +#define MASK_VWMACCVV 0xfc00707f
  2212. +#define MATCH_VWMACCVX 0xf4006057
  2213. +#define MASK_VWMACCVX 0xfc00707f
  2214. +#define MATCH_VWMACCSUVV 0xfc002057
  2215. +#define MASK_VWMACCSUVV 0xfc00707f
  2216. +#define MATCH_VWMACCSUVX 0xfc006057
  2217. +#define MASK_VWMACCSUVX 0xfc00707f
  2218. +#define MATCH_VWMACCUSVX 0xf8006057
  2219. +#define MASK_VWMACCUSVX 0xfc00707f
  2220. +
  2221. +#define MATCH_VQMACCUVV 0xf0000057
  2222. +#define MASK_VQMACCUVV 0xfc00707f
  2223. +#define MATCH_VQMACCUVX 0xf0004057
  2224. +#define MASK_VQMACCUVX 0xfc00707f
  2225. +#define MATCH_VQMACCVV 0xf4000057
  2226. +#define MASK_VQMACCVV 0xfc00707f
  2227. +#define MATCH_VQMACCVX 0xf4004057
  2228. +#define MASK_VQMACCVX 0xfc00707f
  2229. +#define MATCH_VQMACCSUVV 0xfc000057
  2230. +#define MASK_VQMACCSUVV 0xfc00707f
  2231. +#define MATCH_VQMACCSUVX 0xfc004057
  2232. +#define MASK_VQMACCSUVX 0xfc00707f
  2233. +#define MATCH_VQMACCUSVX 0xf8004057
  2234. +#define MASK_VQMACCUSVX 0xfc00707f
  2235. +
  2236. +#define MATCH_VDIVVV 0x84002057
  2237. +#define MASK_VDIVVV 0xfc00707f
  2238. +#define MATCH_VDIVVX 0x84006057
  2239. +#define MASK_VDIVVX 0xfc00707f
  2240. +#define MATCH_VDIVUVV 0x80002057
  2241. +#define MASK_VDIVUVV 0xfc00707f
  2242. +#define MATCH_VDIVUVX 0x80006057
  2243. +#define MASK_VDIVUVX 0xfc00707f
  2244. +#define MATCH_VREMVV 0x8c002057
  2245. +#define MASK_VREMVV 0xfc00707f
  2246. +#define MATCH_VREMVX 0x8c006057
  2247. +#define MASK_VREMVX 0xfc00707f
  2248. +#define MATCH_VREMUVV 0x88002057
  2249. +#define MASK_VREMUVV 0xfc00707f
  2250. +#define MATCH_VREMUVX 0x88006057
  2251. +#define MASK_VREMUVX 0xfc00707f
  2252. +
  2253. +#define MATCH_VMERGEVVM 0x5c000057
  2254. +#define MASK_VMERGEVVM 0xfe00707f
  2255. +#define MATCH_VMERGEVXM 0x5c004057
  2256. +#define MASK_VMERGEVXM 0xfe00707f
  2257. +#define MATCH_VMERGEVIM 0x5c003057
  2258. +#define MASK_VMERGEVIM 0xfe00707f
  2259. +
  2260. +#define MATCH_VMVVV 0x5e000057
  2261. +#define MASK_VMVVV 0xfff0707f
  2262. +#define MATCH_VMVVX 0x5e004057
  2263. +#define MASK_VMVVX 0xfff0707f
  2264. +#define MATCH_VMVVI 0x5e003057
  2265. +#define MASK_VMVVI 0xfff0707f
  2266. +
  2267. +#define MATCH_VSADDUVV 0x80000057
  2268. +#define MASK_VSADDUVV 0xfc00707f
  2269. +#define MATCH_VSADDUVX 0x80004057
  2270. +#define MASK_VSADDUVX 0xfc00707f
  2271. +#define MATCH_VSADDUVI 0x80003057
  2272. +#define MASK_VSADDUVI 0xfc00707f
  2273. +#define MATCH_VSADDVV 0x84000057
  2274. +#define MASK_VSADDVV 0xfc00707f
  2275. +#define MATCH_VSADDVX 0x84004057
  2276. +#define MASK_VSADDVX 0xfc00707f
  2277. +#define MATCH_VSADDVI 0x84003057
  2278. +#define MASK_VSADDVI 0xfc00707f
  2279. +#define MATCH_VSSUBUVV 0x88000057
  2280. +#define MASK_VSSUBUVV 0xfc00707f
  2281. +#define MATCH_VSSUBUVX 0x88004057
  2282. +#define MASK_VSSUBUVX 0xfc00707f
  2283. +#define MATCH_VSSUBVV 0x8c000057
  2284. +#define MASK_VSSUBVV 0xfc00707f
  2285. +#define MATCH_VSSUBVX 0x8c004057
  2286. +#define MASK_VSSUBVX 0xfc00707f
  2287. +
  2288. +#define MATCH_VAADDUVV 0x20002057
  2289. +#define MASK_VAADDUVV 0xfc00707f
  2290. +#define MATCH_VAADDUVX 0x20006057
  2291. +#define MASK_VAADDUVX 0xfc00707f
  2292. +#define MATCH_VAADDVV 0x24002057
  2293. +#define MASK_VAADDVV 0xfc00707f
  2294. +#define MATCH_VAADDVX 0x24006057
  2295. +#define MASK_VAADDVX 0xfc00707f
  2296. +#define MATCH_VASUBUVV 0x28002057
  2297. +#define MASK_VASUBUVV 0xfc00707f
  2298. +#define MATCH_VASUBUVX 0x28006057
  2299. +#define MASK_VASUBUVX 0xfc00707f
  2300. +#define MATCH_VASUBVV 0x2c002057
  2301. +#define MASK_VASUBVV 0xfc00707f
  2302. +#define MATCH_VASUBVX 0x2c006057
  2303. +#define MASK_VASUBVX 0xfc00707f
  2304. +
  2305. +#define MATCH_VSMULVV 0x9c000057
  2306. +#define MASK_VSMULVV 0xfc00707f
  2307. +#define MATCH_VSMULVX 0x9c004057
  2308. +#define MASK_VSMULVX 0xfc00707f
  2309. +
  2310. +#define MATCH_VSSRLVV 0xa8000057
  2311. +#define MASK_VSSRLVV 0xfc00707f
  2312. +#define MATCH_VSSRLVX 0xa8004057
  2313. +#define MASK_VSSRLVX 0xfc00707f
  2314. +#define MATCH_VSSRLVI 0xa8003057
  2315. +#define MASK_VSSRLVI 0xfc00707f
  2316. +#define MATCH_VSSRAVV 0xac000057
  2317. +#define MASK_VSSRAVV 0xfc00707f
  2318. +#define MATCH_VSSRAVX 0xac004057
  2319. +#define MASK_VSSRAVX 0xfc00707f
  2320. +#define MATCH_VSSRAVI 0xac003057
  2321. +#define MASK_VSSRAVI 0xfc00707f
  2322. +
  2323. +#define MATCH_VNCLIPUWV 0xb8000057
  2324. +#define MASK_VNCLIPUWV 0xfc00707f
  2325. +#define MATCH_VNCLIPUWX 0xb8004057
  2326. +#define MASK_VNCLIPUWX 0xfc00707f
  2327. +#define MATCH_VNCLIPUWI 0xb8003057
  2328. +#define MASK_VNCLIPUWI 0xfc00707f
  2329. +#define MATCH_VNCLIPWV 0xbc000057
  2330. +#define MASK_VNCLIPWV 0xfc00707f
  2331. +#define MATCH_VNCLIPWX 0xbc004057
  2332. +#define MASK_VNCLIPWX 0xfc00707f
  2333. +#define MATCH_VNCLIPWI 0xbc003057
  2334. +#define MASK_VNCLIPWI 0xfc00707f
  2335. +
  2336. +#define MATCH_VFADDVV 0x00001057
  2337. +#define MASK_VFADDVV 0xfc00707f
  2338. +#define MATCH_VFADDVF 0x00005057
  2339. +#define MASK_VFADDVF 0xfc00707f
  2340. +#define MATCH_VFSUBVV 0x08001057
  2341. +#define MASK_VFSUBVV 0xfc00707f
  2342. +#define MATCH_VFSUBVF 0x08005057
  2343. +#define MASK_VFSUBVF 0xfc00707f
  2344. +#define MATCH_VFRSUBVF 0x9c005057
  2345. +#define MASK_VFRSUBVF 0xfc00707f
  2346. +
  2347. +#define MATCH_VFWADDVV 0xc0001057
  2348. +#define MASK_VFWADDVV 0xfc00707f
  2349. +#define MATCH_VFWADDVF 0xc0005057
  2350. +#define MASK_VFWADDVF 0xfc00707f
  2351. +#define MATCH_VFWSUBVV 0xc8001057
  2352. +#define MASK_VFWSUBVV 0xfc00707f
  2353. +#define MATCH_VFWSUBVF 0xc8005057
  2354. +#define MASK_VFWSUBVF 0xfc00707f
  2355. +#define MATCH_VFWADDWV 0xd0001057
  2356. +#define MASK_VFWADDWV 0xfc00707f
  2357. +#define MATCH_VFWADDWF 0xd0005057
  2358. +#define MASK_VFWADDWF 0xfc00707f
  2359. +#define MATCH_VFWSUBWV 0xd8001057
  2360. +#define MASK_VFWSUBWV 0xfc00707f
  2361. +#define MATCH_VFWSUBWF 0xd8005057
  2362. +#define MASK_VFWSUBWF 0xfc00707f
  2363. +
  2364. +#define MATCH_VFMULVV 0x90001057
  2365. +#define MASK_VFMULVV 0xfc00707f
  2366. +#define MATCH_VFMULVF 0x90005057
  2367. +#define MASK_VFMULVF 0xfc00707f
  2368. +#define MATCH_VFDIVVV 0x80001057
  2369. +#define MASK_VFDIVVV 0xfc00707f
  2370. +#define MATCH_VFDIVVF 0x80005057
  2371. +#define MASK_VFDIVVF 0xfc00707f
  2372. +#define MATCH_VFRDIVVF 0x84005057
  2373. +#define MASK_VFRDIVVF 0xfc00707f
  2374. +
  2375. +#define MATCH_VFWMULVV 0xe0001057
  2376. +#define MASK_VFWMULVV 0xfc00707f
  2377. +#define MATCH_VFWMULVF 0xe0005057
  2378. +#define MASK_VFWMULVF 0xfc00707f
  2379. +
  2380. +#define MATCH_VFMADDVV 0xa0001057
  2381. +#define MASK_VFMADDVV 0xfc00707f
  2382. +#define MATCH_VFMADDVF 0xa0005057
  2383. +#define MASK_VFMADDVF 0xfc00707f
  2384. +#define MATCH_VFNMADDVV 0xa4001057
  2385. +#define MASK_VFNMADDVV 0xfc00707f
  2386. +#define MATCH_VFNMADDVF 0xa4005057
  2387. +#define MASK_VFNMADDVF 0xfc00707f
  2388. +#define MATCH_VFMSUBVV 0xa8001057
  2389. +#define MASK_VFMSUBVV 0xfc00707f
  2390. +#define MATCH_VFMSUBVF 0xa8005057
  2391. +#define MASK_VFMSUBVF 0xfc00707f
  2392. +#define MATCH_VFNMSUBVV 0xac001057
  2393. +#define MASK_VFNMSUBVV 0xfc00707f
  2394. +#define MATCH_VFNMSUBVF 0xac005057
  2395. +#define MASK_VFNMSUBVF 0xfc00707f
  2396. +#define MATCH_VFMACCVV 0xb0001057
  2397. +#define MASK_VFMACCVV 0xfc00707f
  2398. +#define MATCH_VFMACCVF 0xb0005057
  2399. +#define MASK_VFMACCVF 0xfc00707f
  2400. +#define MATCH_VFNMACCVV 0xb4001057
  2401. +#define MASK_VFNMACCVV 0xfc00707f
  2402. +#define MATCH_VFNMACCVF 0xb4005057
  2403. +#define MASK_VFNMACCVF 0xfc00707f
  2404. +#define MATCH_VFMSACVV 0xb8001057
  2405. +#define MASK_VFMSACVV 0xfc00707f
  2406. +#define MATCH_VFMSACVF 0xb8005057
  2407. +#define MASK_VFMSACVF 0xfc00707f
  2408. +#define MATCH_VFNMSACVV 0xbc001057
  2409. +#define MASK_VFNMSACVV 0xfc00707f
  2410. +#define MATCH_VFNMSACVF 0xbc005057
  2411. +#define MASK_VFNMSACVF 0xfc00707f
  2412. +
  2413. +#define MATCH_VFWMACCVV 0xf0001057
  2414. +#define MASK_VFWMACCVV 0xfc00707f
  2415. +#define MATCH_VFWMACCVF 0xf0005057
  2416. +#define MASK_VFWMACCVF 0xfc00707f
  2417. +#define MATCH_VFWNMACCVV 0xf4001057
  2418. +#define MASK_VFWNMACCVV 0xfc00707f
  2419. +#define MATCH_VFWNMACCVF 0xf4005057
  2420. +#define MASK_VFWNMACCVF 0xfc00707f
  2421. +#define MATCH_VFWMSACVV 0xf8001057
  2422. +#define MASK_VFWMSACVV 0xfc00707f
  2423. +#define MATCH_VFWMSACVF 0xf8005057
  2424. +#define MASK_VFWMSACVF 0xfc00707f
  2425. +#define MATCH_VFWNMSACVV 0xfc001057
  2426. +#define MASK_VFWNMSACVV 0xfc00707f
  2427. +#define MATCH_VFWNMSACVF 0xfc005057
  2428. +#define MASK_VFWNMSACVF 0xfc00707f
  2429. +
  2430. +#define MATCH_VFSQRTV 0x4c001057
  2431. +#define MASK_VFSQRTV 0xfc0ff07f
  2432. +#define MATCH_VFRSQRT7V 0x4c021057
  2433. +#define MASK_VFRSQRT7V 0xfc0ff07f
  2434. +#define MATCH_VFREC7V 0x4c029057
  2435. +#define MASK_VFREC7V 0xfc0ff07f
  2436. +#define MATCH_VFCLASSV 0x4c081057
  2437. +#define MASK_VFCLASSV 0xfc0ff07f
  2438. +
  2439. +#define MATCH_VFMINVV 0x10001057
  2440. +#define MASK_VFMINVV 0xfc00707f
  2441. +#define MATCH_VFMINVF 0x10005057
  2442. +#define MASK_VFMINVF 0xfc00707f
  2443. +#define MATCH_VFMAXVV 0x18001057
  2444. +#define MASK_VFMAXVV 0xfc00707f
  2445. +#define MATCH_VFMAXVF 0x18005057
  2446. +#define MASK_VFMAXVF 0xfc00707f
  2447. +
  2448. +#define MATCH_VFSGNJVV 0x20001057
  2449. +#define MASK_VFSGNJVV 0xfc00707f
  2450. +#define MATCH_VFSGNJVF 0x20005057
  2451. +#define MASK_VFSGNJVF 0xfc00707f
  2452. +#define MATCH_VFSGNJNVV 0x24001057
  2453. +#define MASK_VFSGNJNVV 0xfc00707f
  2454. +#define MATCH_VFSGNJNVF 0x24005057
  2455. +#define MASK_VFSGNJNVF 0xfc00707f
  2456. +#define MATCH_VFSGNJXVV 0x28001057
  2457. +#define MASK_VFSGNJXVV 0xfc00707f
  2458. +#define MATCH_VFSGNJXVF 0x28005057
  2459. +#define MASK_VFSGNJXVF 0xfc00707f
  2460. +
  2461. +#define MATCH_VMFEQVV 0x60001057
  2462. +#define MASK_VMFEQVV 0xfc00707f
  2463. +#define MATCH_VMFEQVF 0x60005057
  2464. +#define MASK_VMFEQVF 0xfc00707f
  2465. +#define MATCH_VMFNEVV 0x70001057
  2466. +#define MASK_VMFNEVV 0xfc00707f
  2467. +#define MATCH_VMFNEVF 0x70005057
  2468. +#define MASK_VMFNEVF 0xfc00707f
  2469. +#define MATCH_VMFLTVV 0x6c001057
  2470. +#define MASK_VMFLTVV 0xfc00707f
  2471. +#define MATCH_VMFLTVF 0x6c005057
  2472. +#define MASK_VMFLTVF 0xfc00707f
  2473. +#define MATCH_VMFLEVV 0x64001057
  2474. +#define MASK_VMFLEVV 0xfc00707f
+#define MATCH_VMFLEVF 0x64005057
+#define MASK_VMFLEVF 0xfc00707f
+#define MATCH_VMFGTVF 0x74005057
+#define MASK_VMFGTVF 0xfc00707f
+#define MATCH_VMFGEVF 0x7c005057
+#define MASK_VMFGEVF 0xfc00707f
+
+#define MATCH_VFMERGEVFM 0x5c005057
+#define MASK_VFMERGEVFM 0xfe00707f
+#define MATCH_VFMVVF 0x5e005057
+#define MASK_VFMVVF 0xfff0707f
+
+#define MATCH_VFCVTXUFV 0x48001057
+#define MASK_VFCVTXUFV 0xfc0ff07f
+#define MATCH_VFCVTXFV 0x48009057
+#define MASK_VFCVTXFV 0xfc0ff07f
+#define MATCH_VFCVTFXUV 0x48011057
+#define MASK_VFCVTFXUV 0xfc0ff07f
+#define MATCH_VFCVTFXV 0x48019057
+#define MASK_VFCVTFXV 0xfc0ff07f
+#define MATCH_VFCVTRTZXUFV 0x48031057
+#define MASK_VFCVTRTZXUFV 0xfc0ff07f
+#define MATCH_VFCVTRTZXFV 0x48039057
+#define MASK_VFCVTRTZXFV 0xfc0ff07f
+#define MATCH_VFWCVTXUFV 0x48041057
+#define MASK_VFWCVTXUFV 0xfc0ff07f
+#define MATCH_VFWCVTXFV 0x48049057
+#define MASK_VFWCVTXFV 0xfc0ff07f
+#define MATCH_VFWCVTFXUV 0x48051057
+#define MASK_VFWCVTFXUV 0xfc0ff07f
+#define MATCH_VFWCVTFXV 0x48059057
+#define MASK_VFWCVTFXV 0xfc0ff07f
+#define MATCH_VFWCVTFFV 0x48061057
+#define MASK_VFWCVTFFV 0xfc0ff07f
+#define MATCH_VFWCVTRTZXUFV 0x48071057
+#define MASK_VFWCVTRTZXUFV 0xfc0ff07f
+#define MATCH_VFWCVTRTZXFV 0x48079057
+#define MASK_VFWCVTRTZXFV 0xfc0ff07f
+#define MATCH_VFNCVTXUFW 0x48081057
+#define MASK_VFNCVTXUFW 0xfc0ff07f
+#define MATCH_VFNCVTXFW 0x48089057
+#define MASK_VFNCVTXFW 0xfc0ff07f
+#define MATCH_VFNCVTFXUW 0x48091057
+#define MASK_VFNCVTFXUW 0xfc0ff07f
+#define MATCH_VFNCVTFXW 0x48099057
+#define MASK_VFNCVTFXW 0xfc0ff07f
+#define MATCH_VFNCVTFFW 0x480a1057
+#define MASK_VFNCVTFFW 0xfc0ff07f
+#define MATCH_VFNCVTRODFFW 0x480a9057
+#define MASK_VFNCVTRODFFW 0xfc0ff07f
+#define MATCH_VFNCVTRTZXUFW 0x480b1057
+#define MASK_VFNCVTRTZXUFW 0xfc0ff07f
+#define MATCH_VFNCVTRTZXFW 0x480b9057
+#define MASK_VFNCVTRTZXFW 0xfc0ff07f
+
+#define MATCH_VREDSUMVS 0x00002057
+#define MASK_VREDSUMVS 0xfc00707f
+#define MATCH_VREDMAXVS 0x1c002057
+#define MASK_VREDMAXVS 0xfc00707f
+#define MATCH_VREDMAXUVS 0x18002057
+#define MASK_VREDMAXUVS 0xfc00707f
+#define MATCH_VREDMINVS 0x14002057
+#define MASK_VREDMINVS 0xfc00707f
+#define MATCH_VREDMINUVS 0x10002057
+#define MASK_VREDMINUVS 0xfc00707f
+#define MATCH_VREDANDVS 0x04002057
+#define MASK_VREDANDVS 0xfc00707f
+#define MATCH_VREDORVS 0x08002057
+#define MASK_VREDORVS 0xfc00707f
+#define MATCH_VREDXORVS 0x0c002057
+#define MASK_VREDXORVS 0xfc00707f
+
+#define MATCH_VWREDSUMUVS 0xc0000057
+#define MASK_VWREDSUMUVS 0xfc00707f
+#define MATCH_VWREDSUMVS 0xc4000057
+#define MASK_VWREDSUMVS 0xfc00707f
+
+#define MATCH_VFREDOSUMVS 0x0c001057
+#define MASK_VFREDOSUMVS 0xfc00707f
+#define MATCH_VFREDSUMVS 0x04001057
+#define MASK_VFREDSUMVS 0xfc00707f
+#define MATCH_VFREDMAXVS 0x1c001057
+#define MASK_VFREDMAXVS 0xfc00707f
+#define MATCH_VFREDMINVS 0x14001057
+#define MASK_VFREDMINVS 0xfc00707f
+
+#define MATCH_VFWREDOSUMVS 0xcc001057
+#define MASK_VFWREDOSUMVS 0xfc00707f
+#define MATCH_VFWREDSUMVS 0xc4001057
+#define MASK_VFWREDSUMVS 0xfc00707f
+
+#define MATCH_VMANDMM 0x66002057
+#define MASK_VMANDMM 0xfe00707f
+#define MATCH_VMNANDMM 0x76002057
+#define MASK_VMNANDMM 0xfe00707f
+#define MATCH_VMANDNOTMM 0x62002057
+#define MASK_VMANDNOTMM 0xfe00707f
+#define MATCH_VMXORMM 0x6e002057
+#define MASK_VMXORMM 0xfe00707f
+#define MATCH_VMORMM 0x6a002057
+#define MASK_VMORMM 0xfe00707f
+#define MATCH_VMNORMM 0x7a002057
+#define MASK_VMNORMM 0xfe00707f
+#define MATCH_VMORNOTMM 0x72002057
+#define MASK_VMORNOTMM 0xfe00707f
+#define MATCH_VMXNORMM 0x7e002057
+#define MASK_VMXNORMM 0xfe00707f
+
+#define MATCH_VPOPCM 0x40082057
+#define MASK_VPOPCM 0xfc0ff07f
+#define MATCH_VFIRSTM 0x4008a057
+#define MASK_VFIRSTM 0xfc0ff07f
+
+#define MATCH_VMSBFM 0x5000a057
+#define MASK_VMSBFM 0xfc0ff07f
+#define MATCH_VMSIFM 0x5001a057
+#define MASK_VMSIFM 0xfc0ff07f
+#define MATCH_VMSOFM 0x50012057
+#define MASK_VMSOFM 0xfc0ff07f
+#define MATCH_VIOTAM 0x50082057
+#define MASK_VIOTAM 0xfc0ff07f
+#define MATCH_VIDV 0x5008a057
+#define MASK_VIDV 0xfdfff07f
+
+#define MATCH_VMVXS 0x42002057
+#define MASK_VMVXS 0xfe0ff07f
+#define MATCH_VMVSX 0x42006057
+#define MASK_VMVSX 0xfff0707f
+
+#define MATCH_VFMVFS 0x42001057
+#define MASK_VFMVFS 0xfe0ff07f
+#define MATCH_VFMVSF 0x42005057
+#define MASK_VFMVSF 0xfff0707f
+
+#define MATCH_VSLIDEUPVX 0x38004057
+#define MASK_VSLIDEUPVX 0xfc00707f
+#define MATCH_VSLIDEUPVI 0x38003057
+#define MASK_VSLIDEUPVI 0xfc00707f
+#define MATCH_VSLIDEDOWNVX 0x3c004057
+#define MASK_VSLIDEDOWNVX 0xfc00707f
+#define MATCH_VSLIDEDOWNVI 0x3c003057
+#define MASK_VSLIDEDOWNVI 0xfc00707f
+
+#define MATCH_VSLIDE1UPVX 0x38006057
+#define MASK_VSLIDE1UPVX 0xfc00707f
+#define MATCH_VSLIDE1DOWNVX 0x3c006057
+#define MASK_VSLIDE1DOWNVX 0xfc00707f
+
+#define MATCH_VFSLIDE1UPVF 0x38005057
+#define MASK_VFSLIDE1UPVF 0xfc00707f
+#define MATCH_VFSLIDE1DOWNVF 0x3c005057
+#define MASK_VFSLIDE1DOWNVF 0xfc00707f
+
+#define MATCH_VRGATHERVV 0x30000057
+#define MASK_VRGATHERVV 0xfc00707f
+#define MATCH_VRGATHERVX 0x30004057
+#define MASK_VRGATHERVX 0xfc00707f
+#define MATCH_VRGATHERVI 0x30003057
+#define MASK_VRGATHERVI 0xfc00707f
+#define MATCH_VRGATHEREI16VV 0x38000057
+#define MASK_VRGATHEREI16VV 0xfc00707f
+
+#define MATCH_VCOMPRESSVM 0x5e002057
+#define MASK_VCOMPRESSVM 0xfe00707f
+
+#define MATCH_VMV1RV 0x9e003057
+#define MASK_VMV1RV 0xfe0ff07f
+#define MATCH_VMV2RV 0x9e00b057
+#define MASK_VMV2RV 0xfe0ff07f
+#define MATCH_VMV4RV 0x9e01b057
+#define MASK_VMV4RV 0xfe0ff07f
+#define MATCH_VMV8RV 0x9e03b057
+#define MASK_VMV8RV 0xfe0ff07f
+
+#define MATCH_VDOTVV 0xe4000057
+#define MASK_VDOTVV 0xfc00707f
+#define MATCH_VDOTUVV 0xe0000057
+#define MASK_VDOTUVV 0xfc00707f
+#define MATCH_VFDOTVV 0xe4001057
+#define MASK_VFDOTVV 0xfc00707f
+/* END RVV */
+
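A brief aside on how these tables are consumed: an instruction word is classified by masking off its operand fields and comparing the fixed bits against the match constant. A minimal, self-contained C sketch (not part of the patch; the vredsum.vs word below is hand-assembled for illustration):

#include <stdint.h>
#include <stdio.h>

#define MATCH_VREDSUMVS 0x00002057
#define MASK_VREDSUMVS 0xfc00707f

int
main (void)
{
  /* Hand-assembled vredsum.vs v4,v8,v2 (unmasked): funct6=000000,
     vm=1, vs2=8, vs1=2, funct3=010 (OPMVV), vd=4, opcode=0x57.  */
  uint32_t insn = 0x02812257;

  /* The mask keeps only the fixed fields (funct6, funct3, opcode),
     so the test is independent of the register operands.  */
  if ((insn & MASK_VREDSUMVS) == MATCH_VREDSUMVS)
    printf ("vredsum.vs\n");
  return 0;
}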
#define MATCH_CUSTOM0 0xb
#define MASK_CUSTOM0 0x707f
#define MATCH_CUSTOM0_RS1 0x200b
@@ -830,6 +2896,13 @@
#define CSR_FFLAGS 0x1
#define CSR_FRM 0x2
#define CSR_FCSR 0x3
+#define CSR_VSTART 0x008
+#define CSR_VXSAT 0x009
+#define CSR_VXRM 0x00a
+#define CSR_VCSR 0x00f
+#define CSR_VL 0xc20
+#define CSR_VTYPE 0xc21
+#define CSR_VLENB 0xc22
#define CSR_DCSR 0x7b0
#define CSR_DPC 0x7b1
#define CSR_DSCRATCH0 0x7b2
@@ -912,6 +2985,7 @@ DECLARE_INSN(sb, MATCH_SB, MASK_SB)
DECLARE_INSN(sh, MATCH_SH, MASK_SH)
DECLARE_INSN(sw, MATCH_SW, MASK_SW)
DECLARE_INSN(sd, MATCH_SD, MASK_SD)
+DECLARE_INSN(pause, MATCH_PAUSE, MASK_PAUSE)
DECLARE_INSN(fence, MATCH_FENCE, MASK_FENCE)
DECLARE_INSN(fence_i, MATCH_FENCE_I, MASK_FENCE_I)
DECLARE_INSN(mul, MATCH_MUL, MASK_MUL)
@@ -959,6 +3033,12 @@ DECLARE_INSN(dret, MATCH_DRET, MASK_DRET)
DECLARE_INSN(sfence_vm, MATCH_SFENCE_VM, MASK_SFENCE_VM)
DECLARE_INSN(sfence_vma, MATCH_SFENCE_VMA, MASK_SFENCE_VMA)
DECLARE_INSN(wfi, MATCH_WFI, MASK_WFI)
+/* Custom cache control instructions. */
+DECLARE_INSN(cflush_d_l1, MATCH_CFLUSH_D_L1, MASK_CFLUSH_D_L1)
+DECLARE_INSN(cdiscard_d_l1, MATCH_CDISCARD_D_L1, MASK_CDISCARD_D_L1)
+DECLARE_INSN(cflush_d_l2, MATCH_CFLUSH_D_L2, MASK_CFLUSH_D_L2)
+DECLARE_INSN(cdiscard_d_l2, MATCH_CDISCARD_D_L2, MASK_CDISCARD_D_L2)
+/* end */
DECLARE_INSN(csrrw, MATCH_CSRRW, MASK_CSRRW)
DECLARE_INSN(csrrs, MATCH_CSRRS, MASK_CSRRS)
DECLARE_INSN(csrrc, MATCH_CSRRC, MASK_CSRRC)
@@ -1043,6 +3123,109 @@ DECLARE_INSN(fcvt_q_wu, MATCH_FCVT_Q_WU, MASK_FCVT_Q_WU)
DECLARE_INSN(fcvt_q_l, MATCH_FCVT_Q_L, MASK_FCVT_Q_L)
DECLARE_INSN(fcvt_q_lu, MATCH_FCVT_Q_LU, MASK_FCVT_Q_LU)
DECLARE_INSN(fmv_q_x, MATCH_FMV_Q_X, MASK_FMV_Q_X)
+DECLARE_INSN(clz, MATCH_CLZ, MASK_CLZ)
+DECLARE_INSN(ctz, MATCH_CTZ, MASK_CTZ)
+DECLARE_INSN(cpop, MATCH_CPOP, MASK_CPOP)
+DECLARE_INSN(min, MATCH_MIN, MASK_MIN)
+DECLARE_INSN(minu, MATCH_MINU, MASK_MINU)
+DECLARE_INSN(max, MATCH_MAX, MASK_MAX)
+DECLARE_INSN(maxu, MATCH_MAXU, MASK_MAXU)
+DECLARE_INSN(sext_b, MATCH_SEXT_B, MASK_SEXT_B)
+DECLARE_INSN(sext_h, MATCH_SEXT_H, MASK_SEXT_H)
+DECLARE_INSN(andn, MATCH_ANDN, MASK_ANDN)
+DECLARE_INSN(orn, MATCH_ORN, MASK_ORN)
+DECLARE_INSN(xnor, MATCH_XNOR, MASK_XNOR)
+DECLARE_INSN(rol, MATCH_ROL, MASK_ROL)
+DECLARE_INSN(ror, MATCH_ROR, MASK_ROR)
+DECLARE_INSN(rori, MATCH_RORI, MASK_RORI)
+DECLARE_INSN(clzw, MATCH_CLZW, MASK_CLZW)
+DECLARE_INSN(ctzw, MATCH_CTZW, MASK_CTZW)
+DECLARE_INSN(cpopw, MATCH_CPOPW, MASK_CPOPW)
+DECLARE_INSN(rolw, MATCH_ROLW, MASK_ROLW)
+DECLARE_INSN(rorw, MATCH_RORW, MASK_RORW)
+DECLARE_INSN(roriw, MATCH_RORIW, MASK_RORIW)
+DECLARE_INSN(sh1add, MATCH_SH1ADD, MASK_SH1ADD)
+DECLARE_INSN(sh2add, MATCH_SH2ADD, MASK_SH2ADD)
+DECLARE_INSN(sh3add, MATCH_SH3ADD, MASK_SH3ADD)
+DECLARE_INSN(sh1add_uw, MATCH_SH1ADD_UW, MASK_SH1ADD_UW)
+DECLARE_INSN(sh2add_uw, MATCH_SH2ADD_UW, MASK_SH2ADD_UW)
+DECLARE_INSN(sh3add_uw, MATCH_SH3ADD_UW, MASK_SH3ADD_UW)
+DECLARE_INSN(add_uw, MATCH_ADD_UW, MASK_ADD_UW)
+DECLARE_INSN(slli_uw, MATCH_SLLI_UW, MASK_SLLI_UW)
+DECLARE_INSN(clmul, MATCH_CLMUL, MASK_CLMUL)
+DECLARE_INSN(clmulh, MATCH_CLMULH, MASK_CLMULH)
+DECLARE_INSN(clmulr, MATCH_CLMULR, MASK_CLMULR)
+DECLARE_INSN(pack, MATCH_PACK, MASK_PACK)
+DECLARE_INSN(packu, MATCH_PACKU, MASK_PACKU)
+DECLARE_INSN(packh, MATCH_PACKH, MASK_PACKH)
+DECLARE_INSN(packw, MATCH_PACKW, MASK_PACKW)
+DECLARE_INSN(packuw, MATCH_PACKUW, MASK_PACKUW)
+DECLARE_INSN(grev, MATCH_GREV, MASK_GREV)
+DECLARE_INSN(grevi, MATCH_GREVI, MASK_GREVI)
+DECLARE_INSN(grevw, MATCH_GREVW, MASK_GREVW)
+DECLARE_INSN(greviw, MATCH_GREVIW, MASK_GREVIW)
+DECLARE_INSN(gorc, MATCH_GORC, MASK_GORC)
+DECLARE_INSN(gorci, MATCH_GORCI, MASK_GORCI)
+DECLARE_INSN(gorcw, MATCH_GORCW, MASK_GORCW)
+DECLARE_INSN(gorciw, MATCH_GORCIW, MASK_GORCIW)
+DECLARE_INSN(shfl, MATCH_SHFL, MASK_SHFL)
+DECLARE_INSN(shfli, MATCH_SHFLI, MASK_SHFLI)
+DECLARE_INSN(shflw, MATCH_SHFLW, MASK_SHFLW)
+DECLARE_INSN(unshfl, MATCH_UNSHFL, MASK_UNSHFL)
+DECLARE_INSN(unshfli, MATCH_UNSHFLI, MASK_UNSHFLI)
+DECLARE_INSN(unshflw, MATCH_UNSHFLW, MASK_UNSHFLW)
+DECLARE_INSN(xperm_n, MATCH_XPERM_N, MASK_XPERM_N)
+DECLARE_INSN(xperm_b, MATCH_XPERM_B, MASK_XPERM_B)
+DECLARE_INSN(xperm_h, MATCH_XPERM_H, MASK_XPERM_H)
+DECLARE_INSN(xperm_w, MATCH_XPERM_W, MASK_XPERM_W)
+DECLARE_INSN(bset, MATCH_BSET, MASK_BSET)
+DECLARE_INSN(bclr, MATCH_BCLR, MASK_BCLR)
+DECLARE_INSN(binv, MATCH_BINV, MASK_BINV)
+DECLARE_INSN(bext, MATCH_BEXT, MASK_BEXT)
+DECLARE_INSN(bcompress, MATCH_BCOMPRESS, MASK_BCOMPRESS)
+DECLARE_INSN(bdecompress, MATCH_BDECOMPRESS, MASK_BDECOMPRESS)
+DECLARE_INSN(bseti, MATCH_BSETI, MASK_BSETI)
+DECLARE_INSN(bclri, MATCH_BCLRI, MASK_BCLRI)
+DECLARE_INSN(binvi, MATCH_BINVI, MASK_BINVI)
+DECLARE_INSN(bexti, MATCH_BEXTI, MASK_BEXTI)
+DECLARE_INSN(bsetw, MATCH_BSETW, MASK_BSETW)
+DECLARE_INSN(bclrw, MATCH_BCLRW, MASK_BCLRW)
+DECLARE_INSN(binvw, MATCH_BINVW, MASK_BINVW)
+DECLARE_INSN(bextw, MATCH_BEXTW, MASK_BEXTW)
+DECLARE_INSN(bcompressw, MATCH_BCOMPRESSW, MASK_BCOMPRESSW)
+DECLARE_INSN(bdecompressw, MATCH_BDECOMPRESSW, MASK_BDECOMPRESSW)
+DECLARE_INSN(bsetiw, MATCH_BSETIW, MASK_BSETIW)
+DECLARE_INSN(bclriw, MATCH_BCLRIW, MASK_BCLRIW)
+DECLARE_INSN(binviw, MATCH_BINVIW, MASK_BINVIW)
+DECLARE_INSN(slo, MATCH_SLO, MASK_SLO)
+DECLARE_INSN(sro, MATCH_SRO, MASK_SRO)
+DECLARE_INSN(sloi, MATCH_SLOI, MASK_SLOI)
+DECLARE_INSN(sroi, MATCH_SROI, MASK_SROI)
+DECLARE_INSN(slow, MATCH_SLOW, MASK_SLOW)
+DECLARE_INSN(srow, MATCH_SROW, MASK_SROW)
+DECLARE_INSN(sloiw, MATCH_SLOIW, MASK_SLOIW)
+DECLARE_INSN(sroiw, MATCH_SROIW, MASK_SROIW)
+DECLARE_INSN(bfp, MATCH_BFP, MASK_BFP)
+DECLARE_INSN(bfpw, MATCH_BFPW, MASK_BFPW)
+DECLARE_INSN(bmator, MATCH_BMATOR, MASK_BMATOR)
+DECLARE_INSN(bmatxor, MATCH_BMATXOR, MASK_BMATXOR)
+DECLARE_INSN(bmatflip, MATCH_BMATFLIP, MASK_BMATFLIP)
+DECLARE_INSN(crc32_b, MATCH_CRC32_B, MASK_CRC32_B)
+DECLARE_INSN(crc32_h, MATCH_CRC32_H, MASK_CRC32_H)
+DECLARE_INSN(crc32_w, MATCH_CRC32_W, MASK_CRC32_W)
+DECLARE_INSN(crc32c_b, MATCH_CRC32C_B, MASK_CRC32C_B)
+DECLARE_INSN(crc32c_h, MATCH_CRC32C_H, MASK_CRC32C_H)
+DECLARE_INSN(crc32c_w, MATCH_CRC32C_W, MASK_CRC32C_W)
+DECLARE_INSN(crc32_d, MATCH_CRC32_D, MASK_CRC32_D)
+DECLARE_INSN(crc32c_d, MATCH_CRC32C_D, MASK_CRC32C_D)
+DECLARE_INSN(cmix, MATCH_CMIX, MASK_CMIX)
+DECLARE_INSN(cmov, MATCH_CMOV, MASK_CMOV)
+DECLARE_INSN(fsl, MATCH_FSL, MASK_FSL)
+DECLARE_INSN(fsr, MATCH_FSR, MASK_FSR)
+DECLARE_INSN(fsri, MATCH_FSRI, MASK_FSRI)
+DECLARE_INSN(fslw, MATCH_FSLW, MASK_FSLW)
+DECLARE_INSN(fsrw, MATCH_FSRW, MASK_FSRW)
+DECLARE_INSN(fsriw, MATCH_FSRIW, MASK_FSRIW)
DECLARE_INSN(flw, MATCH_FLW, MASK_FLW)
DECLARE_INSN(fld, MATCH_FLD, MASK_FLD)
DECLARE_INSN(flq, MATCH_FLQ, MASK_FLQ)
@@ -1365,6 +3548,13 @@ DECLARE_CSR(mhcounteren, CSR_MHCOUNTEREN, CSR_CLASS_I, PRIV_SPEC_CLASS_1P9P1, PR
DECLARE_CSR(fflags, CSR_FFLAGS, CSR_CLASS_F, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
DECLARE_CSR(frm, CSR_FRM, CSR_CLASS_F, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
DECLARE_CSR(fcsr, CSR_FCSR, CSR_CLASS_F, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
+DECLARE_CSR(vstart, CSR_VSTART, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
+DECLARE_CSR(vxsat, CSR_VXSAT, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
+DECLARE_CSR(vxrm, CSR_VXRM, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
+DECLARE_CSR(vcsr, CSR_VCSR, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
+DECLARE_CSR(vl, CSR_VL, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
+DECLARE_CSR(vtype, CSR_VTYPE, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
+DECLARE_CSR(vlenb, CSR_VLENB, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
DECLARE_CSR(dcsr, CSR_DCSR, CSR_CLASS_DEBUG, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
DECLARE_CSR(dpc, CSR_DPC, CSR_CLASS_DEBUG, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
DECLARE_CSR(dscratch0, CSR_DSCRATCH0, CSR_CLASS_DEBUG, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
@@ -1391,4 +3581,4 @@ DECLARE_CSR_ALIAS(itrigger, CSR_TDATA1, CSR_CLASS_DEBUG, PRIV_SPEC_CLASS_NONE, P
DECLARE_CSR_ALIAS(etrigger, CSR_TDATA1, CSR_CLASS_DEBUG, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
DECLARE_CSR_ALIAS(textra32, CSR_TDATA3, CSR_CLASS_DEBUG, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
DECLARE_CSR_ALIAS(textra64, CSR_TDATA3, CSR_CLASS_DEBUG, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
-#endif /* DECLARE_CSR_ALIAS. */
+#endif /* DECLARE_CSR_ALIAS. */
\ No newline at end of file
diff --git a/include/opcode/riscv.h b/include/opcode/riscv.h
index 2f1bc793e5..c90fc16f34 100644
--- a/include/opcode/riscv.h
+++ b/include/opcode/riscv.h
@@ -52,6 +52,28 @@ static const char * const riscv_pred_succ[16] =
"i", "iw", "ir", "irw", "io", "iow", "ior", "iorw"
};
+/* List of vsetvli vsew constants. */
+static const char * const riscv_vsew[8] =
+{
+ "e8", "e16", "e32", "e64", "e128", "e256", "e512", "e1024"
+};
+
+/* List of vsetvli vlmul constants. */
+static const char * const riscv_vlmul[8] =
+{
+ "m1", "m2", "m4", "m8", 0, "mf8", "mf4", "mf2"
+};
+
+static const char * const riscv_vta[2] =
+{
+ "tu", "ta"
+};
+
+static const char * const riscv_vma[2] =
+{
+ "mu", "ma"
+};
+
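To see how these four tables combine, here is a rough C sketch (illustrative only, not part of the patch; the field positions follow the OP_SH_VLMUL/OP_SH_VSEW/OP_SH_VTA/OP_SH_VMA defines added further down in this header):

#include <stdio.h>

static const char * const vsew_names[8] =
  { "e8", "e16", "e32", "e64", "e128", "e256", "e512", "e1024" };
static const char * const vlmul_names[8] =
  { "m1", "m2", "m4", "m8", 0, "mf8", "mf4", "mf2" };
static const char * const vta_names[2] = { "tu", "ta" };
static const char * const vma_names[2] = { "mu", "ma" };

static void
print_vtype (unsigned int imm)
{
  unsigned int vlmul = imm & 0x7;        /* bits 2:0 */
  unsigned int vsew = (imm >> 3) & 0x7;  /* bits 5:3 */
  unsigned int vta = (imm >> 6) & 0x1;   /* bit 6 */
  unsigned int vma = (imm >> 7) & 0x1;   /* bit 7 */

  /* Slot 4 of the vlmul table is a reserved hole; fall back to the
     raw value in that case, as the disassembler change below does.  */
  if (vlmul_names[vlmul] == 0)
    printf ("%u\n", imm);
  else
    printf ("%s,%s,%s,%s\n", vsew_names[vsew], vlmul_names[vlmul],
            vta_names[vta], vma_names[vma]);
}

int
main (void)
{
  print_vtype (0xd0);  /* prints "e32,m1,ta,ma" */
  return 0;
}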
#define RVC_JUMP_BITS 11
#define RVC_JUMP_REACH ((1ULL << RVC_JUMP_BITS) * RISCV_JUMP_ALIGN)
@@ -100,6 +122,17 @@ static const char * const riscv_pred_succ[16] =
#define EXTRACT_RVC_J_IMM(x) \
((RV_X(x, 3, 3) << 1) | (RV_X(x, 11, 1) << 4) | (RV_X(x, 2, 1) << 5) | (RV_X(x, 7, 1) << 6) | (RV_X(x, 6, 1) << 7) | (RV_X(x, 9, 2) << 8) | (RV_X(x, 8, 1) << 10) | (-RV_X(x, 12, 1) << 11))
+#define EXTRACT_RVV_VI_IMM(x) \
+ (RV_X(x, 15, 5) | (-RV_X(x, 19, 1) << 5))
+#define EXTRACT_RVV_VI_UIMM(x) \
+ (RV_X(x, 15, 5))
+#define EXTRACT_RVV_OFFSET(x) \
+ (RV_X(x, 29, 3))
+#define EXTRACT_RVV_VB_IMM(x) \
+ (RV_X(x, 20, 10))
+#define EXTRACT_RVV_VC_IMM(x) \
+ (RV_X(x, 20, 11))
+
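The first of these macros sign-extends the 5-bit simm5 field by negating its top bit and shifting it back over the high bits. A compilable sketch of that trick, assuming the usual RV_X bit-extraction helper defined earlier in this header:

#include <stdio.h>

#define RV_X(x, s, n)  (((x) >> (s)) & ((1 << (n)) - 1))
#define EXTRACT_RVV_VI_IMM(x) \
  (RV_X(x, 15, 5) | (-RV_X(x, 19, 1) << 5))

int
main (void)
{
  unsigned int insn = 0x1fu << 15;  /* simm5 field = 0b11111 */
  /* Bit 4 of the field is set, so the value sign-extends to -1.  */
  printf ("%d\n", (int) EXTRACT_RVV_VI_IMM (insn));
  return 0;
}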
#define ENCODE_ITYPE_IMM(x) \
(RV_X(x, 0, 12) << 20)
#define ENCODE_STYPE_IMM(x) \
@@ -234,6 +267,11 @@ static const char * const riscv_pred_succ[16] =
#define OP_SH_FUNCT7 25
#define OP_MASK_FUNCT2 0x3
#define OP_SH_FUNCT2 25
+#define OP_MASK_PREF_TYPE 0x0f
+#define OP_SH_PREF_TYPE 20
+#define OP_MASK_PREF_OFFSET 0xff
+#define OP_SH_PREF_OFFSET 24
+
/* RVC fields. */
@@ -256,6 +294,35 @@ static const char * const riscv_pred_succ[16] =
#define OP_MASK_CFUNCT2 0x3
#define OP_SH_CFUNCT2 5
+/* RVV fields. */
+
+#define OP_MASK_VD 0x1f
+#define OP_SH_VD 7
+#define OP_MASK_VS1 0x1f
+#define OP_SH_VS1 15
+#define OP_MASK_VS2 0x1f
+#define OP_SH_VS2 20
+#define OP_MASK_VIMM 0x1f
+#define OP_SH_VIMM 15
+#define OP_MASK_VMASK 0x1
+#define OP_SH_VMASK 25
+#define OP_MASK_VFUNCT6 0x3f
+#define OP_SH_VFUNCT6 26
+
+#define OP_MASK_VLMUL 0x7
+#define OP_SH_VLMUL 0
+#define OP_MASK_VSEW 0x7
+#define OP_SH_VSEW 3
+#define OP_MASK_VTA 0x1
+#define OP_SH_VTA 6
+#define OP_MASK_VMA 0x1
+#define OP_SH_VMA 7
+#define OP_MASK_VTYPE_RES 0x1
+#define OP_SH_VTYPE_RES 10
+
+#define OP_MASK_VWD 0x1
+#define OP_SH_VWD 26
+
/* ABI names for selected x-registers. */
#define X_RA 1
@@ -269,6 +336,8 @@ static const char * const riscv_pred_succ[16] =
#define NGPR 32
#define NFPR 32
+#define NVECR 32
+#define NVECM 1
/* These fake label defines are used by both the assembler and
libopcodes. The assembler uses this when it needs to generate a fake
@@ -309,6 +378,14 @@ enum riscv_insn_class
INSN_CLASS_D_AND_C,
INSN_CLASS_F_AND_C,
INSN_CLASS_Q,
+ INSN_CLASS_ZBA,
+ INSN_CLASS_ZBB,
+ INSN_CLASS_ZBC,
+ INSN_CLASS_ZBS,
+ INSN_CLASS_V,
+ INSN_CLASS_V_AND_F,
+ INSN_CLASS_V_OR_ZVAMO,
+ INSN_CLASS_V_OR_ZVLSSEG,
};
/* This structure holds information for a particular instruction. */
@@ -351,7 +428,8 @@ enum riscv_isa_spec_class
ISA_SPEC_CLASS_2P2,
ISA_SPEC_CLASS_20190608,
- ISA_SPEC_CLASS_20191213
+ ISA_SPEC_CLASS_20191213,
+ ISA_SPEC_CLASS_DRAFT
};
/* This structure holds version information for specific ISA. */
@@ -476,6 +554,12 @@ enum
M_CALL,
M_J,
M_LI,
+ M_ZEXTH,
+ M_ZEXTW,
+ M_SEXTB,
+ M_SEXTH,
+ M_VMSGE,
+ M_VMSGEU,
M_NUM_MACROS
};
@@ -484,6 +568,8 @@ extern const char * const riscv_gpr_names_numeric[NGPR];
extern const char * const riscv_gpr_names_abi[NGPR];
extern const char * const riscv_fpr_names_numeric[NFPR];
extern const char * const riscv_fpr_names_abi[NFPR];
+extern const char * const riscv_vecr_names_numeric[NVECR];
+extern const char * const riscv_vecm_names_numeric[NVECM];
extern const struct riscv_opcode riscv_opcodes[];
extern const struct riscv_opcode riscv_insn_types[];
diff --git a/opcodes/riscv-dis.c b/opcodes/riscv-dis.c
index 655ce4ad0b..e5989566f2 100644
--- a/opcodes/riscv-dis.c
+++ b/opcodes/riscv-dis.c
@@ -43,6 +43,7 @@ struct riscv_private_data
static const char * const *riscv_gpr_names;
static const char * const *riscv_fpr_names;
+static const char * const *riscv_vecr_names;
/* Other options. */
static int no_aliases; /* If set disassemble as most general inst. */
@@ -52,6 +53,7 @@
{
riscv_gpr_names = riscv_gpr_names_abi;
riscv_fpr_names = riscv_fpr_names_abi;
+ riscv_vecr_names = riscv_vecr_names_numeric;
no_aliases = 0;
}
@@ -344,6 +346,14 @@ print_insn_args (const char *d, insn_t l, bfd_vma pc, disassemble_info *info)
print (info->stream, "0x%x", (int)EXTRACT_OPERAND (SHAMTW, l));
break;
+ case '#':
+ print (info->stream, "0x%x", (int)EXTRACT_OPERAND (PREF_TYPE, l));
+ break;
+
+ case '+':
+ print (info->stream, "0x%x", (int)EXTRACT_OPERAND (PREF_OFFSET, l));
+ break;
+
case 'S':
case 'U':
print (info->stream, "%s", riscv_fpr_names[rs1]);
@@ -401,6 +411,88 @@ print_insn_args (const char *d, insn_t l, bfd_vma pc, disassemble_info *info)
print (info->stream, "%d", rs1);
break;
+ case 'V': /* RVV */
+ switch (*++d)
+ {
+ case 'd':
+ case 'f':
+ print (info->stream, "%s",
+ riscv_vecr_names[EXTRACT_OPERAND (VD, l)]);
+ break;
+
+ case 'e':
+ if (!EXTRACT_OPERAND (VWD, l))
+ print (info->stream, "%s", riscv_gpr_names[0]);
+ else
+ print (info->stream, "%s",
+ riscv_vecr_names[EXTRACT_OPERAND (VD, l)]);
+ break;
+
+ case 's':
+ print (info->stream, "%s",
+ riscv_vecr_names[EXTRACT_OPERAND (VS1, l)]);
+ break;
+
+ case 't':
+ case 'u': /* VS1 == VS2 already verified at this point. */
+ case 'v': /* VD == VS1 == VS2 already verified at this point. */
+ print (info->stream, "%s",
+ riscv_vecr_names[EXTRACT_OPERAND (VS2, l)]);
+ break;
+
+ case '0':
+ print (info->stream, "%s", riscv_vecr_names[0]);
+ break;
+
+ case 'b':
+ case 'c':
+ {
+ int imm = (*d == 'b') ? EXTRACT_RVV_VB_IMM (l)
+ : EXTRACT_RVV_VC_IMM (l);
+ unsigned int imm_vlmul = EXTRACT_OPERAND (VLMUL, imm);
+ unsigned int imm_vsew = EXTRACT_OPERAND (VSEW, imm);
+ unsigned int imm_vta = EXTRACT_OPERAND (VTA, imm);
+ unsigned int imm_vma = EXTRACT_OPERAND (VMA, imm);
+ unsigned int imm_vtype_res = EXTRACT_OPERAND (VTYPE_RES, imm);
+
+ if (imm_vsew < ARRAY_SIZE (riscv_vsew)
+ && imm_vlmul < ARRAY_SIZE (riscv_vlmul)
+ && imm_vta < ARRAY_SIZE (riscv_vta)
+ && imm_vma < ARRAY_SIZE (riscv_vma)
+ && ! imm_vtype_res)
+ print (info->stream, "%s,%s,%s,%s", riscv_vsew[imm_vsew],
+ riscv_vlmul[imm_vlmul], riscv_vta[imm_vta],
+ riscv_vma[imm_vma]);
+ else
+ print (info->stream, "%d", imm);
+ }
+ break;
+
+ case 'i':
+ print (info->stream, "%d", (int)EXTRACT_RVV_VI_IMM (l));
+ break;
+
+ case 'j':
+ print (info->stream, "%d", (int)EXTRACT_RVV_VI_UIMM (l));
+ break;
+
+ case 'k':
+ print (info->stream, "%d", (int)EXTRACT_RVV_OFFSET (l));
+ break;
+
+ case 'm':
+ if (! EXTRACT_OPERAND (VMASK, l))
+ print (info->stream, ",%s", riscv_vecm_names_numeric[0]);
+ break;
+
+ default:
+ /* xgettext:c-format */
+ print (info->stream, _("# internal error, undefined modifier (V%c)"),
+ *d);
+ return;
+ }
+ break;
+
default:
/* xgettext:c-format */
print (info->stream, _("# internal error, undefined modifier (%c)"),
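One behavioral note on the new 'Vm' case above: the mask operand appears in output only when the instruction is actually masked (vm == 0), so a masked vadd.vv prints as "vadd.vv v4,v8,v12,v0.t" and an unmasked one as "vadd.vv v4,v8,v12". A tiny self-contained sketch of just that logic (illustrative; the bit position comes from the OP_SH_VMASK define in this patch):

#include <stdio.h>

static const char * const vecm_names[1] = { "v0.t" };

static void
print_mask_suffix (unsigned int insn)
{
  unsigned int vm = (insn >> 25) & 0x1;  /* the vm bit */
  if (!vm)
    printf (",%s", vecm_names[0]);       /* masked: emit ",v0.t" */
}

int
main (void)
{
  print_mask_suffix (0x00000000);  /* masked: prints ",v0.t" */
  print_mask_suffix (0x02000000);  /* unmasked: prints nothing */
  printf ("\n");
  return 0;
}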
diff --git a/opcodes/riscv-opc.c b/opcodes/riscv-opc.c
index 03e3bd7c05..ffa817cc65 100644
--- a/opcodes/riscv-opc.c
+++ b/opcodes/riscv-opc.c
@@ -24,6 +24,130 @@
#include "opcode/riscv.h"
#include <stdio.h>
+#define MASK_SHAMT (OP_MASK_SHAMT << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV_32 (0b11111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV_64 (0b111111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV_P_32 (0b00001 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV_P_64 (0b000001 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV_N_32 (0b00011 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV_N_64 (0b000011 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV_B_32 (0b00111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV_B_64 (0b000111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV_H_32 (0b01111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV_H_64 (0b001111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV_W_64 (0b011111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV2_32 (0b11110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV2_64 (0b111110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV2_N_32 (0b00010 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV2_N_64 (0b000010 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV2_B_32 (0b00110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV2_B_64 (0b000110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV2_H_32 (0b01110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV2_H_64 (0b001110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV2_W_64 (0b011110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV4_32 (0b11100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV4_64 (0b111100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV4_B_32 (0b00100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV4_B_64 (0b000100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV4_H_32 (0b01100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV4_H_64 (0b001100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV4_W_64 (0b011100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV8_32 (0b11000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV8_64 (0b111000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV8_H_32 (0b01000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV8_H_64 (0b001000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV8_W_64 (0b011000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV16_32 (0b10000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV16_64 (0b110000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV16_W_64 (0b010000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV32_64 (0b100000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP_32 (0b1111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP_64 (0b11111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP_N_32 (0b0001 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP_N_64 (0b00001 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP_B_32 (0b0011 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP_B_64 (0b00011 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP_H_32 (0b0111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP_H_64 (0b00111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP_W_64 (0b01111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP2_32 (0b1110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP2_64 (0b11110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP2_B_32 (0b0010 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP2_B_64 (0b00010 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP2_H_32 (0b0110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP2_H_64 (0b00110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP2_W_64 (0b01110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP4_32 (0b1100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP4_64 (0b11100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP4_H_32 (0b0100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP4_H_64 (0b00100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP4_W_64 (0b01100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP8_32 (0b1000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP8_64 (0b11000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP8_W_64 (0b01000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ZIP16_64 (0b10000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP_32 (0b1111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP_64 (0b11111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP_N_32 (0b0001 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP_N_64 (0b00001 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP_B_32 (0b0011 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP_B_64 (0b00011 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP_H_32 (0b0111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP_H_64 (0b00111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP_W_64 (0b01111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP2_32 (0b1110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP2_64 (0b11110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP2_B_32 (0b0010 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP2_B_64 (0b00010 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP2_H_32 (0b0110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP2_H_64 (0b00110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP2_W_64 (0b01110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP4_32 (0b1100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP4_64 (0b11100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP4_H_32 (0b0100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP4_H_64 (0b00100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP4_W_64 (0b01100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP8_32 (0b1000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP8_64 (0b11000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP8_W_64 (0b01000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_UNZIP16_64 (0b10000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_32 (0b11111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_64 (0b111111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_P_32 (0b00001 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_P_64 (0b000001 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_N_32 (0b00011 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_N_64 (0b000011 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_B_32 (0b00111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_B_64 (0b000111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_H_32 (0b01111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_H_64 (0b001111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_W_64 (0b011111 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC2_32 (0b11110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC2_64 (0b111110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC2_N_32 (0b00010 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC2_N_64 (0b000010 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC2_B_32 (0b00110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC2_B_64 (0b000110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC2_H_32 (0b01110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC2_H_64 (0b001110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC2_W_64 (0b011110 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC4_32 (0b11100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC4_64 (0b111100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC4_B_32 (0b00100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC4_B_64 (0b000100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC4_H_32 (0b01100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC4_H_64 (0b001100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC4_W_64 (0b011100 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC8_32 (0b11000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC8_64 (0b111000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC8_H_32 (0b01000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC8_H_64 (0b001000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC8_W_64 (0b011000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC16_32 (0b10000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC16_64 (0b110000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC16_W_64 (0b010000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC32_64 (0b100000 << OP_SH_SHAMT)
+
/* Register names used by gas and objdump. */
const char * const riscv_gpr_names_numeric[NGPR] =
@@ -56,6 +180,20 @@ const char * const riscv_fpr_names_abi[NFPR] = {
"fs8", "fs9", "fs10", "fs11", "ft8", "ft9", "ft10", "ft11"
};
+const char * const riscv_vecr_names_numeric[NVECR] =
+{
+ "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7",
+ "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15",
+ "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23",
+ "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
+};
+
+const char * const riscv_vecm_names_numeric[NVECM] =
+{
+ "v0.t"
+};
+
+
/* The order of overloaded instructions matters. Label arguments and
register arguments look the same. Instructions that can have either
for arguments must appear in the correct order in this table for the
@@ -79,6 +217,15 @@ const char * const riscv_fpr_names_abi[NFPR] = {
#define MASK_AQ (OP_MASK_AQ << OP_SH_AQ)
#define MASK_RL (OP_MASK_RL << OP_SH_RL)
#define MASK_AQRL (MASK_AQ | MASK_RL)
+#define MASK_SHAMT (OP_MASK_SHAMT << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV8_32 (0b11000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_REV8_64 (0b111000 << OP_SH_SHAMT)
+#define MATCH_SHAMT_ORC_B (0b00111 << OP_SH_SHAMT)
+#define MASK_VD (OP_MASK_VD << OP_SH_VD)
+#define MASK_VS1 (OP_MASK_VS1 << OP_SH_VS1)
+#define MASK_VS2 (OP_MASK_VS2 << OP_SH_VS2)
+#define MASK_VMASK (OP_MASK_VMASK << OP_SH_VMASK)
+
static int
match_opcode (const struct riscv_opcode *op, insn_t insn)
@@ -101,6 +248,47 @@ match_rs1_eq_rs2 (const struct riscv_opcode *op, insn_t insn)
return match_opcode (op, insn) && rs1 == rs2;
}
+static int
+match_vs1_eq_vs2 (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+
+ return match_opcode (op, insn) && vs1 == vs2;
+}
+
+static int
+match_vs1_eq_vs2_neq_vm (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ //if (!constraints || error == NULL)
+ // return match_opcode (op, insn) && vs1 == vs2;
+
+ if (!vm && vm == vd) return 0;
+ //*error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn) && vs1 == vs2;
+ return 0;
+}
+
+static int
+match_vd_eq_vs1_eq_vs2 (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+
+ return match_opcode (op, insn) && vd == vs1 && vs1 == vs2;
+}
+
+
static int
match_rd_nonzero (const struct riscv_opcode *op, insn_t insn)
{
@@ -195,6 +383,249 @@ match_srxi_as_c_srxi (const struct riscv_opcode *op, insn_t insn)
return match_opcode (op, insn) && EXTRACT_RVC_IMM (insn) != 0;
}
+
+/* These are used to check the vector constraints. */
+
+static int
+match_widen_vd_neq_vs1_neq_vs2_neq_vm (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ //if (!constraints || error == NULL)
+ // return match_opcode (op, insn);
+
+ if ((vd % 2) != 0)
+ return 0;
+ //*error = "illegal operands vd must be multiple of 2";
+ else if (vs1 >= vd && vs1 <= (vd + 1))
+ return 0;
+ //*error = "illegal operands vd cannot overlap vs1";
+ else if (vs2 >= vd && vs2 <= (vd + 1))
+ return 0;
+ //*error = "illegal operands vd cannot overlap vs2";
+ else if (!vm && vm >= vd && vm <= (vd + 1))
+ return 0;
+ //*error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
+
+static int
+match_widen_vd_neq_vs1_neq_vm (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ //if (!constraints || error == NULL)
+ // return match_opcode (op, insn);
+
+ if ((vd % 2) != 0) return 0;
+ //*error = "illegal operands vd must be multiple of 2";
+ else if ((vs2 % 2) != 0) return 0;
+ //*error = "illegal operands vs2 must be multiple of 2";
+ else if (vs1 >= vd && vs1 <= (vd + 1)) return 0;
+ //*error = "illegal operands vd cannot overlap vs1";
+ else if (!vm && vm >= vd && vm <= (vd + 1)) return 0;
+ //*error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
+
+static int
+match_widen_vd_neq_vs2_neq_vm (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ //if (!constraints || error == NULL)
+ // return match_opcode (op, insn);
+
+ if ((vd % 2) != 0) return 0;
+ //*error = "illegal operands vd must be multiple of 2";
+ else if (vs2 >= vd && vs2 <= (vd + 1)) return 0;
+ //*error = "illegal operands vd cannot overlap vs2";
+ else if (!vm && vm >= vd && vm <= (vd + 1)) return 0;
+ //*error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
+
+static int
+match_widen_vd_neq_vm (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ //if (!constraints || error == NULL)
+ // return match_opcode (op, insn);
+
+ if ((vd % 2) != 0) return 0;
+ // *error = "illegal operands vd must be multiple of 2";
+ else if ((vs2 % 2) != 0) return 0;
+ // *error = "illegal operands vs2 must be multiple of 2";
+ else if (!vm && vm >= vd && vm <= (vd + 1)) return 0;
+ // *error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
+
+static int
+match_narrow_vd_neq_vs2_neq_vm (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ //if (!constraints || error == NULL)
+ // return match_opcode (op, insn);
+
+ if ((vs2 % 2) != 0) return 0;
+ //*error = "illegal operands vs2 must be multiple of 2";
+ else if (vd >= vs2 && vd <= (vs2 + 1)) return 0;
+ //*error = "illegal operands vd cannot overlap vs2";
+ else if (!vm && vd >= vm && vd <= (vm + 1)) return 0;
+ //*error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
+
+static int
+match_vd_neq_vs1_neq_vs2 (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+
+ //if (!constraints || error == NULL)
+ // return match_opcode (op, insn);
+
+ if (vs1 == vd) return 0;
+ //*error = "illegal operands vd cannot overlap vs1";
+ else if (vs2 == vd) return 0;
+ //*error = "illegal operands vd cannot overlap vs2";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
+
+static int
+match_vd_neq_vs1_neq_vs2_neq_vm (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ if (vs1 == vd) return 0;
+ //*error = "illegal operands vd cannot overlap vs1";
+ else if (vs2 == vd) return 0;
+ //*error = "illegal operands vd cannot overlap vs2";
+ else if (!vm && vm == vd) return 0;
+ //*error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
+
+static int
+match_vd_neq_vs2_neq_vm (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ //if (!constraints || error == NULL)
+ // return match_opcode (op, insn);
+
+ if (vs2 == vd) return 0;
+ //*error = "illegal operands vd cannot overlap vs2";
+ else if (!vm && vm == vd) return 0;
+ //*error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
+
+/* v[m]adc and v[m]sbc use the vm field to encode the carry-in
+ v0 register. That carry-in v0 register cannot overlap vd
+ either, so the same match_vd_neq_vm check covers this
+ overlap constraint. */
+
+static int
+match_vd_neq_vm (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ //if (!constraints || error == NULL)
+ // return match_opcode (op, insn);
+
+ if (!vm && vm == vd) return 0;
+ //*error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
+
+static int
+match_vls_nf_rv (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int nf = ((insn & (0x7 << 29)) >> 29) + 1;
+
+ //if (!constraints || error == NULL)
+ // return match_opcode (op, insn);
+
+ if ((vd % nf) != 0) return 0;
+ //*error = "illegal operands vd must be multiple of nf";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
+
+static int
+match_vmv_nf_rv (const struct riscv_opcode *op,
+ insn_t insn)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int nf = ((insn & (0x7 << 15)) >> 15) + 1;
+
+ //if (!constraints || error == NULL)
+ // return match_opcode (op, insn);
+
+ if ((vd % nf) != 0) return 0;
+ //*error = "illegal operands vd must be multiple of nf";
+ else if ((vs2 % nf) != 0) return 0;
+ //*error = "illegal operands vs2 must be multiple of nf";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
+
+
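How the match functions above are consumed, in rough outline (a simplified sketch, not the patch's own code; the real assembler and disassembler loops also hash the table and check xlen and instruction class before this test):

/* Assumes "opcode/riscv.h" is in scope, as it is in this file.  */
static const struct riscv_opcode *
lookup_insn (const struct riscv_opcode *table, insn_t word)
{
  const struct riscv_opcode *op;

  for (op = table; op->name != 0; op++)
    /* match_func is usually plain match_opcode, which compares the
       fixed bits: ((word ^ match) & mask) == 0.  The RVV variants
       above layer register-overlap constraints on top of that.  */
    if (op->match_func (op, word))
      return op;
  return 0;
}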
const struct riscv_opcode riscv_opcodes[] =
{
/* name, xlen, isa, operands, match, mask, match_func, pinfo. */
@@ -749,6 +1180,10 @@ const struct riscv_opcode riscv_opcodes[] =
{"c.fsw", 32, INSN_CLASS_F_AND_C, "CD,Ck(Cs)", MATCH_C_FSW, MASK_C_FSW, match_opcode, INSN_DREF|INSN_4_BYTE },
/* Supervisor instructions */
+{"cflush.d.l1", 0, INSN_CLASS_I, "s", MATCH_CFLUSH_D_L1, MASK_CFLUSH_D_L1, match_opcode, 0 },
+{"cdiscard.d.l1", 0, INSN_CLASS_I, "s", MATCH_CDISCARD_D_L1, MASK_CDISCARD_D_L1, match_opcode, 0 },
+{"cflush.d.l2", 0, INSN_CLASS_I, "s", MATCH_CFLUSH_D_L2, MASK_CFLUSH_D_L2, match_opcode, 0 },
+{"cdiscard.d.l2", 0, INSN_CLASS_I, "s", MATCH_CDISCARD_D_L2, MASK_CDISCARD_D_L2, match_opcode, 0 },
{"csrr", 0, INSN_CLASS_I, "d,E", MATCH_CSRRS, MASK_CSRRS | MASK_RS1, match_opcode, INSN_ALIAS },
{"csrwi", 0, INSN_CLASS_I, "E,Z", MATCH_CSRRWI, MASK_CSRRWI | MASK_RD, match_opcode, INSN_ALIAS },
{"csrsi", 0, INSN_CLASS_I, "E,Z", MATCH_CSRRSI, MASK_CSRRSI | MASK_RD, match_opcode, INSN_ALIAS },
@@ -780,6 +1215,879 @@ const struct riscv_opcode riscv_opcodes[] =
{"sfence.vma", 0, INSN_CLASS_I, "s,t", MATCH_SFENCE_VMA, MASK_SFENCE_VMA, match_opcode, 0 },
{"wfi", 0, INSN_CLASS_I, "", MATCH_WFI, MASK_WFI, match_opcode, 0 },
+{"clz", 0, INSN_CLASS_ZBB, "d,s", MATCH_CLZ, MASK_CLZ, match_opcode, 0 },
+{"ctz", 0, INSN_CLASS_ZBB, "d,s", MATCH_CTZ, MASK_CTZ, match_opcode, 0 },
+{"cpop", 0, INSN_CLASS_ZBB, "d,s", MATCH_CPOP, MASK_CPOP, match_opcode, 0 },
+{"min", 0, INSN_CLASS_ZBB, "d,s,t", MATCH_MIN, MASK_MIN, match_opcode, 0 },
+{"max", 0, INSN_CLASS_ZBB, "d,s,t", MATCH_MAX, MASK_MAX, match_opcode, 0 },
+{"minu", 0, INSN_CLASS_ZBB, "d,s,t", MATCH_MINU, MASK_MINU, match_opcode, 0 },
+{"maxu", 0, INSN_CLASS_ZBB, "d,s,t", MATCH_MAXU, MASK_MAXU, match_opcode, 0 },
+{"sext.b", 0, INSN_CLASS_ZBB, "d,s", MATCH_SEXT_B, MASK_SEXT_B, match_opcode, 0 },
+{"sext.b", 0, INSN_CLASS_I, "d,s", 0, (int) M_SEXTB, match_never, INSN_MACRO },
+{"sext.h", 0, INSN_CLASS_ZBB, "d,s", MATCH_SEXT_H, MASK_SEXT_H, match_opcode, 0 },
+{"sext.h", 0, INSN_CLASS_I, "d,s", 0, (int) M_SEXTH, match_never, INSN_MACRO },
+{"zext.h", 32, INSN_CLASS_ZBB, "d,s", MATCH_PACK, MASK_PACK | MASK_RS2, match_opcode, 0 },
+{"zext.h", 64, INSN_CLASS_ZBB, "d,s", MATCH_PACKW, MASK_PACKW | MASK_RS2, match_opcode, 0 },
+{"zext.h", 0, INSN_CLASS_I, "d,s", 0, (int) M_ZEXTH, match_never, INSN_MACRO },
+{"andn", 0, INSN_CLASS_ZBB, "d,s,t", MATCH_ANDN, MASK_ANDN, match_opcode, 0 },
+{"orn", 0, INSN_CLASS_ZBB, "d,s,t", MATCH_ORN, MASK_ORN, match_opcode, 0 },
+{"xnor", 0, INSN_CLASS_ZBB, "d,s,t", MATCH_XNOR, MASK_XNOR, match_opcode, 0 },
+{"rol", 0, INSN_CLASS_ZBB, "d,s,t", MATCH_ROL, MASK_ROL, match_opcode, 0 },
+{"rori", 0, INSN_CLASS_ZBB, "d,s,>", MATCH_RORI, MASK_RORI, match_opcode, 0 },
+{"ror", 0, INSN_CLASS_ZBB, "d,s,t", MATCH_ROR, MASK_ROR, match_opcode, 0 },
+{"ror", 0, INSN_CLASS_ZBB, "d,s,>", MATCH_RORI, MASK_RORI, match_opcode, INSN_ALIAS },
+{"rev8", 32, INSN_CLASS_ZBB, "d,s", MATCH_GREVI | MATCH_SHAMT_REV8_32, MASK_GREVI | MASK_SHAMT, match_opcode, 0 },
+{"rev8", 64, INSN_CLASS_ZBB, "d,s", MATCH_GREVI | MATCH_SHAMT_REV8_64, MASK_GREVI | MASK_SHAMT, match_opcode, 0 },
+{"orc.b", 0, INSN_CLASS_ZBB, "d,s", MATCH_GORCI | MATCH_SHAMT_ORC_B, MASK_GORCI | MASK_SHAMT, match_opcode, 0 },
+{"clzw", 64, INSN_CLASS_ZBB, "d,s", MATCH_CLZW, MASK_CLZW, match_opcode, 0 },
+{"ctzw", 64, INSN_CLASS_ZBB, "d,s", MATCH_CTZW, MASK_CTZW, match_opcode, 0 },
+{"cpopw", 64, INSN_CLASS_ZBB, "d,s", MATCH_CPOPW, MASK_CPOPW, match_opcode, 0 },
+{"rolw", 64, INSN_CLASS_ZBB, "d,s,t", MATCH_ROLW, MASK_ROLW, match_opcode, 0 },
+{"roriw", 64, INSN_CLASS_ZBB, "d,s,<", MATCH_RORIW, MASK_RORIW, match_opcode, 0 },
+{"rorw", 64, INSN_CLASS_ZBB, "d,s,t", MATCH_RORW, MASK_RORW, match_opcode, 0 },
  3593. +{"rorw", 64, INSN_CLASS_ZBB, "d,s,<", MATCH_RORIW, MASK_RORIW, match_opcode, 0 },
  3594. +{"sh1add", 0, INSN_CLASS_ZBA, "d,s,t", MATCH_SH1ADD, MASK_SH1ADD, match_opcode, 0 },
  3595. +{"sh2add", 0, INSN_CLASS_ZBA, "d,s,t", MATCH_SH2ADD, MASK_SH2ADD, match_opcode, 0 },
  3596. +{"sh3add", 0, INSN_CLASS_ZBA, "d,s,t", MATCH_SH3ADD, MASK_SH3ADD, match_opcode, 0 },
  3597. +{"sh1add.uw", 64, INSN_CLASS_ZBA, "d,s,t", MATCH_SH1ADD_UW, MASK_SH1ADD_UW, match_opcode, 0 },
  3598. +{"sh2add.uw", 64, INSN_CLASS_ZBA, "d,s,t", MATCH_SH2ADD_UW, MASK_SH2ADD_UW, match_opcode, 0 },
  3599. +{"sh3add.uw", 64, INSN_CLASS_ZBA, "d,s,t", MATCH_SH3ADD_UW, MASK_SH3ADD_UW, match_opcode, 0 },
  3600. +{"zext.w", 64, INSN_CLASS_ZBA, "d,s", MATCH_ADD_UW, MASK_ADD_UW | MASK_RS2, match_opcode, INSN_ALIAS },
  3601. +{"zext.w", 64, INSN_CLASS_I, "d,s", 0, (int) M_ZEXTW, match_never, INSN_MACRO },
  3602. +{"add.uw", 64, INSN_CLASS_ZBA, "d,s,t", MATCH_ADD_UW, MASK_ADD_UW, match_opcode, 0 },
  3603. +{"slli.uw", 64, INSN_CLASS_ZBA, "d,s,>", MATCH_SLLI_UW, MASK_SLLI_UW, match_opcode, 0 },
  3604. +{"clmul", 0, INSN_CLASS_ZBC, "d,s,t", MATCH_CLMUL, MASK_CLMUL, match_opcode, 0 },
  3605. +{"clmulh", 0, INSN_CLASS_ZBC, "d,s,t", MATCH_CLMULH, MASK_CLMULH, match_opcode, 0 },
  3606. +{"clmulr", 0, INSN_CLASS_ZBC, "d,s,t", MATCH_CLMULR, MASK_CLMULR, match_opcode, 0 },
  3607. +
  3608. +
  3609. +/* ZBS instructions */
  3610. +{"bclr", 0, INSN_CLASS_ZBS, "d,s,t", MATCH_BCLR, MASK_BCLR, match_opcode, 0 },
  3611. +{"bclri", 0, INSN_CLASS_ZBS, "d,s,>", MATCH_BCLRI, MASK_BCLRI, match_opcode, 0 },
  3612. +{"bext", 0, INSN_CLASS_ZBS, "d,s,t", MATCH_BEXT, MASK_BEXT, match_opcode, 0 },
  3613. +{"bexti", 0, INSN_CLASS_ZBS, "d,s,>", MATCH_BEXTI, MASK_BEXTI, match_opcode, 0 },
  3614. +{"binv", 0, INSN_CLASS_ZBS, "d,s,t", MATCH_BINV, MASK_BINV, match_opcode, 0 },
  3615. +{"binvi", 0, INSN_CLASS_ZBS, "d,s,>", MATCH_BINVI, MASK_BINVI, match_opcode, 0 },
  3616. +{"bset", 0, INSN_CLASS_ZBS, "d,s,t", MATCH_BSET, MASK_BSET, match_opcode, 0 },
  3617. +{"bseti", 0, INSN_CLASS_ZBS, "d,s,>", MATCH_BSETI, MASK_BSETI, match_opcode, 0 },
  3618. +
  3619. +/* B instructions excluded from spec1.0 */
  3620. +{"bsetw", 64, INSN_CLASS_ZBS, "d,s,t", MATCH_BSETW, MASK_BSETW, match_opcode, 0 },
  3621. +{"bclrw", 64, INSN_CLASS_ZBS, "d,s,t", MATCH_BCLRW, MASK_BCLRW, match_opcode, 0 },
  3622. +{"binvw", 64, INSN_CLASS_ZBS, "d,s,t", MATCH_BINVW, MASK_BINVW, match_opcode, 0 },
  3623. +{"bextw", 64, INSN_CLASS_ZBS, "d,s,t", MATCH_BEXTW, MASK_BEXTW, match_opcode, 0 },
  3624. +{"bsetiw", 64, INSN_CLASS_ZBS, "d,s,<", MATCH_BSETIW, MASK_BSETIW, match_opcode, 0 },
  3625. +{"bclriw", 64, INSN_CLASS_ZBS, "d,s,<", MATCH_BCLRIW, MASK_BCLRIW, match_opcode, 0 },
  3626. +{"binviw", 64, INSN_CLASS_ZBS, "d,s,<", MATCH_BINVIW, MASK_BINVIW, match_opcode, 0 },
  3627. +{"slo", 0, INSN_CLASS_ZBS, "d,s,t", MATCH_SLO, MASK_SLO, match_opcode, 0 },
  3628. +{"sro", 0, INSN_CLASS_ZBS, "d,s,t", MATCH_SRO, MASK_SRO, match_opcode, 0 },
  3629. +{"sloi", 0, INSN_CLASS_ZBS, "d,s,>", MATCH_SLOI, MASK_SLOI, match_opcode, 0 },
  3630. +{"sroi", 0, INSN_CLASS_ZBS, "d,s,>", MATCH_SROI, MASK_SROI, match_opcode, 0 },
  3631. +{"slow", 64, INSN_CLASS_ZBS, "d,s,<", MATCH_SLOW, MASK_SLOW, match_opcode, 0 },
  3632. +{"srow", 64, INSN_CLASS_ZBS, "d,s,<", MATCH_SROW, MASK_SROW, match_opcode, 0 },
  3633. +{"sloiw", 64, INSN_CLASS_ZBS, "d,s,<", MATCH_SLOIW, MASK_SLOIW, match_opcode, 0 },
  3634. +{"sroiw", 64, INSN_CLASS_ZBS, "d,s,<", MATCH_SROIW, MASK_SROIW, match_opcode, 0 },
  3635. +
  3636. +/* RVV */
  3637. +{"vsetvl", 0, INSN_CLASS_V, "d,s,t", MATCH_VSETVL, MASK_VSETVL, match_opcode, 0},
  3638. +{"vsetvli", 0, INSN_CLASS_V, "d,s,Vc", MATCH_VSETVLI, MASK_VSETVLI, match_opcode, 0},
  3639. +{"vsetivli", 0, INSN_CLASS_V, "d,Z,Vb", MATCH_VSETIVLI, MASK_VSETIVLI, match_opcode, 0},
  3640. +
  3641. +{"vle1.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VLE1V, MASK_VLE1V, match_opcode, INSN_DREF },
  3642. +{"vse1.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VSE1V, MASK_VSE1V, match_opcode, INSN_DREF },
  3643. +
  3644. +{"vle8.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE8V, MASK_VLE8V, match_vd_neq_vm, INSN_DREF },
  3645. +{"vle16.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE16V, MASK_VLE16V, match_vd_neq_vm, INSN_DREF },
  3646. +{"vle32.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE32V, MASK_VLE32V, match_vd_neq_vm, INSN_DREF },
  3647. +{"vle64.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE64V, MASK_VLE64V, match_vd_neq_vm, INSN_DREF },
  3648. +
  3649. +{"vse8.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VSE8V, MASK_VSE8V, match_vd_neq_vm, INSN_DREF },
  3650. +{"vse16.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VSE16V, MASK_VSE16V, match_vd_neq_vm, INSN_DREF },
  3651. +{"vse32.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VSE32V, MASK_VSE32V, match_vd_neq_vm, INSN_DREF },
  3652. +{"vse64.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VSE64V, MASK_VSE64V, match_vd_neq_vm, INSN_DREF },
  3653. +
  3654. +{"vlse8.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VLSE8V, MASK_VLSE8V, match_vd_neq_vm, INSN_DREF },
  3655. +{"vlse16.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VLSE16V, MASK_VLSE16V, match_vd_neq_vm, INSN_DREF },
  3656. +{"vlse32.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VLSE32V, MASK_VLSE32V, match_vd_neq_vm, INSN_DREF },
  3657. +{"vlse64.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VLSE64V, MASK_VLSE64V, match_vd_neq_vm, INSN_DREF },
  3658. +
  3659. +{"vsse8.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VSSE8V, MASK_VSSE8V, match_vd_neq_vm, INSN_DREF },
  3660. +{"vsse16.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VSSE16V, MASK_VSSE16V, match_vd_neq_vm, INSN_DREF },
  3661. +{"vsse32.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VSSE32V, MASK_VSSE32V, match_vd_neq_vm, INSN_DREF },
  3662. +{"vsse64.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VSSE64V, MASK_VSSE64V, match_vd_neq_vm, INSN_DREF },
  3663. +
  3664. +{"vloxei8.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLOXEI8V, MASK_VLOXEI8V, match_vd_neq_vm, INSN_DREF },
  3665. +{"vloxei16.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLOXEI16V, MASK_VLOXEI16V, match_vd_neq_vm, INSN_DREF },
  3666. +{"vloxei32.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLOXEI32V, MASK_VLOXEI32V, match_vd_neq_vm, INSN_DREF },
  3667. +{"vloxei64.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLOXEI64V, MASK_VLOXEI64V, match_vd_neq_vm, INSN_DREF },
  3668. +
  3669. +{"vsoxei8.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSOXEI8V, MASK_VSOXEI8V, match_vd_neq_vm, INSN_DREF },
  3670. +{"vsoxei16.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSOXEI16V, MASK_VSOXEI16V, match_vd_neq_vm, INSN_DREF },
  3671. +{"vsoxei32.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSOXEI32V, MASK_VSOXEI32V, match_vd_neq_vm, INSN_DREF },
  3672. +{"vsoxei64.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSOXEI64V, MASK_VSOXEI64V, match_vd_neq_vm, INSN_DREF },
  3673. +
  3674. +{"vluxei8.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLUXEI8V, MASK_VLUXEI8V, match_vd_neq_vm, INSN_DREF },
  3675. +{"vluxei16.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLUXEI16V, MASK_VLUXEI16V, match_vd_neq_vm, INSN_DREF },
  3676. +{"vluxei32.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLUXEI32V, MASK_VLUXEI32V, match_vd_neq_vm, INSN_DREF },
  3677. +{"vluxei64.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLUXEI64V, MASK_VLUXEI64V, match_vd_neq_vm, INSN_DREF },
  3678. +
  3679. +{"vsuxei8.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSUXEI8V, MASK_VSUXEI8V, match_vd_neq_vm, INSN_DREF },
  3680. +{"vsuxei16.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSUXEI16V, MASK_VSUXEI16V, match_vd_neq_vm, INSN_DREF },
  3681. +{"vsuxei32.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSUXEI32V, MASK_VSUXEI32V, match_vd_neq_vm, INSN_DREF },
  3682. +{"vsuxei64.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSUXEI64V, MASK_VSUXEI64V, match_vd_neq_vm, INSN_DREF },
  3683. +
  3684. +{"vle8ff.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE8FFV, MASK_VLE8FFV, match_vd_neq_vm, INSN_DREF },
  3685. +{"vle16ff.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE16FFV, MASK_VLE16FFV, match_vd_neq_vm, INSN_DREF },
  3686. +{"vle32ff.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE32FFV, MASK_VLE32FFV, match_vd_neq_vm, INSN_DREF },
  3687. +{"vle64ff.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE64FFV, MASK_VLE64FFV, match_vd_neq_vm, INSN_DREF },
  3688. +
  3689. +{"vlseg2e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E8V, MASK_VLSEG2E8V, match_vd_neq_vm, INSN_DREF },
  3690. +{"vsseg2e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG2E8V, MASK_VSSEG2E8V, match_vd_neq_vm, INSN_DREF },
  3691. +{"vlseg3e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E8V, MASK_VLSEG3E8V, match_vd_neq_vm, INSN_DREF },
  3692. +{"vsseg3e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG3E8V, MASK_VSSEG3E8V, match_vd_neq_vm, INSN_DREF },
  3693. +{"vlseg4e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E8V, MASK_VLSEG4E8V, match_vd_neq_vm, INSN_DREF },
  3694. +{"vsseg4e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG4E8V, MASK_VSSEG4E8V, match_vd_neq_vm, INSN_DREF },
  3695. +{"vlseg5e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E8V, MASK_VLSEG5E8V, match_vd_neq_vm, INSN_DREF },
  3696. +{"vsseg5e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG5E8V, MASK_VSSEG5E8V, match_vd_neq_vm, INSN_DREF },
  3697. +{"vlseg6e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E8V, MASK_VLSEG6E8V, match_vd_neq_vm, INSN_DREF },
  3698. +{"vsseg6e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG6E8V, MASK_VSSEG6E8V, match_vd_neq_vm, INSN_DREF },
  3699. +{"vlseg7e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E8V, MASK_VLSEG7E8V, match_vd_neq_vm, INSN_DREF },
  3700. +{"vsseg7e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG7E8V, MASK_VSSEG7E8V, match_vd_neq_vm, INSN_DREF },
  3701. +{"vlseg8e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E8V, MASK_VLSEG8E8V, match_vd_neq_vm, INSN_DREF },
  3702. +{"vsseg8e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG8E8V, MASK_VSSEG8E8V, match_vd_neq_vm, INSN_DREF },
  3703. +
  3704. +{"vlseg2e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E16V, MASK_VLSEG2E16V, match_vd_neq_vm, INSN_DREF },
  3705. +{"vsseg2e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG2E16V, MASK_VSSEG2E16V, match_vd_neq_vm, INSN_DREF },
  3706. +{"vlseg3e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E16V, MASK_VLSEG3E16V, match_vd_neq_vm, INSN_DREF },
  3707. +{"vsseg3e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG3E16V, MASK_VSSEG3E16V, match_vd_neq_vm, INSN_DREF },
  3708. +{"vlseg4e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E16V, MASK_VLSEG4E16V, match_vd_neq_vm, INSN_DREF },
  3709. +{"vsseg4e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG4E16V, MASK_VSSEG4E16V, match_vd_neq_vm, INSN_DREF },
  3710. +{"vlseg5e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E16V, MASK_VLSEG5E16V, match_vd_neq_vm, INSN_DREF },
  3711. +{"vsseg5e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG5E16V, MASK_VSSEG5E16V, match_vd_neq_vm, INSN_DREF },
  3712. +{"vlseg6e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E16V, MASK_VLSEG6E16V, match_vd_neq_vm, INSN_DREF },
  3713. +{"vsseg6e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG6E16V, MASK_VSSEG6E16V, match_vd_neq_vm, INSN_DREF },
  3714. +{"vlseg7e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E16V, MASK_VLSEG7E16V, match_vd_neq_vm, INSN_DREF },
  3715. +{"vsseg7e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG7E16V, MASK_VSSEG7E16V, match_vd_neq_vm, INSN_DREF },
  3716. +{"vlseg8e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E16V, MASK_VLSEG8E16V, match_vd_neq_vm, INSN_DREF },
  3717. +{"vsseg8e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG8E16V, MASK_VSSEG8E16V, match_vd_neq_vm, INSN_DREF },
  3718. +
  3719. +{"vlseg2e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E32V, MASK_VLSEG2E32V, match_vd_neq_vm, INSN_DREF },
  3720. +{"vsseg2e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG2E32V, MASK_VSSEG2E32V, match_vd_neq_vm, INSN_DREF },
  3721. +{"vlseg3e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E32V, MASK_VLSEG3E32V, match_vd_neq_vm, INSN_DREF },
  3722. +{"vsseg3e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG3E32V, MASK_VSSEG3E32V, match_vd_neq_vm, INSN_DREF },
  3723. +{"vlseg4e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E32V, MASK_VLSEG4E32V, match_vd_neq_vm, INSN_DREF },
  3724. +{"vsseg4e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG4E32V, MASK_VSSEG4E32V, match_vd_neq_vm, INSN_DREF },
  3725. +{"vlseg5e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E32V, MASK_VLSEG5E32V, match_vd_neq_vm, INSN_DREF },
  3726. +{"vsseg5e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG5E32V, MASK_VSSEG5E32V, match_vd_neq_vm, INSN_DREF },
  3727. +{"vlseg6e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E32V, MASK_VLSEG6E32V, match_vd_neq_vm, INSN_DREF },
  3728. +{"vsseg6e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG6E32V, MASK_VSSEG6E32V, match_vd_neq_vm, INSN_DREF },
  3729. +{"vlseg7e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E32V, MASK_VLSEG7E32V, match_vd_neq_vm, INSN_DREF },
  3730. +{"vsseg7e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG7E32V, MASK_VSSEG7E32V, match_vd_neq_vm, INSN_DREF },
  3731. +{"vlseg8e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E32V, MASK_VLSEG8E32V, match_vd_neq_vm, INSN_DREF },
  3732. +{"vsseg8e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG8E32V, MASK_VSSEG8E32V, match_vd_neq_vm, INSN_DREF },
  3733. +
  3734. +{"vlseg2e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E64V, MASK_VLSEG2E64V, match_vd_neq_vm, INSN_DREF },
  3735. +{"vsseg2e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG2E64V, MASK_VSSEG2E64V, match_vd_neq_vm, INSN_DREF },
  3736. +{"vlseg3e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E64V, MASK_VLSEG3E64V, match_vd_neq_vm, INSN_DREF },
  3737. +{"vsseg3e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG3E64V, MASK_VSSEG3E64V, match_vd_neq_vm, INSN_DREF },
  3738. +{"vlseg4e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E64V, MASK_VLSEG4E64V, match_vd_neq_vm, INSN_DREF },
  3739. +{"vsseg4e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG4E64V, MASK_VSSEG4E64V, match_vd_neq_vm, INSN_DREF },
  3740. +{"vlseg5e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E64V, MASK_VLSEG5E64V, match_vd_neq_vm, INSN_DREF },
  3741. +{"vsseg5e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG5E64V, MASK_VSSEG5E64V, match_vd_neq_vm, INSN_DREF },
  3742. +{"vlseg6e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E64V, MASK_VLSEG6E64V, match_vd_neq_vm, INSN_DREF },
  3743. +{"vsseg6e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG6E64V, MASK_VSSEG6E64V, match_vd_neq_vm, INSN_DREF },
  3744. +{"vlseg7e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E64V, MASK_VLSEG7E64V, match_vd_neq_vm, INSN_DREF },
  3745. +{"vsseg7e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG7E64V, MASK_VSSEG7E64V, match_vd_neq_vm, INSN_DREF },
  3746. +{"vlseg8e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E64V, MASK_VLSEG8E64V, match_vd_neq_vm, INSN_DREF },
  3747. +{"vsseg8e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG8E64V, MASK_VSSEG8E64V, match_vd_neq_vm, INSN_DREF },
  3748. +
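+/* Vector strided segment loads and stores.  */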
  3749. +{"vlsseg2e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG2E8V, MASK_VLSSEG2E8V, match_vd_neq_vm, INSN_DREF },
  3750. +{"vssseg2e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG2E8V, MASK_VSSSEG2E8V, match_vd_neq_vm, INSN_DREF },
  3751. +{"vlsseg3e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG3E8V, MASK_VLSSEG3E8V, match_vd_neq_vm, INSN_DREF },
  3752. +{"vssseg3e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG3E8V, MASK_VSSSEG3E8V, match_vd_neq_vm, INSN_DREF },
  3753. +{"vlsseg4e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG4E8V, MASK_VLSSEG4E8V, match_vd_neq_vm, INSN_DREF },
  3754. +{"vssseg4e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG4E8V, MASK_VSSSEG4E8V, match_vd_neq_vm, INSN_DREF },
  3755. +{"vlsseg5e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG5E8V, MASK_VLSSEG5E8V, match_vd_neq_vm, INSN_DREF },
  3756. +{"vssseg5e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG5E8V, MASK_VSSSEG5E8V, match_vd_neq_vm, INSN_DREF },
  3757. +{"vlsseg6e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG6E8V, MASK_VLSSEG6E8V, match_vd_neq_vm, INSN_DREF },
  3758. +{"vssseg6e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG6E8V, MASK_VSSSEG6E8V, match_vd_neq_vm, INSN_DREF },
  3759. +{"vlsseg7e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG7E8V, MASK_VLSSEG7E8V, match_vd_neq_vm, INSN_DREF },
  3760. +{"vssseg7e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG7E8V, MASK_VSSSEG7E8V, match_vd_neq_vm, INSN_DREF },
  3761. +{"vlsseg8e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG8E8V, MASK_VLSSEG8E8V, match_vd_neq_vm, INSN_DREF },
  3762. +{"vssseg8e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG8E8V, MASK_VSSSEG8E8V, match_vd_neq_vm, INSN_DREF },
  3763. +
  3764. +{"vlsseg2e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG2E16V, MASK_VLSSEG2E16V, match_vd_neq_vm, INSN_DREF },
  3765. +{"vssseg2e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG2E16V, MASK_VSSSEG2E16V, match_vd_neq_vm, INSN_DREF },
  3766. +{"vlsseg3e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG3E16V, MASK_VLSSEG3E16V, match_vd_neq_vm, INSN_DREF },
  3767. +{"vssseg3e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG3E16V, MASK_VSSSEG3E16V, match_vd_neq_vm, INSN_DREF },
  3768. +{"vlsseg4e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG4E16V, MASK_VLSSEG4E16V, match_vd_neq_vm, INSN_DREF },
  3769. +{"vssseg4e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG4E16V, MASK_VSSSEG4E16V, match_vd_neq_vm, INSN_DREF },
  3770. +{"vlsseg5e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG5E16V, MASK_VLSSEG5E16V, match_vd_neq_vm, INSN_DREF },
  3771. +{"vssseg5e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG5E16V, MASK_VSSSEG5E16V, match_vd_neq_vm, INSN_DREF },
  3772. +{"vlsseg6e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG6E16V, MASK_VLSSEG6E16V, match_vd_neq_vm, INSN_DREF },
  3773. +{"vssseg6e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG6E16V, MASK_VSSSEG6E16V, match_vd_neq_vm, INSN_DREF },
  3774. +{"vlsseg7e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG7E16V, MASK_VLSSEG7E16V, match_vd_neq_vm, INSN_DREF },
  3775. +{"vssseg7e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG7E16V, MASK_VSSSEG7E16V, match_vd_neq_vm, INSN_DREF },
  3776. +{"vlsseg8e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG8E16V, MASK_VLSSEG8E16V, match_vd_neq_vm, INSN_DREF },
  3777. +{"vssseg8e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG8E16V, MASK_VSSSEG8E16V, match_vd_neq_vm, INSN_DREF },
  3778. +
  3779. +{"vlsseg2e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG2E32V, MASK_VLSSEG2E32V, match_vd_neq_vm, INSN_DREF },
  3780. +{"vssseg2e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG2E32V, MASK_VSSSEG2E32V, match_vd_neq_vm, INSN_DREF },
  3781. +{"vlsseg3e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG3E32V, MASK_VLSSEG3E32V, match_vd_neq_vm, INSN_DREF },
  3782. +{"vssseg3e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG3E32V, MASK_VSSSEG3E32V, match_vd_neq_vm, INSN_DREF },
  3783. +{"vlsseg4e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG4E32V, MASK_VLSSEG4E32V, match_vd_neq_vm, INSN_DREF },
  3784. +{"vssseg4e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG4E32V, MASK_VSSSEG4E32V, match_vd_neq_vm, INSN_DREF },
  3785. +{"vlsseg5e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG5E32V, MASK_VLSSEG5E32V, match_vd_neq_vm, INSN_DREF },
  3786. +{"vssseg5e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG5E32V, MASK_VSSSEG5E32V, match_vd_neq_vm, INSN_DREF },
  3787. +{"vlsseg6e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG6E32V, MASK_VLSSEG6E32V, match_vd_neq_vm, INSN_DREF },
  3788. +{"vssseg6e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG6E32V, MASK_VSSSEG6E32V, match_vd_neq_vm, INSN_DREF },
  3789. +{"vlsseg7e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG7E32V, MASK_VLSSEG7E32V, match_vd_neq_vm, INSN_DREF },
  3790. +{"vssseg7e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG7E32V, MASK_VSSSEG7E32V, match_vd_neq_vm, INSN_DREF },
  3791. +{"vlsseg8e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG8E32V, MASK_VLSSEG8E32V, match_vd_neq_vm, INSN_DREF },
  3792. +{"vssseg8e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG8E32V, MASK_VSSSEG8E32V, match_vd_neq_vm, INSN_DREF },
  3793. +
  3794. +{"vlsseg2e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG2E64V, MASK_VLSSEG2E64V, match_vd_neq_vm, INSN_DREF },
  3795. +{"vssseg2e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG2E64V, MASK_VSSSEG2E64V, match_vd_neq_vm, INSN_DREF },
  3796. +{"vlsseg3e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG3E64V, MASK_VLSSEG3E64V, match_vd_neq_vm, INSN_DREF },
  3797. +{"vssseg3e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG3E64V, MASK_VSSSEG3E64V, match_vd_neq_vm, INSN_DREF },
  3798. +{"vlsseg4e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG4E64V, MASK_VLSSEG4E64V, match_vd_neq_vm, INSN_DREF },
  3799. +{"vssseg4e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG4E64V, MASK_VSSSEG4E64V, match_vd_neq_vm, INSN_DREF },
  3800. +{"vlsseg5e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG5E64V, MASK_VLSSEG5E64V, match_vd_neq_vm, INSN_DREF },
  3801. +{"vssseg5e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG5E64V, MASK_VSSSEG5E64V, match_vd_neq_vm, INSN_DREF },
  3802. +{"vlsseg6e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG6E64V, MASK_VLSSEG6E64V, match_vd_neq_vm, INSN_DREF },
  3803. +{"vssseg6e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG6E64V, MASK_VSSSEG6E64V, match_vd_neq_vm, INSN_DREF },
  3804. +{"vlsseg7e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG7E64V, MASK_VLSSEG7E64V, match_vd_neq_vm, INSN_DREF },
  3805. +{"vssseg7e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG7E64V, MASK_VSSSEG7E64V, match_vd_neq_vm, INSN_DREF },
  3806. +{"vlsseg8e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG8E64V, MASK_VLSSEG8E64V, match_vd_neq_vm, INSN_DREF },
  3807. +{"vssseg8e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG8E64V, MASK_VSSSEG8E64V, match_vd_neq_vm, INSN_DREF },
  3808. +
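+/* Vector indexed-ordered segment loads and stores.  */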
  3809. +{"vloxseg2ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG2EI8V, MASK_VLOXSEG2EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3810. +{"vsoxseg2ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG2EI8V, MASK_VSOXSEG2EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3811. +{"vloxseg3ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG3EI8V, MASK_VLOXSEG3EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3812. +{"vsoxseg3ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG3EI8V, MASK_VSOXSEG3EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3813. +{"vloxseg4ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG4EI8V, MASK_VLOXSEG4EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3814. +{"vsoxseg4ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG4EI8V, MASK_VSOXSEG4EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3815. +{"vloxseg5ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG5EI8V, MASK_VLOXSEG5EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3816. +{"vsoxseg5ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG5EI8V, MASK_VSOXSEG5EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3817. +{"vloxseg6ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG6EI8V, MASK_VLOXSEG6EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3818. +{"vsoxseg6ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG6EI8V, MASK_VSOXSEG6EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3819. +{"vloxseg7ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG7EI8V, MASK_VLOXSEG7EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3820. +{"vsoxseg7ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG7EI8V, MASK_VSOXSEG7EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3821. +{"vloxseg8ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG8EI8V, MASK_VLOXSEG8EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3822. +{"vsoxseg8ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG8EI8V, MASK_VSOXSEG8EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3823. +
  3824. +{"vloxseg2ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG2EI16V, MASK_VLOXSEG2EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3825. +{"vsoxseg2ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG2EI16V, MASK_VSOXSEG2EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3826. +{"vloxseg3ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG3EI16V, MASK_VLOXSEG3EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3827. +{"vsoxseg3ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG3EI16V, MASK_VSOXSEG3EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3828. +{"vloxseg4ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG4EI16V, MASK_VLOXSEG4EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3829. +{"vsoxseg4ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG4EI16V, MASK_VSOXSEG4EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3830. +{"vloxseg5ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG5EI16V, MASK_VLOXSEG5EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3831. +{"vsoxseg5ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG5EI16V, MASK_VSOXSEG5EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3832. +{"vloxseg6ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG6EI16V, MASK_VLOXSEG6EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3833. +{"vsoxseg6ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG6EI16V, MASK_VSOXSEG6EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3834. +{"vloxseg7ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG7EI16V, MASK_VLOXSEG7EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3835. +{"vsoxseg7ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG7EI16V, MASK_VSOXSEG7EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3836. +{"vloxseg8ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG8EI16V, MASK_VLOXSEG8EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3837. +{"vsoxseg8ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG8EI16V, MASK_VSOXSEG8EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3838. +
  3839. +{"vloxseg2ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG2EI32V, MASK_VLOXSEG2EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3840. +{"vsoxseg2ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG2EI32V, MASK_VSOXSEG2EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3841. +{"vloxseg3ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG3EI32V, MASK_VLOXSEG3EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3842. +{"vsoxseg3ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG3EI32V, MASK_VSOXSEG3EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3843. +{"vloxseg4ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG4EI32V, MASK_VLOXSEG4EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3844. +{"vsoxseg4ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG4EI32V, MASK_VSOXSEG4EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3845. +{"vloxseg5ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG5EI32V, MASK_VLOXSEG5EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3846. +{"vsoxseg5ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG5EI32V, MASK_VSOXSEG5EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3847. +{"vloxseg6ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG6EI32V, MASK_VLOXSEG6EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3848. +{"vsoxseg6ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG6EI32V, MASK_VSOXSEG6EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3849. +{"vloxseg7ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG7EI32V, MASK_VLOXSEG7EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3850. +{"vsoxseg7ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG7EI32V, MASK_VSOXSEG7EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3851. +{"vloxseg8ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG8EI32V, MASK_VLOXSEG8EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3852. +{"vsoxseg8ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG8EI32V, MASK_VSOXSEG8EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3853. +
  3854. +{"vloxseg2ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG2EI64V, MASK_VLOXSEG2EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3855. +{"vsoxseg2ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG2EI64V, MASK_VSOXSEG2EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3856. +{"vloxseg3ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG3EI64V, MASK_VLOXSEG3EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3857. +{"vsoxseg3ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG3EI64V, MASK_VSOXSEG3EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3858. +{"vloxseg4ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG4EI64V, MASK_VLOXSEG4EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3859. +{"vsoxseg4ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG4EI64V, MASK_VSOXSEG4EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3860. +{"vloxseg5ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG5EI64V, MASK_VLOXSEG5EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3861. +{"vsoxseg5ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG5EI64V, MASK_VSOXSEG5EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3862. +{"vloxseg6ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG6EI64V, MASK_VLOXSEG6EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3863. +{"vsoxseg6ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG6EI64V, MASK_VSOXSEG6EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3864. +{"vloxseg7ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG7EI64V, MASK_VLOXSEG7EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3865. +{"vsoxseg7ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG7EI64V, MASK_VSOXSEG7EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3866. +{"vloxseg8ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG8EI64V, MASK_VLOXSEG8EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3867. +{"vsoxseg8ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG8EI64V, MASK_VSOXSEG8EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3868. +
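+/* Vector indexed-unordered segment loads and stores.  */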
  3869. +{"vluxseg2ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG2EI8V, MASK_VLUXSEG2EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3870. +{"vsuxseg2ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG2EI8V, MASK_VSUXSEG2EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3871. +{"vluxseg3ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG3EI8V, MASK_VLUXSEG3EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3872. +{"vsuxseg3ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG3EI8V, MASK_VSUXSEG3EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3873. +{"vluxseg4ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG4EI8V, MASK_VLUXSEG4EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3874. +{"vsuxseg4ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG4EI8V, MASK_VSUXSEG4EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3875. +{"vluxseg5ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG5EI8V, MASK_VLUXSEG5EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3876. +{"vsuxseg5ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG5EI8V, MASK_VSUXSEG5EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3877. +{"vluxseg6ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG6EI8V, MASK_VLUXSEG6EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3878. +{"vsuxseg6ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG6EI8V, MASK_VSUXSEG6EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3879. +{"vluxseg7ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG7EI8V, MASK_VLUXSEG7EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3880. +{"vsuxseg7ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG7EI8V, MASK_VSUXSEG7EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3881. +{"vluxseg8ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG8EI8V, MASK_VLUXSEG8EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3882. +{"vsuxseg8ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG8EI8V, MASK_VSUXSEG8EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3883. +
  3884. +{"vluxseg2ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG2EI16V, MASK_VLUXSEG2EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3885. +{"vsuxseg2ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG2EI16V, MASK_VSUXSEG2EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3886. +{"vluxseg3ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG3EI16V, MASK_VLUXSEG3EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3887. +{"vsuxseg3ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG3EI16V, MASK_VSUXSEG3EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3888. +{"vluxseg4ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG4EI16V, MASK_VLUXSEG4EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3889. +{"vsuxseg4ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG4EI16V, MASK_VSUXSEG4EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3890. +{"vluxseg5ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG5EI16V, MASK_VLUXSEG5EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3891. +{"vsuxseg5ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG5EI16V, MASK_VSUXSEG5EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3892. +{"vluxseg6ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG6EI16V, MASK_VLUXSEG6EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3893. +{"vsuxseg6ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG6EI16V, MASK_VSUXSEG6EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3894. +{"vluxseg7ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG7EI16V, MASK_VLUXSEG7EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3895. +{"vsuxseg7ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG7EI16V, MASK_VSUXSEG7EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3896. +{"vluxseg8ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG8EI16V, MASK_VLUXSEG8EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3897. +{"vsuxseg8ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG8EI16V, MASK_VSUXSEG8EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3898. +
  3899. +{"vluxseg2ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG2EI32V, MASK_VLUXSEG2EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3900. +{"vsuxseg2ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG2EI32V, MASK_VSUXSEG2EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3901. +{"vluxseg3ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG3EI32V, MASK_VLUXSEG3EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3902. +{"vsuxseg3ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG3EI32V, MASK_VSUXSEG3EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3903. +{"vluxseg4ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG4EI32V, MASK_VLUXSEG4EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3904. +{"vsuxseg4ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG4EI32V, MASK_VSUXSEG4EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3905. +{"vluxseg5ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG5EI32V, MASK_VLUXSEG5EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3906. +{"vsuxseg5ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG5EI32V, MASK_VSUXSEG5EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3907. +{"vluxseg6ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG6EI32V, MASK_VLUXSEG6EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3908. +{"vsuxseg6ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG6EI32V, MASK_VSUXSEG6EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3909. +{"vluxseg7ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG7EI32V, MASK_VLUXSEG7EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3910. +{"vsuxseg7ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG7EI32V, MASK_VSUXSEG7EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3911. +{"vluxseg8ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG8EI32V, MASK_VLUXSEG8EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3912. +{"vsuxseg8ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG8EI32V, MASK_VSUXSEG8EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3913. +
  3914. +{"vluxseg2ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG2EI64V, MASK_VLUXSEG2EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3915. +{"vsuxseg2ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG2EI64V, MASK_VSUXSEG2EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3916. +{"vluxseg3ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG3EI64V, MASK_VLUXSEG3EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3917. +{"vsuxseg3ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG3EI64V, MASK_VSUXSEG3EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3918. +{"vluxseg4ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG4EI64V, MASK_VLUXSEG4EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3919. +{"vsuxseg4ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG4EI64V, MASK_VSUXSEG4EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3920. +{"vluxseg5ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG5EI64V, MASK_VLUXSEG5EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3921. +{"vsuxseg5ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG5EI64V, MASK_VSUXSEG5EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3922. +{"vluxseg6ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG6EI64V, MASK_VLUXSEG6EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3923. +{"vsuxseg6ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG6EI64V, MASK_VSUXSEG6EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3924. +{"vluxseg7ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG7EI64V, MASK_VLUXSEG7EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3925. +{"vsuxseg7ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG7EI64V, MASK_VSUXSEG7EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3926. +{"vluxseg8ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG8EI64V, MASK_VLUXSEG8EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3927. +{"vsuxseg8ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG8EI64V, MASK_VSUXSEG8EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
  3928. +
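+/* Vector unit-stride fault-only-first segment loads.  */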
  3929. +{"vlseg2e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E8FFV, MASK_VLSEG2E8FFV, match_vd_neq_vm, INSN_DREF },
  3930. +{"vlseg3e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E8FFV, MASK_VLSEG3E8FFV, match_vd_neq_vm, INSN_DREF },
  3931. +{"vlseg4e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E8FFV, MASK_VLSEG4E8FFV, match_vd_neq_vm, INSN_DREF },
  3932. +{"vlseg5e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E8FFV, MASK_VLSEG5E8FFV, match_vd_neq_vm, INSN_DREF },
  3933. +{"vlseg6e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E8FFV, MASK_VLSEG6E8FFV, match_vd_neq_vm, INSN_DREF },
  3934. +{"vlseg7e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E8FFV, MASK_VLSEG7E8FFV, match_vd_neq_vm, INSN_DREF },
  3935. +{"vlseg8e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E8FFV, MASK_VLSEG8E8FFV, match_vd_neq_vm, INSN_DREF },
  3936. +
  3937. +{"vlseg2e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E16FFV, MASK_VLSEG2E16FFV, match_vd_neq_vm, INSN_DREF },
  3938. +{"vlseg3e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E16FFV, MASK_VLSEG3E16FFV, match_vd_neq_vm, INSN_DREF },
  3939. +{"vlseg4e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E16FFV, MASK_VLSEG4E16FFV, match_vd_neq_vm, INSN_DREF },
  3940. +{"vlseg5e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E16FFV, MASK_VLSEG5E16FFV, match_vd_neq_vm, INSN_DREF },
  3941. +{"vlseg6e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E16FFV, MASK_VLSEG6E16FFV, match_vd_neq_vm, INSN_DREF },
  3942. +{"vlseg7e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E16FFV, MASK_VLSEG7E16FFV, match_vd_neq_vm, INSN_DREF },
  3943. +{"vlseg8e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E16FFV, MASK_VLSEG8E16FFV, match_vd_neq_vm, INSN_DREF },
  3944. +
  3945. +{"vlseg2e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E32FFV, MASK_VLSEG2E32FFV, match_vd_neq_vm, INSN_DREF },
  3946. +{"vlseg3e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E32FFV, MASK_VLSEG3E32FFV, match_vd_neq_vm, INSN_DREF },
  3947. +{"vlseg4e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E32FFV, MASK_VLSEG4E32FFV, match_vd_neq_vm, INSN_DREF },
  3948. +{"vlseg5e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E32FFV, MASK_VLSEG5E32FFV, match_vd_neq_vm, INSN_DREF },
  3949. +{"vlseg6e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E32FFV, MASK_VLSEG6E32FFV, match_vd_neq_vm, INSN_DREF },
  3950. +{"vlseg7e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E32FFV, MASK_VLSEG7E32FFV, match_vd_neq_vm, INSN_DREF },
  3951. +{"vlseg8e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E32FFV, MASK_VLSEG8E32FFV, match_vd_neq_vm, INSN_DREF },
  3952. +
  3953. +{"vlseg2e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E64FFV, MASK_VLSEG2E64FFV, match_vd_neq_vm, INSN_DREF },
  3954. +{"vlseg3e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E64FFV, MASK_VLSEG3E64FFV, match_vd_neq_vm, INSN_DREF },
  3955. +{"vlseg4e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E64FFV, MASK_VLSEG4E64FFV, match_vd_neq_vm, INSN_DREF },
  3956. +{"vlseg5e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E64FFV, MASK_VLSEG5E64FFV, match_vd_neq_vm, INSN_DREF },
  3957. +{"vlseg6e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E64FFV, MASK_VLSEG6E64FFV, match_vd_neq_vm, INSN_DREF },
  3958. +{"vlseg7e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E64FFV, MASK_VLSEG7E64FFV, match_vd_neq_vm, INSN_DREF },
  3959. +{"vlseg8e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E64FFV, MASK_VLSEG8E64FFV, match_vd_neq_vm, INSN_DREF },
  3960. +
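+/* Vector whole-register loads and stores.  */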
  3961. +{"vl1r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL1RE8V, MASK_VL1RE8V, match_vls_nf_rv, INSN_DREF|INSN_ALIAS },
  3962. +{"vl1re8.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL1RE8V, MASK_VL1RE8V, match_vls_nf_rv, INSN_DREF },
  3963. +{"vl1re16.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL1RE16V, MASK_VL1RE16V, match_vls_nf_rv, INSN_DREF },
  3964. +{"vl1re32.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL1RE32V, MASK_VL1RE32V, match_vls_nf_rv, INSN_DREF },
  3965. +{"vl1re64.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL1RE64V, MASK_VL1RE64V, match_vls_nf_rv, INSN_DREF },
  3966. +
  3967. +{"vl2r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL2RE8V, MASK_VL2RE8V, match_vls_nf_rv, INSN_DREF|INSN_ALIAS },
  3968. +{"vl2re8.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL2RE8V, MASK_VL2RE8V, match_vls_nf_rv, INSN_DREF },
  3969. +{"vl2re16.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL2RE16V, MASK_VL2RE16V, match_vls_nf_rv, INSN_DREF },
  3970. +{"vl2re32.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL2RE32V, MASK_VL2RE32V, match_vls_nf_rv, INSN_DREF },
  3971. +{"vl2re64.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL2RE64V, MASK_VL2RE64V, match_vls_nf_rv, INSN_DREF },
  3972. +
  3973. +{"vl4r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL4RE8V, MASK_VL4RE8V, match_vls_nf_rv, INSN_DREF|INSN_ALIAS },
  3974. +{"vl4re8.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL4RE8V, MASK_VL4RE8V, match_vls_nf_rv, INSN_DREF },
  3975. +{"vl4re16.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL4RE16V, MASK_VL4RE16V, match_vls_nf_rv, INSN_DREF },
  3976. +{"vl4re32.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL4RE32V, MASK_VL4RE32V, match_vls_nf_rv, INSN_DREF },
  3977. +{"vl4re64.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL4RE64V, MASK_VL4RE64V, match_vls_nf_rv, INSN_DREF },
  3978. +
  3979. +{"vl8r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL8RE8V, MASK_VL8RE8V, match_vls_nf_rv, INSN_DREF|INSN_ALIAS },
  3980. +{"vl8re8.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL8RE8V, MASK_VL8RE8V, match_vls_nf_rv, INSN_DREF },
  3981. +{"vl8re16.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL8RE16V, MASK_VL8RE16V, match_vls_nf_rv, INSN_DREF },
  3982. +{"vl8re32.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL8RE32V, MASK_VL8RE32V, match_vls_nf_rv, INSN_DREF },
  3983. +{"vl8re64.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL8RE64V, MASK_VL8RE64V, match_vls_nf_rv, INSN_DREF },
  3984. +
  3985. +{"vs1r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VS1RV, MASK_VS1RV, match_vls_nf_rv, INSN_DREF },
  3986. +{"vs2r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VS2RV, MASK_VS2RV, match_vls_nf_rv, INSN_DREF },
  3987. +{"vs4r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VS4RV, MASK_VS4RV, match_vls_nf_rv, INSN_DREF },
  3988. +{"vs8r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VS8RV, MASK_VS8RV, match_vls_nf_rv, INSN_DREF },
  3989. +
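+/* Vector AMO instructions (Zvamo).  */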
  3990. +{"vamoaddei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOADDEI8V, MASK_VAMOADDEI8V, match_vd_neq_vm, INSN_DREF},
  3991. +{"vamoswapei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOSWAPEI8V, MASK_VAMOSWAPEI8V, match_vd_neq_vm, INSN_DREF},
  3992. +{"vamoxorei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOXOREI8V, MASK_VAMOXOREI8V, match_vd_neq_vm, INSN_DREF},
  3993. +{"vamoandei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOANDEI8V, MASK_VAMOANDEI8V, match_vd_neq_vm, INSN_DREF},
  3994. +{"vamoorei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOOREI8V, MASK_VAMOOREI8V, match_vd_neq_vm, INSN_DREF},
  3995. +{"vamominei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINEI8V, MASK_VAMOMINEI8V, match_vd_neq_vm, INSN_DREF},
  3996. +{"vamomaxei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXEI8V, MASK_VAMOMAXEI8V, match_vd_neq_vm, INSN_DREF},
  3997. +{"vamominuei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINUEI8V, MASK_VAMOMINUEI8V, match_vd_neq_vm, INSN_DREF},
  3998. +{"vamomaxuei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXUEI8V, MASK_VAMOMAXUEI8V, match_vd_neq_vm, INSN_DREF},
  3999. +
  4000. +{"vamoaddei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOADDEI16V, MASK_VAMOADDEI16V, match_vd_neq_vm, INSN_DREF},
  4001. +{"vamoswapei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOSWAPEI16V, MASK_VAMOSWAPEI16V, match_vd_neq_vm, INSN_DREF},
  4002. +{"vamoxorei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOXOREI16V, MASK_VAMOXOREI16V, match_vd_neq_vm, INSN_DREF},
  4003. +{"vamoandei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOANDEI16V, MASK_VAMOANDEI16V, match_vd_neq_vm, INSN_DREF},
  4004. +{"vamoorei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOOREI16V, MASK_VAMOOREI16V, match_vd_neq_vm, INSN_DREF},
  4005. +{"vamominei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINEI16V, MASK_VAMOMINEI16V, match_vd_neq_vm, INSN_DREF},
  4006. +{"vamomaxei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXEI16V, MASK_VAMOMAXEI16V, match_vd_neq_vm, INSN_DREF},
  4007. +{"vamominuei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINUEI16V, MASK_VAMOMINUEI16V, match_vd_neq_vm, INSN_DREF},
  4008. +{"vamomaxuei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXUEI16V, MASK_VAMOMAXUEI16V, match_vd_neq_vm, INSN_DREF},
  4009. +
  4010. +{"vamoaddei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOADDEI32V, MASK_VAMOADDEI32V, match_vd_neq_vm, INSN_DREF},
  4011. +{"vamoswapei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOSWAPEI32V, MASK_VAMOSWAPEI32V, match_vd_neq_vm, INSN_DREF},
  4012. +{"vamoxorei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOXOREI32V, MASK_VAMOXOREI32V, match_vd_neq_vm, INSN_DREF},
  4013. +{"vamoandei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOANDEI32V, MASK_VAMOANDEI32V, match_vd_neq_vm, INSN_DREF},
  4014. +{"vamoorei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOOREI32V, MASK_VAMOOREI32V, match_vd_neq_vm, INSN_DREF},
  4015. +{"vamominei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINEI32V, MASK_VAMOMINEI32V, match_vd_neq_vm, INSN_DREF},
  4016. +{"vamomaxei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXEI32V, MASK_VAMOMAXEI32V, match_vd_neq_vm, INSN_DREF},
  4017. +{"vamominuei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINUEI32V, MASK_VAMOMINUEI32V, match_vd_neq_vm, INSN_DREF},
  4018. +{"vamomaxuei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXUEI32V, MASK_VAMOMAXUEI32V, match_vd_neq_vm, INSN_DREF},
  4019. +
  4020. +{"vamoaddei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOADDEI64V, MASK_VAMOADDEI64V, match_vd_neq_vm, INSN_DREF},
  4021. +{"vamoswapei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOSWAPEI64V, MASK_VAMOSWAPEI64V, match_vd_neq_vm, INSN_DREF},
  4022. +{"vamoxorei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOXOREI64V, MASK_VAMOXOREI64V, match_vd_neq_vm, INSN_DREF},
  4023. +{"vamoandei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOANDEI64V, MASK_VAMOANDEI64V, match_vd_neq_vm, INSN_DREF},
  4024. +{"vamoorei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOOREI64V, MASK_VAMOOREI64V, match_vd_neq_vm, INSN_DREF},
  4025. +{"vamominei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINEI64V, MASK_VAMOMINEI64V, match_vd_neq_vm, INSN_DREF},
  4026. +{"vamomaxei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXEI64V, MASK_VAMOMAXEI64V, match_vd_neq_vm, INSN_DREF},
  4027. +{"vamominuei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINUEI64V, MASK_VAMOMINUEI64V, match_vd_neq_vm, INSN_DREF},
  4028. +{"vamomaxuei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXUEI64V, MASK_VAMOMAXUEI64V, match_vd_neq_vm, INSN_DREF},
  4029. +
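+/* Vector single-width integer add and subtract.  */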
  4030. +{"vneg.v", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VRSUBVX, MASK_VRSUBVX | MASK_RS1, match_vd_neq_vm, INSN_ALIAS },
  4031. +
  4032. +{"vadd.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VADDVV, MASK_VADDVV, match_vd_neq_vm, 0 },
  4033. +{"vadd.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VADDVX, MASK_VADDVX, match_vd_neq_vm, 0 },
  4034. +{"vadd.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VADDVI, MASK_VADDVI, match_vd_neq_vm, 0 },
  4035. +{"vsub.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSUBVV, MASK_VSUBVV, match_vd_neq_vm, 0 },
  4036. +{"vsub.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSUBVX, MASK_VSUBVX, match_vd_neq_vm, 0 },
  4037. +{"vrsub.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VRSUBVX, MASK_VRSUBVX, match_vd_neq_vm, 0 },
  4038. +{"vrsub.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VRSUBVI, MASK_VRSUBVI, match_vd_neq_vm, 0 },
  4039. +
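+/* Vector widening integer add and subtract.  */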
  4040. +{"vwcvt.x.x.v", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VWCVTXXV, MASK_VWCVTXXV, match_widen_vd_neq_vs2_neq_vm, INSN_ALIAS },
  4041. +{"vwcvtu.x.x.v", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VWCVTUXXV, MASK_VWCVTUXXV, match_widen_vd_neq_vs2_neq_vm, INSN_ALIAS },
  4042. +
  4043. +{"vwaddu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWADDUVV, MASK_VWADDUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
  4044. +{"vwaddu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWADDUVX, MASK_VWADDUVX, match_widen_vd_neq_vs2_neq_vm, 0 },
  4045. +{"vwsubu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWSUBUVV, MASK_VWSUBUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
  4046. +{"vwsubu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWSUBUVX, MASK_VWSUBUVX, match_widen_vd_neq_vs2_neq_vm, 0 },
  4047. +{"vwadd.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWADDVV, MASK_VWADDVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
  4048. +{"vwadd.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWADDVX, MASK_VWADDVX, match_widen_vd_neq_vs2_neq_vm, 0 },
  4049. +{"vwsub.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWSUBVV, MASK_VWSUBVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
  4050. +{"vwsub.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWSUBVX, MASK_VWSUBVX, match_widen_vd_neq_vs2_neq_vm, 0 },
  4051. +{"vwaddu.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWADDUWV, MASK_VWADDUWV, match_widen_vd_neq_vs1_neq_vm, 0 },
  4052. +{"vwaddu.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWADDUWX, MASK_VWADDUWX, match_widen_vd_neq_vm, 0 },
  4053. +{"vwsubu.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWSUBUWV, MASK_VWSUBUWV, match_widen_vd_neq_vs1_neq_vm, 0 },
  4054. +{"vwsubu.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWSUBUWX, MASK_VWSUBUWX, match_widen_vd_neq_vm, 0 },
  4055. +{"vwadd.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWADDWV, MASK_VWADDWV, match_widen_vd_neq_vs1_neq_vm, 0 },
  4056. +{"vwadd.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWADDWX, MASK_VWADDWX, match_widen_vd_neq_vm, 0 },
  4057. +{"vwsub.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWSUBWV, MASK_VWSUBWV, match_widen_vd_neq_vs1_neq_vm, 0 },
  4058. +{"vwsub.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWSUBWX, MASK_VWSUBWX, match_widen_vd_neq_vm, 0 },
  4059. +
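+/* Vector integer extension.  */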
  4060. +{"vzext.vf2", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VZEXT_VF2, MASK_VZEXT_VF2, match_vd_neq_vm, 0 },
  4061. +{"vsext.vf2", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VSEXT_VF2, MASK_VSEXT_VF2, match_vd_neq_vm, 0 },
  4062. +{"vzext.vf4", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VZEXT_VF4, MASK_VZEXT_VF4, match_vd_neq_vm, 0 },
  4063. +{"vsext.vf4", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VSEXT_VF4, MASK_VSEXT_VF4, match_vd_neq_vm, 0 },
  4064. +{"vzext.vf8", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VZEXT_VF8, MASK_VZEXT_VF8, match_vd_neq_vm, 0 },
  4065. +{"vsext.vf8", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VSEXT_VF8, MASK_VSEXT_VF8, match_vd_neq_vm, 0 },
  4066. +
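+/* Vector integer add-with-carry and subtract-with-borrow.  */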
  4067. +{"vadc.vvm", 0, INSN_CLASS_V, "Vd,Vt,Vs,V0", MATCH_VADCVVM, MASK_VADCVVM, match_vd_neq_vm, 0 },
  4068. +{"vadc.vxm", 0, INSN_CLASS_V, "Vd,Vt,s,V0", MATCH_VADCVXM, MASK_VADCVXM, match_vd_neq_vm, 0 },
  4069. +{"vadc.vim", 0, INSN_CLASS_V, "Vd,Vt,Vi,V0", MATCH_VADCVIM, MASK_VADCVIM, match_vd_neq_vm, 0 },
  4070. +{"vmadc.vvm", 0, INSN_CLASS_V, "Vd,Vt,Vs,V0", MATCH_VMADCVVM, MASK_VMADCVVM, match_opcode, 0 },
  4071. +{"vmadc.vxm", 0, INSN_CLASS_V, "Vd,Vt,s,V0", MATCH_VMADCVXM, MASK_VMADCVXM, match_opcode, 0 },
  4072. +{"vmadc.vim", 0, INSN_CLASS_V, "Vd,Vt,Vi,V0", MATCH_VMADCVIM, MASK_VMADCVIM, match_opcode, 0 },
  4073. +{"vmadc.vv", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMADCVV, MASK_VMADCVV, match_opcode, 0 },
  4074. +{"vmadc.vx", 0, INSN_CLASS_V, "Vd,Vt,s", MATCH_VMADCVX, MASK_VMADCVX, match_opcode, 0 },
  4075. +{"vmadc.vi", 0, INSN_CLASS_V, "Vd,Vt,Vi", MATCH_VMADCVI, MASK_VMADCVI, match_opcode, 0 },
  4076. +{"vsbc.vvm", 0, INSN_CLASS_V, "Vd,Vt,Vs,V0", MATCH_VSBCVVM, MASK_VSBCVVM, match_vd_neq_vm, 0 },
  4077. +{"vsbc.vxm", 0, INSN_CLASS_V, "Vd,Vt,s,V0", MATCH_VSBCVXM, MASK_VSBCVXM, match_vd_neq_vm, 0 },
  4078. +{"vmsbc.vvm", 0, INSN_CLASS_V, "Vd,Vt,Vs,V0", MATCH_VMSBCVVM, MASK_VMSBCVVM, match_opcode, 0 },
  4079. +{"vmsbc.vxm", 0, INSN_CLASS_V, "Vd,Vt,s,V0", MATCH_VMSBCVXM, MASK_VMSBCVXM, match_opcode, 0 },
  4080. +{"vmsbc.vv", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMSBCVV, MASK_VMSBCVV, match_opcode, 0 },
  4081. +{"vmsbc.vx", 0, INSN_CLASS_V, "Vd,Vt,s", MATCH_VMSBCVX, MASK_VMSBCVX, match_opcode, 0 },
  4082. +
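+/* Vector bitwise logical instructions.  */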
  4083. +{"vnot.v", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VNOTV, MASK_VNOTV, match_vd_neq_vm, INSN_ALIAS },
  4084. +
  4085. +{"vand.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VANDVV, MASK_VANDVV, match_vd_neq_vm, 0 },
  4086. +{"vand.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VANDVX, MASK_VANDVX, match_vd_neq_vm, 0 },
  4087. +{"vand.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VANDVI, MASK_VANDVI, match_vd_neq_vm, 0 },
  4088. +{"vor.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VORVV, MASK_VORVV, match_vd_neq_vm, 0 },
  4089. +{"vor.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VORVX, MASK_VORVX, match_vd_neq_vm, 0 },
  4090. +{"vor.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VORVI, MASK_VORVI, match_vd_neq_vm, 0 },
  4091. +{"vxor.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VXORVV, MASK_VXORVV, match_vd_neq_vm, 0 },
  4092. +{"vxor.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VXORVX, MASK_VXORVX, match_vd_neq_vm, 0 },
  4093. +{"vxor.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VXORVI, MASK_VXORVI, match_vd_neq_vm, 0 },
  4094. +
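+/* Vector single-width bit shifts.  */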
  4095. +{"vsll.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSLLVV, MASK_VSLLVV, match_vd_neq_vm, 0 },
  4096. +{"vsll.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSLLVX, MASK_VSLLVX, match_vd_neq_vm, 0 },
  4097. +{"vsll.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSLLVI, MASK_VSLLVI, match_vd_neq_vm, 0 },
  4098. +{"vsrl.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSRLVV, MASK_VSRLVV, match_vd_neq_vm, 0 },
  4099. +{"vsrl.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSRLVX, MASK_VSRLVX, match_vd_neq_vm, 0 },
  4100. +{"vsrl.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSRLVI, MASK_VSRLVI, match_vd_neq_vm, 0 },
  4101. +{"vsra.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSRAVV, MASK_VSRAVV, match_vd_neq_vm, 0 },
  4102. +{"vsra.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSRAVX, MASK_VSRAVX, match_vd_neq_vm, 0 },
  4103. +{"vsra.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSRAVI, MASK_VSRAVI, match_vd_neq_vm, 0 },
  4104. +
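+/* Vector narrowing integer right shifts.  */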
  4105. +{"vncvt.x.x.w",0, INSN_CLASS_V, "Vd,VtVm", MATCH_VNCVTXXW, MASK_VNCVTXXW, match_narrow_vd_neq_vs2_neq_vm, INSN_ALIAS },
  4106. +
  4107. +{"vnsrl.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VNSRLWV, MASK_VNSRLWV, match_narrow_vd_neq_vs2_neq_vm, 0 },
  4108. +{"vnsrl.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VNSRLWX, MASK_VNSRLWX, match_narrow_vd_neq_vs2_neq_vm, 0 },
  4109. +{"vnsrl.wi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VNSRLWI, MASK_VNSRLWI, match_narrow_vd_neq_vs2_neq_vm, 0 },
  4110. +{"vnsra.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VNSRAWV, MASK_VNSRAWV, match_narrow_vd_neq_vs2_neq_vm, 0 },
  4111. +{"vnsra.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VNSRAWX, MASK_VNSRAWX, match_narrow_vd_neq_vs2_neq_vm, 0 },
  4112. +{"vnsra.wi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VNSRAWI, MASK_VNSRAWI, match_narrow_vd_neq_vs2_neq_vm, 0 },
  4113. +
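+/* Vector integer comparisons.  */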
  4114. +{"vmseq.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSEQVV, MASK_VMSEQVV, match_opcode, 0 },
  4115. +{"vmseq.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSEQVX, MASK_VMSEQVX, match_opcode, 0 },
  4116. +{"vmseq.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSEQVI, MASK_VMSEQVI, match_opcode, 0 },
  4117. +{"vmsne.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSNEVV, MASK_VMSNEVV, match_opcode, 0 },
  4118. +{"vmsne.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSNEVX, MASK_VMSNEVX, match_opcode, 0 },
  4119. +{"vmsne.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSNEVI, MASK_VMSNEVI, match_opcode, 0 },
  4120. +{"vmsltu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSLTUVV, MASK_VMSLTUVV, match_opcode, 0 },
  4121. +{"vmsltu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSLTUVX, MASK_VMSLTUVX, match_opcode, 0 },
  4122. +{"vmslt.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSLTVV, MASK_VMSLTVV, match_opcode, 0 },
  4123. +{"vmslt.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSLTVX, MASK_VMSLTVX, match_opcode, 0 },
  4124. +{"vmsleu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSLEUVV, MASK_VMSLEUVV, match_opcode, 0 },
  4125. +{"vmsleu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSLEUVX, MASK_VMSLEUVX, match_opcode, 0 },
  4126. +{"vmsleu.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSLEUVI, MASK_VMSLEUVI, match_opcode, 0 },
  4127. +{"vmsle.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSLEVV, MASK_VMSLEVV, match_opcode, 0 },
  4128. +{"vmsle.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSLEVX, MASK_VMSLEVX, match_opcode, 0 },
  4129. +{"vmsle.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSLEVI, MASK_VMSLEVI, match_opcode, 0 },
  4130. +{"vmsgtu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSGTUVX, MASK_VMSGTUVX, match_opcode, 0 },
  4131. +{"vmsgtu.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSGTUVI, MASK_VMSGTUVI, match_opcode, 0 },
  4132. +{"vmsgt.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSGTVX, MASK_VMSGTVX, match_opcode, 0 },
  4133. +{"vmsgt.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSGTVI, MASK_VMSGTVI, match_opcode, 0 },
  4134. +
  4135. +/* These aliases are for assembly but not disassembly. */
  4136. +{"vmsgt.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMSLTVV, MASK_VMSLTVV, match_opcode, INSN_ALIAS },
  4137. +{"vmsgtu.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMSLTUVV, MASK_VMSLTUVV, match_opcode, INSN_ALIAS },
  4138. +{"vmsge.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMSLEVV, MASK_VMSLEVV, match_opcode, INSN_ALIAS },
  4139. +{"vmsgeu.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMSLEUVV, MASK_VMSLEUVV, match_opcode, INSN_ALIAS },
  4140. +{"vmslt.vi", 0, INSN_CLASS_V, "Vd,Vt,VkVm", MATCH_VMSLEVI, MASK_VMSLEVI, match_opcode, INSN_ALIAS },
  4141. +{"vmsltu.vi", 0, INSN_CLASS_V, "Vd,Vu,0Vm", MATCH_VMSNEVV, MASK_VMSNEVV, match_opcode, INSN_ALIAS },
  4142. +{"vmsltu.vi", 0, INSN_CLASS_V, "Vd,Vt,VkVm", MATCH_VMSLEUVI, MASK_VMSLEUVI, match_opcode, INSN_ALIAS },
  4143. +{"vmsge.vi", 0, INSN_CLASS_V, "Vd,Vt,VkVm", MATCH_VMSGTVI, MASK_VMSGTVI, match_opcode, INSN_ALIAS },
  4144. +{"vmsgeu.vi", 0, INSN_CLASS_V, "Vd,Vu,0Vm", MATCH_VMSEQVV, MASK_VMSEQVV, match_opcode, INSN_ALIAS },
  4145. +{"vmsgeu.vi", 0, INSN_CLASS_V, "Vd,Vt,VkVm", MATCH_VMSGTUVI, MASK_VMSGTUVI, match_opcode, INSN_ALIAS },
  4146. +
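+/* The vmsge{u}.vx pseudo-instructions have no direct encoding; the
+   assembler expands them, e.g. the unmasked "vmsge.vx vd, vs2, rs1"
+   becomes "vmslt.vx vd, vs2, rs1" followed by "vmnot.m vd, vd".  */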
  4147. +{"vmsge.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", 0, (int) M_VMSGE, match_never, INSN_MACRO },
  4148. +{"vmsge.vx", 0, INSN_CLASS_V, "Vd,Vt,s,VM,VT", 0, (int) M_VMSGE, match_never, INSN_MACRO },
  4149. +{"vmsgeu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", 0, (int) M_VMSGEU, match_never, INSN_MACRO },
  4150. +{"vmsgeu.vx", 0, INSN_CLASS_V, "Vd,Vt,s,VM,VT", 0, (int) M_VMSGEU, match_never, INSN_MACRO },
  4151. +
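+/* Vector integer min/max.  */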
  4152. +{"vminu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMINUVV, MASK_VMINUVV, match_vd_neq_vm, 0},
  4153. +{"vminu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMINUVX, MASK_VMINUVX, match_vd_neq_vm, 0},
  4154. +{"vmin.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMINVV, MASK_VMINVV, match_vd_neq_vm, 0},
  4155. +{"vmin.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMINVX, MASK_VMINVX, match_vd_neq_vm, 0},
  4156. +{"vmaxu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMAXUVV, MASK_VMAXUVV, match_vd_neq_vm, 0},
  4157. +{"vmaxu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMAXUVX, MASK_VMAXUVX, match_vd_neq_vm, 0},
  4158. +{"vmax.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMAXVV, MASK_VMAXVV, match_vd_neq_vm, 0},
  4159. +{"vmax.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMAXVX, MASK_VMAXVX, match_vd_neq_vm, 0},
  4160. +
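+/* Vector single-width integer multiply.  */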
  4161. +{"vmul.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMULVV, MASK_VMULVV, match_vd_neq_vm, 0 },
  4162. +{"vmul.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMULVX, MASK_VMULVX, match_vd_neq_vm, 0 },
  4163. +{"vmulh.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMULHVV, MASK_VMULHVV, match_vd_neq_vm, 0 },
  4164. +{"vmulh.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMULHVX, MASK_VMULHVX, match_vd_neq_vm, 0 },
  4165. +{"vmulhu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMULHUVV, MASK_VMULHUVV, match_vd_neq_vm, 0 },
  4166. +{"vmulhu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMULHUVX, MASK_VMULHUVX, match_vd_neq_vm, 0 },
  4167. +{"vmulhsu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMULHSUVV, MASK_VMULHSUVV, match_vd_neq_vm, 0 },
  4168. +{"vmulhsu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMULHSUVX, MASK_VMULHSUVX, match_vd_neq_vm, 0 },
  4169. +
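+/* Vector widening integer multiply.  */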
  4170. +{"vwmul.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWMULVV, MASK_VWMULVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
  4171. +{"vwmul.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWMULVX, MASK_VWMULVX, match_widen_vd_neq_vs2_neq_vm, 0 },
  4172. +{"vwmulu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWMULUVV, MASK_VWMULUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
  4173. +{"vwmulu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWMULUVX, MASK_VWMULUVX, match_widen_vd_neq_vs2_neq_vm, 0 },
  4174. +{"vwmulsu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWMULSUVV, MASK_VWMULSUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
  4175. +{"vwmulsu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWMULSUVX, MASK_VWMULSUVX, match_widen_vd_neq_vs2_neq_vm, 0 },
  4176. +
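+/* Vector single-width integer multiply-add.  */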
  4177. +{"vmacc.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMACCVV, MASK_VMACCVV, match_vd_neq_vm, 0},
  4178. +{"vmacc.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VMACCVX, MASK_VMACCVX, match_vd_neq_vm, 0},
  4179. +{"vnmsac.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VNMSACVV, MASK_VNMSACVV, match_vd_neq_vm, 0},
  4180. +{"vnmsac.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VNMSACVX, MASK_VNMSACVX, match_vd_neq_vm, 0},
  4181. +{"vmadd.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMADDVV, MASK_VMADDVV, match_vd_neq_vm, 0},
  4182. +{"vmadd.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VMADDVX, MASK_VMADDVX, match_vd_neq_vm, 0},
  4183. +{"vnmsub.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VNMSUBVV, MASK_VNMSUBVV, match_vd_neq_vm, 0},
  4184. +{"vnmsub.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VNMSUBVX, MASK_VNMSUBVX, match_vd_neq_vm, 0},
  4185. +
  4186. +{"vwmaccu.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VWMACCUVV, MASK_VWMACCUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
  4187. +{"vwmaccu.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VWMACCUVX, MASK_VWMACCUVX, match_widen_vd_neq_vs2_neq_vm, 0},
  4188. +{"vwmacc.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VWMACCVV, MASK_VWMACCVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
  4189. +{"vwmacc.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VWMACCVX, MASK_VWMACCVX, match_widen_vd_neq_vs2_neq_vm, 0},
  4190. +{"vwmaccsu.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VWMACCSUVV, MASK_VWMACCSUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
  4191. +{"vwmaccsu.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VWMACCSUVX, MASK_VWMACCSUVX, match_widen_vd_neq_vs2_neq_vm, 0},
  4192. +{"vwmaccus.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VWMACCUSVX, MASK_VWMACCUSVX, match_widen_vd_neq_vs2_neq_vm, 0},
  4193. +
  4194. +{"vdivu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VDIVUVV, MASK_VDIVUVV, match_vd_neq_vm, 0 },
  4195. +{"vdivu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VDIVUVX, MASK_VDIVUVX, match_vd_neq_vm, 0 },
  4196. +{"vdiv.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VDIVVV, MASK_VDIVVV, match_vd_neq_vm, 0 },
  4197. +{"vdiv.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VDIVVX, MASK_VDIVVX, match_vd_neq_vm, 0 },
  4198. +{"vremu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREMUVV, MASK_VREMUVV, match_vd_neq_vm, 0 },
  4199. +{"vremu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VREMUVX, MASK_VREMUVX, match_vd_neq_vm, 0 },
  4200. +{"vrem.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREMVV, MASK_VREMVV, match_vd_neq_vm, 0 },
  4201. +{"vrem.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VREMVX, MASK_VREMVX, match_vd_neq_vm, 0 },
  4202. +
  4203. +{"vmerge.vvm", 0, INSN_CLASS_V, "Vd,Vt,Vs,V0", MATCH_VMERGEVVM, MASK_VMERGEVVM, match_opcode, 0 },
  4204. +{"vmerge.vxm", 0, INSN_CLASS_V, "Vd,Vt,s,V0", MATCH_VMERGEVXM, MASK_VMERGEVXM, match_opcode, 0 },
  4205. +{"vmerge.vim", 0, INSN_CLASS_V, "Vd,Vt,Vi,V0", MATCH_VMERGEVIM, MASK_VMERGEVIM, match_opcode, 0 },
  4206. +
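+/* vmv.v.{v,x,i} reuse the vmerge encoding: per the draft spec they are
+   the unmasked (vm=1) forms with the vs2 field fixed to v0.  */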
+{"vmv.v.v", 0, INSN_CLASS_V, "Vd,Vs", MATCH_VMVVV, MASK_VMVVV, match_opcode, 0 },
+{"vmv.v.x", 0, INSN_CLASS_V, "Vd,s", MATCH_VMVVX, MASK_VMVVX, match_opcode, 0 },
+{"vmv.v.i", 0, INSN_CLASS_V, "Vd,Vi", MATCH_VMVVI, MASK_VMVVI, match_opcode, 0 },
+
+{"vsaddu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSADDUVV, MASK_VSADDUVV, match_vd_neq_vm, 0 },
+{"vsaddu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSADDUVX, MASK_VSADDUVX, match_vd_neq_vm, 0 },
+{"vsaddu.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VSADDUVI, MASK_VSADDUVI, match_vd_neq_vm, 0 },
+{"vsadd.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSADDVV, MASK_VSADDVV, match_vd_neq_vm, 0 },
+{"vsadd.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSADDVX, MASK_VSADDVX, match_vd_neq_vm, 0 },
+{"vsadd.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VSADDVI, MASK_VSADDVI, match_vd_neq_vm, 0 },
+{"vssubu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSSUBUVV, MASK_VSSUBUVV, match_vd_neq_vm, 0 },
+{"vssubu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSSUBUVX, MASK_VSSUBUVX, match_vd_neq_vm, 0 },
+{"vssub.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSSUBVV, MASK_VSSUBVV, match_vd_neq_vm, 0 },
+{"vssub.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSSUBVX, MASK_VSSUBVX, match_vd_neq_vm, 0 },
+
+{"vaaddu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VAADDUVV, MASK_VAADDUVV, match_vd_neq_vm, 0 },
+{"vaaddu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VAADDUVX, MASK_VAADDUVX, match_vd_neq_vm, 0 },
+{"vaadd.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VAADDVV, MASK_VAADDVV, match_vd_neq_vm, 0 },
+{"vaadd.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VAADDVX, MASK_VAADDVX, match_vd_neq_vm, 0 },
+{"vasubu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VASUBUVV, MASK_VASUBUVV, match_vd_neq_vm, 0 },
+{"vasubu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VASUBUVX, MASK_VASUBUVX, match_vd_neq_vm, 0 },
+{"vasub.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VASUBVV, MASK_VASUBVV, match_vd_neq_vm, 0 },
+{"vasub.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VASUBVX, MASK_VASUBVX, match_vd_neq_vm, 0 },
+
+{"vsmul.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSMULVV, MASK_VSMULVV, match_vd_neq_vm, 0 },
+{"vsmul.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSMULVX, MASK_VSMULVX, match_vd_neq_vm, 0 },
+
+{"vssrl.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSSRLVV, MASK_VSSRLVV, match_vd_neq_vm, 0 },
+{"vssrl.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSSRLVX, MASK_VSSRLVX, match_vd_neq_vm, 0 },
+{"vssrl.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSSRLVI, MASK_VSSRLVI, match_vd_neq_vm, 0 },
+{"vssra.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSSRAVV, MASK_VSSRAVV, match_vd_neq_vm, 0 },
+{"vssra.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSSRAVX, MASK_VSSRAVX, match_vd_neq_vm, 0 },
+{"vssra.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSSRAVI, MASK_VSSRAVI, match_vd_neq_vm, 0 },
+
+{"vnclipu.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VNCLIPUWV, MASK_VNCLIPUWV, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnclipu.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VNCLIPUWX, MASK_VNCLIPUWX, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnclipu.wi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VNCLIPUWI, MASK_VNCLIPUWI, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnclip.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VNCLIPWV, MASK_VNCLIPWV, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnclip.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VNCLIPWX, MASK_VNCLIPWX, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnclip.wi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VNCLIPWI, MASK_VNCLIPWI, match_narrow_vd_neq_vs2_neq_vm, 0 },
+
+{"vfadd.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFADDVV, MASK_VFADDVV, match_vd_neq_vm, 0},
+{"vfadd.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFADDVF, MASK_VFADDVF, match_vd_neq_vm, 0},
+{"vfsub.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFSUBVV, MASK_VFSUBVV, match_vd_neq_vm, 0},
+{"vfsub.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSUBVF, MASK_VFSUBVF, match_vd_neq_vm, 0},
+{"vfrsub.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFRSUBVF, MASK_VFRSUBVF, match_vd_neq_vm, 0},
+
+{"vfwadd.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWADDVV, MASK_VFWADDVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwadd.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFWADDVF, MASK_VFWADDVF, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwsub.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWSUBVV, MASK_VFWSUBVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwsub.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFWSUBVF, MASK_VFWSUBVF, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwadd.wv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWADDWV, MASK_VFWADDWV, match_widen_vd_neq_vs1_neq_vm, 0},
+{"vfwadd.wf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFWADDWF, MASK_VFWADDWF, match_widen_vd_neq_vm, 0},
+{"vfwsub.wv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWSUBWV, MASK_VFWSUBWV, match_widen_vd_neq_vs1_neq_vm, 0},
+{"vfwsub.wf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFWSUBWF, MASK_VFWSUBWF, match_widen_vd_neq_vm, 0},
+
+{"vfmul.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFMULVV, MASK_VFMULVV, match_vd_neq_vm, 0},
+{"vfmul.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFMULVF, MASK_VFMULVF, match_vd_neq_vm, 0},
+{"vfdiv.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFDIVVV, MASK_VFDIVVV, match_vd_neq_vm, 0},
+{"vfdiv.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFDIVVF, MASK_VFDIVVF, match_vd_neq_vm, 0},
+{"vfrdiv.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFRDIVVF, MASK_VFRDIVVF, match_vd_neq_vm, 0},
+
+{"vfwmul.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWMULVV, MASK_VFWMULVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwmul.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFWMULVF, MASK_VFWMULVF, match_widen_vd_neq_vs2_neq_vm, 0},
+
+{"vfmadd.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFMADDVV, MASK_VFMADDVV, match_vd_neq_vm, 0},
+{"vfmadd.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFMADDVF, MASK_VFMADDVF, match_vd_neq_vm, 0},
+{"vfnmadd.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFNMADDVV, MASK_VFNMADDVV, match_vd_neq_vm, 0},
+{"vfnmadd.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFNMADDVF, MASK_VFNMADDVF, match_vd_neq_vm, 0},
+{"vfmsub.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFMSUBVV, MASK_VFMSUBVV, match_vd_neq_vm, 0},
+{"vfmsub.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFMSUBVF, MASK_VFMSUBVF, match_vd_neq_vm, 0},
+{"vfnmsub.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFNMSUBVV, MASK_VFNMSUBVV, match_vd_neq_vm, 0},
+{"vfnmsub.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFNMSUBVF, MASK_VFNMSUBVF, match_vd_neq_vm, 0},
+{"vfmacc.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFMACCVV, MASK_VFMACCVV, match_vd_neq_vm, 0},
+{"vfmacc.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFMACCVF, MASK_VFMACCVF, match_vd_neq_vm, 0},
+{"vfnmacc.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFNMACCVV, MASK_VFNMACCVV, match_vd_neq_vm, 0},
+{"vfnmacc.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFNMACCVF, MASK_VFNMACCVF, match_vd_neq_vm, 0},
+{"vfmsac.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFMSACVV, MASK_VFMSACVV, match_vd_neq_vm, 0},
+{"vfmsac.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFMSACVF, MASK_VFMSACVF, match_vd_neq_vm, 0},
+{"vfnmsac.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFNMSACVV, MASK_VFNMSACVV, match_vd_neq_vm, 0},
+{"vfnmsac.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFNMSACVF, MASK_VFNMSACVF, match_vd_neq_vm, 0},
+
+{"vfwmacc.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFWMACCVV, MASK_VFWMACCVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwmacc.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFWMACCVF, MASK_VFWMACCVF, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwnmacc.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFWNMACCVV, MASK_VFWNMACCVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwnmacc.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFWNMACCVF, MASK_VFWNMACCVF, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwmsac.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFWMSACVV, MASK_VFWMSACVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwmsac.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFWMSACVF, MASK_VFWMSACVF, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwnmsac.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFWNMSACVV, MASK_VFWNMSACVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwnmsac.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFWNMSACVF, MASK_VFWNMSACVF, match_widen_vd_neq_vs2_neq_vm, 0},
+
+{"vfsqrt.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFSQRTV, MASK_VFSQRTV, match_vd_neq_vm, 0},
+{"vfrsqrt7.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFRSQRT7V, MASK_VFRSQRT7V, match_vd_neq_vm, 0},
+{"vfrsqrte7.v",0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFRSQRT7V, MASK_VFRSQRT7V, match_vd_neq_vm, 0},
+{"vfrec7.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFREC7V, MASK_VFREC7V, match_vd_neq_vm, 0},
+{"vfrece7.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFREC7V, MASK_VFREC7V, match_vd_neq_vm, 0},
+{"vfclass.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCLASSV, MASK_VFCLASSV, match_vd_neq_vm, 0},
+
+{"vfmin.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFMINVV, MASK_VFMINVV, match_vd_neq_vm, 0},
+{"vfmin.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFMINVF, MASK_VFMINVF, match_vd_neq_vm, 0},
+{"vfmax.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFMAXVV, MASK_VFMAXVV, match_vd_neq_vm, 0},
+{"vfmax.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFMAXVF, MASK_VFMAXVF, match_vd_neq_vm, 0},
+
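+/* Sign-injection aliases; the Vu operand forces vs1 == vs2, so e.g.
+     vfneg.v vd, vs  ==  vfsgnjn.vv vd, vs, vs
+     vfabs.v vd, vs  ==  vfsgnjx.vv vd, vs, vs  */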
+{"vfneg.v", 0, INSN_CLASS_V_AND_F, "Vd,VuVm", MATCH_VFSGNJNVV, MASK_VFSGNJNVV, match_vs1_eq_vs2_neq_vm, INSN_ALIAS },
+{"vfabs.v", 0, INSN_CLASS_V_AND_F, "Vd,VuVm", MATCH_VFSGNJXVV, MASK_VFSGNJXVV, match_vs1_eq_vs2_neq_vm, INSN_ALIAS },
+
+{"vfsgnj.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFSGNJVV, MASK_VFSGNJVV, match_vd_neq_vm, 0},
+{"vfsgnj.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSGNJVF, MASK_VFSGNJVF, match_vd_neq_vm, 0},
+{"vfsgnjn.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFSGNJNVV, MASK_VFSGNJNVV, match_vd_neq_vm, 0},
+{"vfsgnjn.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSGNJNVF, MASK_VFSGNJNVF, match_vd_neq_vm, 0},
+{"vfsgnjx.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFSGNJXVV, MASK_VFSGNJXVV, match_vd_neq_vm, 0},
+{"vfsgnjx.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSGNJXVF, MASK_VFSGNJXVF, match_vd_neq_vm, 0},
+
+{"vmfeq.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VMFEQVV, MASK_VMFEQVV, match_opcode, 0},
+{"vmfeq.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFEQVF, MASK_VMFEQVF, match_opcode, 0},
+{"vmfne.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VMFNEVV, MASK_VMFNEVV, match_opcode, 0},
+{"vmfne.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFNEVF, MASK_VMFNEVF, match_opcode, 0},
+{"vmflt.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VMFLTVV, MASK_VMFLTVV, match_opcode, 0},
+{"vmflt.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFLTVF, MASK_VMFLTVF, match_opcode, 0},
+{"vmfle.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VMFLEVV, MASK_VMFLEVV, match_opcode, 0},
+{"vmfle.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFLEVF, MASK_VMFLEVF, match_opcode, 0},
+{"vmfgt.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFGTVF, MASK_VMFGTVF, match_opcode, 0},
+{"vmfge.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFGEVF, MASK_VMFGEVF, match_opcode, 0},
+
+/* These aliases are accepted on assembly only and are never produced
+   on disassembly: vmfgt.vv vd, va, vb is encoded as vmflt.vv vd, vb, va
+   (and vmfge.vv as vmfle.vv) with the source operands swapped.  */
+{"vmfgt.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VMFLTVV, MASK_VMFLTVV, match_opcode, INSN_ALIAS},
+{"vmfge.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VMFLEVV, MASK_VMFLEVV, match_opcode, INSN_ALIAS},
+
+{"vfmerge.vfm",0, INSN_CLASS_V_AND_F, "Vd,Vt,S,V0", MATCH_VFMERGEVFM, MASK_VFMERGEVFM, match_opcode, 0},
+{"vfmv.v.f", 0, INSN_CLASS_V_AND_F, "Vd,S", MATCH_VFMVVF, MASK_VFMVVF, match_opcode, 0 },
+
+{"vfcvt.xu.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTXUFV, MASK_VFCVTXUFV, match_vd_neq_vm, 0},
+{"vfcvt.x.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTXFV, MASK_VFCVTXFV, match_vd_neq_vm, 0},
+{"vfcvt.rtz.xu.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTRTZXUFV, MASK_VFCVTRTZXUFV, match_vd_neq_vm, 0},
+{"vfcvt.rtz.x.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTRTZXFV, MASK_VFCVTRTZXFV, match_vd_neq_vm, 0},
+{"vfcvt.f.xu.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTFXUV, MASK_VFCVTFXUV, match_vd_neq_vm, 0},
+{"vfcvt.f.x.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTFXV, MASK_VFCVTFXV, match_vd_neq_vm, 0},
+
+{"vfwcvt.xu.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTXUFV, MASK_VFWCVTXUFV, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwcvt.x.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTXFV, MASK_VFWCVTXFV, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwcvt.rtz.xu.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTRTZXUFV, MASK_VFWCVTRTZXUFV, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwcvt.rtz.x.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTRTZXFV, MASK_VFWCVTRTZXFV, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwcvt.f.xu.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTFXUV, MASK_VFWCVTFXUV, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwcvt.f.x.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTFXV, MASK_VFWCVTFXV, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwcvt.f.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTFFV, MASK_VFWCVTFFV, match_widen_vd_neq_vs2_neq_vm, 0},
+
+{"vfncvt.xu.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTXUFW, MASK_VFNCVTXUFW, match_narrow_vd_neq_vs2_neq_vm, 0},
+{"vfncvt.x.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTXFW, MASK_VFNCVTXFW, match_narrow_vd_neq_vs2_neq_vm, 0},
+{"vfncvt.rtz.xu.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTRTZXUFW, MASK_VFNCVTRTZXUFW, match_narrow_vd_neq_vs2_neq_vm, 0},
+{"vfncvt.rtz.x.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTRTZXFW, MASK_VFNCVTRTZXFW, match_narrow_vd_neq_vs2_neq_vm, 0},
+{"vfncvt.f.xu.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTFXUW, MASK_VFNCVTFXUW, match_narrow_vd_neq_vs2_neq_vm, 0},
+{"vfncvt.f.x.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTFXW, MASK_VFNCVTFXW, match_narrow_vd_neq_vs2_neq_vm, 0},
+{"vfncvt.f.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTFFW, MASK_VFNCVTFFW, match_narrow_vd_neq_vs2_neq_vm, 0},
+{"vfncvt.rod.f.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTRODFFW, MASK_VFNCVTRODFFW, match_narrow_vd_neq_vs2_neq_vm, 0},
+
+{"vredsum.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDSUMVS, MASK_VREDSUMVS, match_opcode, 0},
+{"vredmaxu.vs",0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDMAXUVS, MASK_VREDMAXUVS, match_opcode, 0},
+{"vredmax.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDMAXVS, MASK_VREDMAXVS, match_opcode, 0},
+{"vredminu.vs",0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDMINUVS, MASK_VREDMINUVS, match_opcode, 0},
+{"vredmin.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDMINVS, MASK_VREDMINVS, match_opcode, 0},
+{"vredand.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDANDVS, MASK_VREDANDVS, match_opcode, 0},
+{"vredor.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDORVS, MASK_VREDORVS, match_opcode, 0},
+{"vredxor.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDXORVS, MASK_VREDXORVS, match_opcode, 0},
+
+{"vwredsumu.vs",0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWREDSUMUVS, MASK_VWREDSUMUVS, match_opcode, 0},
+{"vwredsum.vs",0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWREDSUMVS, MASK_VWREDSUMVS, match_opcode, 0},
+
+{"vfredosum.vs",0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFREDOSUMVS, MASK_VFREDOSUMVS, match_opcode, 0},
+{"vfredusum.vs", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFREDSUMVS, MASK_VFREDSUMVS, match_opcode, 0},
+{"vfredmax.vs", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFREDMAXVS, MASK_VFREDMAXVS, match_opcode, 0},
+{"vfredmin.vs", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFREDMINVS, MASK_VFREDMINVS, match_opcode, 0},
+
+{"vfwredosum.vs",0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWREDOSUMVS, MASK_VFWREDOSUMVS, match_opcode, 0},
+{"vfwredusum.vs", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWREDSUMVS, MASK_VFWREDSUMVS, match_opcode, 0},
+
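+/* Compatibility and convenience aliases: vfredsum.vs/vfwredsum.vs are
+   the old spellings of vfredusum.vs/vfwredusum.vs, and the mask
+   aliases expand to mask-logical ops on repeated registers, e.g.
+     vmclr.m vd      ==  vmxor.mm vd, vd, vd
+     vmset.m vd      ==  vmxnor.mm vd, vd, vd
+     vmnot.m vd, vs  ==  vmnand.mm vd, vs, vs  */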
+{"vfredsum.vs", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFREDSUMVS, MASK_VFREDSUMVS, match_opcode, INSN_ALIAS},
+{"vfwredsum.vs", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWREDSUMVS, MASK_VFWREDSUMVS, match_opcode, INSN_ALIAS},
+{"vmmv.m", 0, INSN_CLASS_V, "Vd,Vu", MATCH_VMANDMM, MASK_VMANDMM, match_vs1_eq_vs2, INSN_ALIAS},
+{"vmcpy.m", 0, INSN_CLASS_V, "Vd,Vu", MATCH_VMANDMM, MASK_VMANDMM, match_vs1_eq_vs2, INSN_ALIAS},
+{"vmclr.m", 0, INSN_CLASS_V, "Vv", MATCH_VMXORMM, MASK_VMXORMM, match_vd_eq_vs1_eq_vs2, INSN_ALIAS},
+{"vmset.m", 0, INSN_CLASS_V, "Vv", MATCH_VMXNORMM, MASK_VMXNORMM, match_vd_eq_vs1_eq_vs2, INSN_ALIAS},
+{"vmnot.m", 0, INSN_CLASS_V, "Vd,Vu", MATCH_VMNANDMM, MASK_VMNANDMM, match_vs1_eq_vs2, INSN_ALIAS},
+
+{"vmand.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMANDMM, MASK_VMANDMM, match_opcode, 0},
+{"vmnand.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMNANDMM, MASK_VMNANDMM, match_opcode, 0},
+{"vmandnot.mm",0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMANDNOTMM, MASK_VMANDNOTMM, match_opcode, 0},
+{"vmxor.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMXORMM, MASK_VMXORMM, match_opcode, 0},
+{"vmor.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMORMM, MASK_VMORMM, match_opcode, 0},
+{"vmnor.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMNORMM, MASK_VMNORMM, match_opcode, 0},
+{"vmornot.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMORNOTMM, MASK_VMORNOTMM, match_opcode, 0},
+{"vmxnor.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMXNORMM, MASK_VMXNORMM, match_opcode, 0},
+
+{"vpopc.m", 0, INSN_CLASS_V, "d,VtVm", MATCH_VPOPCM, MASK_VPOPCM, match_opcode, 0},
+{"vfirst.m", 0, INSN_CLASS_V, "d,VtVm", MATCH_VFIRSTM, MASK_VFIRSTM, match_opcode, 0},
+{"vmsbf.m", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VMSBFM, MASK_VMSBFM, match_vd_neq_vs2_neq_vm, 0},
+{"vmsif.m", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VMSIFM, MASK_VMSIFM, match_vd_neq_vs2_neq_vm, 0},
+{"vmsof.m", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VMSOFM, MASK_VMSOFM, match_vd_neq_vs2_neq_vm, 0},
+{"viota.m", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VIOTAM, MASK_VIOTAM, match_vd_neq_vs2_neq_vm, 0},
+{"vid.v", 0, INSN_CLASS_V, "VdVm", MATCH_VIDV, MASK_VIDV, match_vd_neq_vm, 0},
+
+{"vmv.x.s", 0, INSN_CLASS_V, "d,Vt", MATCH_VMVXS, MASK_VMVXS, match_opcode, 0},
+{"vmv.s.x", 0, INSN_CLASS_V, "Vd,s", MATCH_VMVSX, MASK_VMVSX, match_opcode, 0},
+
+{"vfmv.f.s", 0, INSN_CLASS_V_AND_F, "D,Vt", MATCH_VFMVFS, MASK_VFMVFS, match_opcode, 0},
+{"vfmv.s.f", 0, INSN_CLASS_V_AND_F, "Vd,S", MATCH_VFMVSF, MASK_VFMVSF, match_opcode, 0},
+
+{"vslideup.vx",0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSLIDEUPVX, MASK_VSLIDEUPVX, match_vd_neq_vs2_neq_vm, 0},
+{"vslideup.vi",0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSLIDEUPVI, MASK_VSLIDEUPVI, match_vd_neq_vs2_neq_vm, 0},
+{"vslidedown.vx",0,INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSLIDEDOWNVX, MASK_VSLIDEDOWNVX, match_vd_neq_vm, 0},
+{"vslidedown.vi",0,INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSLIDEDOWNVI, MASK_VSLIDEDOWNVI, match_vd_neq_vm, 0},
+
+{"vslide1up.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSLIDE1UPVX, MASK_VSLIDE1UPVX, match_vd_neq_vs2_neq_vm, 0},
+{"vslide1down.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSLIDE1DOWNVX, MASK_VSLIDE1DOWNVX, match_vd_neq_vm, 0},
+{"vfslide1up.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSLIDE1UPVF, MASK_VFSLIDE1UPVF, match_vd_neq_vs2_neq_vm, 0},
+{"vfslide1down.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSLIDE1DOWNVF, MASK_VFSLIDE1DOWNVF, match_vd_neq_vm, 0},
+
+{"vrgather.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VRGATHERVV, MASK_VRGATHERVV, match_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vrgather.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VRGATHERVX, MASK_VRGATHERVX, match_vd_neq_vs2_neq_vm, 0},
+{"vrgather.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VRGATHERVI, MASK_VRGATHERVI, match_vd_neq_vs2_neq_vm, 0},
+{"vrgatherei16.vv",0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VRGATHEREI16VV, MASK_VRGATHEREI16VV, match_vd_neq_vs1_neq_vs2_neq_vm, 0},
+
+{"vcompress.vm",0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VCOMPRESSVM, MASK_VCOMPRESSVM, match_vd_neq_vs1_neq_vs2, 0},
+
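+/* Whole-register moves.  match_vmv_nf_rv checks that vd and vs2 are
+   aligned to the register-group size, e.g. vmv4r.v requires both to be
+   multiples of 4.  */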
+{"vmv1r.v", 0, INSN_CLASS_V, "Vd,Vt", MATCH_VMV1RV, MASK_VMV1RV, match_vmv_nf_rv, 0},
+{"vmv2r.v", 0, INSN_CLASS_V, "Vd,Vt", MATCH_VMV2RV, MASK_VMV2RV, match_vmv_nf_rv, 0},
+{"vmv4r.v", 0, INSN_CLASS_V, "Vd,Vt", MATCH_VMV4RV, MASK_VMV4RV, match_vmv_nf_rv, 0},
+{"vmv8r.v", 0, INSN_CLASS_V, "Vd,Vt", MATCH_VMV8RV, MASK_VMV8RV, match_vmv_nf_rv, 0},
+
+
 /* Terminate the list. */
 {0, 0, INSN_CLASS_NONE, 0, 0, 0, 0, 0}
 };
@@ -814,6 +2122,26 @@ const struct riscv_opcode riscv_insn_types[] =
 {"r", 0, INSN_CLASS_F, "O4,F3,F2,d,S,T,R", 0, 0, match_opcode, 0 },
 {"r", 0, INSN_CLASS_F, "O4,F3,F2,D,S,T,R", 0, 0, match_opcode, 0 },
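+/* .insn formats that accept vector register operands, so hand-rolled
+   encodings can be written symbolically.  As a sketch (assuming the
+   usual ".insn r opcode, func3, func7, rd, rs1, rs2" operand order,
+   with func7 packing funct6<<1|vm), an unmasked vadd.vx would be:
+     .insn r 0x57, 4, 1, v4, a0, v8   # vadd.vx v4, v8, a0  */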
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,Vd,s,t", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,Vd,S,t", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,Vd,s,T", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,Vd,S,T", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,d,Vs,t", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,D,Vs,t", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,d,Vs,T", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,D,Vs,T", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,d,s,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,D,s,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,d,S,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,D,S,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,Vd,Vs,t", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,Vd,Vs,T", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,Vd,s,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,Vd,S,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,d,Vs,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,D,Vs,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,Vd,Vs,Vt", 0, 0, match_opcode, 0 },
+
 {"r4", 0, INSN_CLASS_I, "O4,F3,F2,d,s,t,r", 0, 0, match_opcode, 0 },
 {"r4", 0, INSN_CLASS_F, "O4,F3,F2,D,s,t,r", 0, 0, match_opcode, 0 },
 {"r4", 0, INSN_CLASS_F, "O4,F3,F2,d,S,t,r", 0, 0, match_opcode, 0 },
@@ -935,6 +2263,12 @@ const struct riscv_ext_version riscv_ext_version_table[] =
 {"zicsr", ISA_SPEC_CLASS_20191213, 2, 0},
 {"zicsr", ISA_SPEC_CLASS_20190608, 2, 0},
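+/* The vector and bit-manipulation extensions are still drafts, so they
+   are registered as ISA_SPEC_CLASS_DRAFT, version 1.0; this is what
+   lets -march strings such as rv64gcv_zba_zbb_zbc_zbs through.  */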
+{"v", ISA_SPEC_CLASS_DRAFT, 1, 0},
+{"zba", ISA_SPEC_CLASS_DRAFT, 1, 0},
+{"zbb", ISA_SPEC_CLASS_DRAFT, 1, 0},
+{"zbc", ISA_SPEC_CLASS_DRAFT, 1, 0},
+{"zbs", ISA_SPEC_CLASS_DRAFT, 1, 0},
+
 /* Terminate the list. */
 {NULL, 0, 0, 0}
 };
--
2.33.1