0003-merge-rvv1.0-branch.patch 196 KB

From 8c25ceee37b496c1b89ca046d980d4c632450ada Mon Sep 17 00:00:00 2001
From: "max.ma" <max.ma@starfivetech.com>
Date: Wed, 2 Jun 2021 17:22:17 -0700
Subject: [PATCH 03/28] merge rvv1.0 branch
---
bfd/elfxx-riscv.c | 34 +
gas/config/tc-riscv.c | 418 ++++++++
gas/read.c | 5 +
include/opcode/riscv-opc.h | 1970 ++++++++++++++++++++++++++++++++++++
include/opcode/riscv.h | 83 ++
opcodes/riscv-dis.c | 84 ++
opcodes/riscv-opc.c | 1221 +++++++++++++++++++++-
7 files changed, 3811 insertions(+), 4 deletions(-)
diff --git a/bfd/elfxx-riscv.c b/bfd/elfxx-riscv.c
index 8c8af2386b..f257989b54 100644
--- a/bfd/elfxx-riscv.c
+++ b/bfd/elfxx-riscv.c
@@ -1771,6 +1771,40 @@ riscv_parse_add_implicit_subsets (riscv_parse_subset_t *rps)
RISCV_UNKNOWN_VERSION,
RISCV_UNKNOWN_VERSION, TRUE);
}
+
+ if ((riscv_lookup_subset (rps->subset_list, "b", &subset)))
+ {
+ riscv_parse_add_subset (rps, "zba",
+ RISCV_UNKNOWN_VERSION,
+ RISCV_UNKNOWN_VERSION, TRUE);
+ riscv_parse_add_subset (rps, "zbb",
+ RISCV_UNKNOWN_VERSION,
+ RISCV_UNKNOWN_VERSION, TRUE);
+ riscv_parse_add_subset (rps, "zbc",
+ RISCV_UNKNOWN_VERSION,
+ RISCV_UNKNOWN_VERSION, TRUE);
+ riscv_parse_add_subset (rps, "zbe",
+ RISCV_UNKNOWN_VERSION,
+ RISCV_UNKNOWN_VERSION, TRUE);
+ riscv_parse_add_subset (rps, "zbf",
+ RISCV_UNKNOWN_VERSION,
+ RISCV_UNKNOWN_VERSION, TRUE);
+ riscv_parse_add_subset (rps, "zbm",
+ RISCV_UNKNOWN_VERSION,
+ RISCV_UNKNOWN_VERSION, TRUE);
+ riscv_parse_add_subset (rps, "zbp",
+ RISCV_UNKNOWN_VERSION,
+ RISCV_UNKNOWN_VERSION, TRUE);
+ riscv_parse_add_subset (rps, "zbr",
+ RISCV_UNKNOWN_VERSION,
+ RISCV_UNKNOWN_VERSION, TRUE);
+ riscv_parse_add_subset (rps, "zbs",
+ RISCV_UNKNOWN_VERSION,
+ RISCV_UNKNOWN_VERSION, TRUE);
+ riscv_parse_add_subset (rps, "zbt",
+ RISCV_UNKNOWN_VERSION,
+ RISCV_UNKNOWN_VERSION, TRUE);
+ }
}
/* Function for parsing arch string.
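With the hunk above applied, naming "b" in the architecture string pulls in the ten Zb* sub-extensions implicitly, so instructions gated on them assemble without listing each one. A minimal sketch of the effect (the exact -march spelling is an assumption; this hunk only shows the subset bookkeeping):

    # as -march=rv64gc_b ...
    sh1add a0, a1, a2    # needs zba, implied by "b"
    clz    a3, a4        # needs zbb, implied by "b"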
diff --git a/gas/config/tc-riscv.c b/gas/config/tc-riscv.c
index 3e76e538c4..405ef3278a 100644
--- a/gas/config/tc-riscv.c
+++ b/gas/config/tc-riscv.c
@@ -253,6 +253,16 @@ riscv_multi_subset_supports (enum riscv_insn_class insn_class)
case INSN_CLASS_ZIHINTPAUSE:
return riscv_subset_supports ("zihintpause");
+ case INSN_CLASS_V: return riscv_subset_supports ("v");
+ case INSN_CLASS_V_AND_F:
+ return riscv_subset_supports ("v") && riscv_subset_supports ("f");
+ case INSN_CLASS_V_OR_ZVAMO:
+ return (riscv_subset_supports ("a")
+ && (riscv_subset_supports ("v")
+ || riscv_subset_supports ("zvamo")));
+ case INSN_CLASS_V_OR_ZVLSSEG:
+ return riscv_subset_supports ("v") || riscv_subset_supports ("zvlsseg");
+
case INSN_CLASS_ZBB:
return riscv_subset_supports ("zbb");
case INSN_CLASS_ZBA:
@@ -693,6 +703,8 @@ enum reg_class
{
RCLASS_GPR,
RCLASS_FPR,
+ RCLASS_VECR,
+ RCLASS_VECM,
RCLASS_MAX,
RCLASS_CSR
@@ -791,6 +803,12 @@ riscv_csr_address (const char *csr_name,
result = riscv_subset_supports ("f");
need_check_version = FALSE;
break;
+ case CSR_CLASS_V:
+ result = (riscv_subset_supports ("v")
+ || riscv_subset_supports ("zvamo")
+ || riscv_subset_supports ("zvlsseg"));
+ need_check_version = FALSE;
+ break;
case CSR_CLASS_DEBUG:
need_check_version = FALSE;
break;
@@ -868,6 +886,11 @@ reg_lookup_internal (const char *s, enum reg_class class)
if (riscv_opts.rve && class == RCLASS_GPR && DECODE_REG_NUM (r) > 15)
return -1;
+ if (class == RCLASS_CSR
+ && riscv_opts.csr_check
+ && !reg_csr_lookup_internal (s))
+ return -1;
+
return DECODE_REG_NUM (r);
}
@@ -1060,6 +1083,33 @@ validate_riscv_insn (const struct riscv_opcode *opc, int length)
return FALSE;
}
break;
+
+ case 'V': /* RVV */
+ switch (c = *p++)
+ {
+ case 'd':
+ case 'f': USE_BITS (OP_MASK_VD, OP_SH_VD); break;
+ case 'e': USE_BITS (OP_MASK_VWD, OP_SH_VWD); break;
+ case 's': USE_BITS (OP_MASK_VS1, OP_SH_VS1); break;
+ case 't': USE_BITS (OP_MASK_VS2, OP_SH_VS2); break;
+ case 'u': USE_BITS (OP_MASK_VS1, OP_SH_VS1);
+ USE_BITS (OP_MASK_VS2, OP_SH_VS2); break;
+ case 'v': USE_BITS (OP_MASK_VD, OP_SH_VD);
+ USE_BITS (OP_MASK_VS1, OP_SH_VS1);
+ USE_BITS (OP_MASK_VS2, OP_SH_VS2); break;
+ case '0': break;
+ case 'b': used_bits |= ENCODE_RVV_VB_IMM (-1U); break;
+ case 'c': used_bits |= ENCODE_RVV_VC_IMM (-1U); break;
+ case 'i':
+ case 'j':
+ case 'k': USE_BITS (OP_MASK_VIMM, OP_SH_VIMM); break;
+ case 'm': USE_BITS (OP_MASK_VMASK, OP_SH_VMASK); break;
+ default:
+ as_bad (_("internal: bad RISC-V opcode (unknown operand type `V%c'): %s %s"),
+ c, opc->name, opc->args);
+ }
+ break;
+
default:
as_bad (_("internal: bad RISC-V opcode "
"(unknown operand type `%c'): %s %s"),
@@ -1138,6 +1188,8 @@ md_begin (void)
hash_reg_names (RCLASS_GPR, riscv_gpr_names_abi, NGPR);
hash_reg_names (RCLASS_FPR, riscv_fpr_names_numeric, NFPR);
hash_reg_names (RCLASS_FPR, riscv_fpr_names_abi, NFPR);
+ hash_reg_names (RCLASS_VECR, riscv_vecr_names_numeric, NVECR);
+ hash_reg_names (RCLASS_VECM, riscv_vecm_names_numeric, NVECM);
/* Add "fp" as an alias for "s0". */
hash_reg_name (RCLASS_GPR, "fp", 8);
@@ -1301,6 +1353,42 @@ macro_build (expressionS *ep, const char *name, const char *fmt, ...)
break;
case ',':
continue;
+
+ case 'V': /* RVV */
+ {
+ switch (*fmt++)
+ {
+ case 'd':
+ INSERT_OPERAND (VD, insn, va_arg (args, int));
+ continue;
+
+ case 's':
+ INSERT_OPERAND (VS1, insn, va_arg (args, int));
+ continue;
+
+ case 't':
+ INSERT_OPERAND (VS2, insn, va_arg (args, int));
+ continue;
+
+ case 'm':
+ {
+ int reg = va_arg (args, int);
+ if (reg == -1)
+ {
+ INSERT_OPERAND (VMASK, insn, 1);
+ continue;
+ }
+ else if (reg == 0)
+ {
+ INSERT_OPERAND (VMASK, insn, 0);
+ continue;
+ }
+ }
+ /* fallthru */
+ }
+ }
+ /* fallthru */
+
default:
as_fatal (_("internal error: invalid macro"));
}
@@ -1470,6 +1558,97 @@ load_const (int reg, expressionS *ep)
}
}
+/* Expand RISC-V Vector macros into one or more instructions. */
+
+static void
+vector_macro (struct riscv_cl_insn *ip)
+{
+ int vd = (ip->insn_opcode >> OP_SH_VD) & OP_MASK_VD;
+ int vs1 = (ip->insn_opcode >> OP_SH_VS1) & OP_MASK_VS1;
+ int vs2 = (ip->insn_opcode >> OP_SH_VS2) & OP_MASK_VS2;
+ int vm = (ip->insn_opcode >> OP_SH_VMASK) & OP_MASK_VMASK;
+ int vtemp = (ip->insn_opcode >> OP_SH_VFUNCT6) & OP_MASK_VFUNCT6;
+ int mask = ip->insn_mo->mask;
+
+ switch (mask)
+ {
+ case M_VMSGE:
+ if (vm)
+ {
+ /* Unmasked. */
+ macro_build (NULL, "vmslt.vx", "Vd,Vt,sVm", vd, vs2, vs1, -1);
+ macro_build (NULL, "vmnand.mm", "Vd,Vt,Vs", vd, vd, vd);
+ break;
+ }
+ if (vtemp != 0)
+ {
+ /* Masked. Have vtemp to avoid overlap constraints. */
+ if (vd == vm)
+ {
+ macro_build (NULL, "vmslt.vx", "Vd,Vt,s", vtemp, vs2, vs1);
+ macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vd, vm, vtemp);
+ }
+ else
+ {
+ /* Preserve the value of vd for elements not selected by vm. */
+ macro_build (NULL, "vmslt.vx", "Vd,Vt,s", vtemp, vs2, vs1);
+ macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vtemp, vm, vtemp);
+ macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vd, vd, vm);
+ macro_build (NULL, "vmor.mm", "Vd,Vt,Vs", vd, vtemp, vd);
+ }
+ }
+ else if (vd != vm)
+ {
+ /* Masked. vd may overlap vs2 when LMUL > 1. */
+ macro_build (NULL, "vmslt.vx", "Vd,Vt,sVm", vd, vs2, vs1, vm);
+ macro_build (NULL, "vmxor.mm", "Vd,Vt,Vs", vd, vd, vm);
+ }
+ else
+ as_bad (_("must provide temp if destination overlaps mask"));
+ break;
+
+ case M_VMSGEU:
+ if (vm)
+ {
+ /* Unmasked. */
+ macro_build (NULL, "vmsltu.vx", "Vd,Vt,sVm", vd, vs2, vs1, -1);
+ macro_build (NULL, "vmnand.mm", "Vd,Vt,Vs", vd, vd, vd);
+ break;
+ }
+ if (vtemp != 0)
+ {
+ /* Masked. Have vtemp to avoid overlap constraints. */
+ if (vd == vm)
+ {
+ macro_build (NULL, "vmsltu.vx", "Vd,Vt,s", vtemp, vs2, vs1);
+ macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vd, vm, vtemp);
+ }
+ else
+ {
+ /* Preserve the value of vd for elements not selected by vm. */
+ macro_build (NULL, "vmsltu.vx", "Vd,Vt,s", vtemp, vs2, vs1);
+ macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vtemp, vm, vtemp);
+ macro_build (NULL, "vmandnot.mm", "Vd,Vt,Vs", vd, vd, vm);
+ macro_build (NULL, "vmor.mm", "Vd,Vt,Vs", vd, vtemp, vd);
+ }
+ }
+ else if (vd != vm)
+ {
+ /* Masked. vd may overlap vs2 when LMUL > 1. */
+ macro_build (NULL, "vmsltu.vx", "Vd,Vt,sVm", vd, vs2, vs1, vm);
+ macro_build (NULL, "vmxor.mm", "Vd,Vt,Vs", vd, vd, vm);
+ }
+ else
+ as_bad (_("must provide temp if destination overlaps mask"));
+ break;
+
+ default:
+ as_bad (_("Macro %s not implemented"), ip->insn_mo->name);
+ break;
+ }
+}
+
+
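Based on the M_VMSGE arms above, the pseudo-instruction expands roughly as follows (register numbers are illustrative, and the "v0.t"/temp-operand spellings are assumptions about the operand strings defined in opcodes/riscv-opc.c, which this excerpt does not show):

    vmsge.vx v4, v8, a0              # unmasked
      -> vmslt.vx  v4, v8, a0
         vmnand.mm v4, v4, v4

    vmsge.vx v4, v8, a0, v0.t, v12   # masked, temp v12, vd != v0
      -> vmslt.vx    v12, v8, a0
         vmandnot.mm v12, v0, v12
         vmandnot.mm v4, v4, v0
         vmor.mm     v4, v12, v4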
/* Zero extend and sign extend byte/half-word/word. */
static void
@@ -1565,6 +1744,11 @@ macro (struct riscv_cl_insn *ip, expressionS *imm_expr,
BFD_RELOC_RISCV_PCREL_HI20, BFD_RELOC_RISCV_PCREL_LO12_I);
break;
+ case M_FLH:
+ pcrel_load (rd, rs1, imm_expr, "flh",
+ BFD_RELOC_RISCV_PCREL_HI20, BFD_RELOC_RISCV_PCREL_LO12_I);
+ break;
+
case M_FLW:
pcrel_load (rd, rs1, imm_expr, "flw",
BFD_RELOC_RISCV_PCREL_HI20, BFD_RELOC_RISCV_PCREL_LO12_I);
@@ -1595,6 +1779,11 @@ macro (struct riscv_cl_insn *ip, expressionS *imm_expr,
BFD_RELOC_RISCV_PCREL_HI20, BFD_RELOC_RISCV_PCREL_LO12_S);
break;
+ case M_FSH:
+ pcrel_store (rs2, rs1, imm_expr, "fsh",
+ BFD_RELOC_RISCV_PCREL_HI20, BFD_RELOC_RISCV_PCREL_LO12_S);
+ break;
+
case M_FSW:
pcrel_store (rs2, rs1, imm_expr, "fsw",
BFD_RELOC_RISCV_PCREL_HI20, BFD_RELOC_RISCV_PCREL_LO12_S);
@@ -1625,6 +1814,11 @@ macro (struct riscv_cl_insn *ip, expressionS *imm_expr,
riscv_ext (rd, rs1, xlen - 16, TRUE);
break;
+ case M_VMSGE:
+ case M_VMSGEU:
+ vector_macro (ip);
+ break;
+
default:
as_bad (_("Macro %s not implemented"), ip->insn_mo->name);
break;
@@ -1778,6 +1972,66 @@ my_getSmallExpression (expressionS *ep, bfd_reloc_code_real_type *reloc,
return reloc_index;
}
+/* Parse string STR as a vsetvli operand. Store the expression in *EP.
+ On exit, EXPR_END points to the first character after the expression. */
+
+static void
+my_getVsetvliExpression (expressionS *ep, char *str)
+{
+ unsigned int vsew_value = 0, vlmul_value = 0;
+ unsigned int vta_value = 0, vma_value = 0;
+ bfd_boolean vsew_found = FALSE, vlmul_found = FALSE;
+ bfd_boolean vta_found = FALSE, vma_found = FALSE;
+
+ if (arg_lookup (&str, riscv_vsew, ARRAY_SIZE (riscv_vsew), &vsew_value))
+ {
+ if (*str == ',')
+ ++str;
+ if (vsew_found)
+ as_bad (_("multiple vsew constants"));
+ vsew_found = TRUE;
+ }
+ if (arg_lookup (&str, riscv_vlmul, ARRAY_SIZE (riscv_vlmul), &vlmul_value))
+ {
+ if (*str == ',')
+ ++str;
+ if (vlmul_found)
+ as_bad (_("multiple vlmul constants"));
+ vlmul_found = TRUE;
+ }
+ if (arg_lookup (&str, riscv_vta, ARRAY_SIZE (riscv_vta), &vta_value))
+ {
+ if (*str == ',')
+ ++str;
+ if (vta_found)
+ as_bad (_("multiple vta constants"));
+ vta_found = TRUE;
+ }
+ if (arg_lookup (&str, riscv_vma, ARRAY_SIZE (riscv_vma), &vma_value))
+ {
+ if (*str == ',')
+ ++str;
+ if (vma_found)
+ as_bad (_("multiple vma constants"));
+ vma_found = TRUE;
+ }
+
+ if (vsew_found || vlmul_found || vta_found || vma_found)
+ {
+ ep->X_op = O_constant;
+ ep->X_add_number = (vlmul_value << OP_SH_VLMUL)
+ | (vsew_value << OP_SH_VSEW)
+ | (vta_value << OP_SH_VTA)
+ | (vma_value << OP_SH_VMA);
+ expr_end = str;
+ }
+ else
+ {
+ my_getExpression (ep, str);
+ str = expr_end;
+ }
+}
+
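For the vtype value assembled above, here is a worked example assuming the RVV 1.0 field layout that the OP_SH_* shifts in include/opcode/riscv.h encode (vlmul in bits 2:0, vsew in bits 5:3, vta in bit 6, vma in bit 7):

    vsetvli a0, a1, e32,m2,ta,ma
    # vsew = 010 (e32), vlmul = 001 (m2), vta = 1, vma = 1
    # vtype = (1 << 7) | (1 << 6) | (2 << 3) | 1 = 0xd1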
/* Parse opcode name, could be an mnemonics or number. */
static size_t
my_getOpcodeExpression (expressionS *ep, bfd_reloc_code_real_type *reloc,
@@ -2641,6 +2895,170 @@ riscv_ip (char *str, struct riscv_cl_insn *ip, expressionS *imm_expr,
imm_expr->X_op = O_absent;
continue;
+ case 'V': /* RVV */
+ switch (*++args)
+ {
+ case 'd': /* VD */
+ if (!reg_lookup (&s, RCLASS_VECR, &regno))
+ break;
+ INSERT_OPERAND (VD, *ip, regno);
+ continue;
+
+ case 'e': /* AMO VD */
+ if (reg_lookup (&s, RCLASS_GPR, &regno) && regno == 0)
+ INSERT_OPERAND (VWD, *ip, 0);
+ else if (reg_lookup (&s, RCLASS_VECR, &regno))
+ {
+ INSERT_OPERAND (VWD, *ip, 1);
+ INSERT_OPERAND (VD, *ip, regno);
+ }
+ else
+ break;
+ continue;
+
+ case 'f': /* AMO VS3 */
+ if (!reg_lookup (&s, RCLASS_VECR, &regno))
+ break;
+ if (!EXTRACT_OPERAND (VWD, ip->insn_opcode))
+ INSERT_OPERAND (VD, *ip, regno);
+ else
+ {
+ /* VS3 must match VD. */
+ if (EXTRACT_OPERAND (VD, ip->insn_opcode) != regno)
+ break;
+ }
+ continue;
+
+ case 's': /* VS1 */
+ if (!reg_lookup (&s, RCLASS_VECR, &regno))
+ break;
+ INSERT_OPERAND (VS1, *ip, regno);
+ continue;
+
+ case 't': /* VS2 */
+ if (!reg_lookup (&s, RCLASS_VECR, &regno))
+ break;
+ INSERT_OPERAND (VS2, *ip, regno);
+ continue;
+
+ case 'u': /* VS1 == VS2 */
+ if (!reg_lookup (&s, RCLASS_VECR, &regno))
+ break;
+ INSERT_OPERAND (VS1, *ip, regno);
+ INSERT_OPERAND (VS2, *ip, regno);
+ continue;
+
+ case 'v': /* VD == VS1 == VS2 */
+ if (!reg_lookup (&s, RCLASS_VECR, &regno))
+ break;
+ INSERT_OPERAND (VD, *ip, regno);
+ INSERT_OPERAND (VS1, *ip, regno);
+ INSERT_OPERAND (VS2, *ip, regno);
+ continue;
+
+ /* The `V0` is the carry-in register for v[m]adc and v[m]sbc,
+ and is used to choose vs1/rs1/frs1/imm or vs2 for
+ v[f]merge. It uses the same encoding as the vector mask
+ register. */
+ case '0':
+ if (reg_lookup (&s, RCLASS_VECR, &regno) && regno == 0)
+ continue;
+ break;
+
+ case 'b': /* vtypei for vsetivli */
+ my_getVsetvliExpression (imm_expr, s);
+ check_absolute_expr (ip, imm_expr, FALSE);
+ if (!VALID_RVV_VB_IMM (imm_expr->X_add_number))
+ as_bad (_("bad value for vsetivli immediate field, "
+ "value must be 0..1023"));
+ ip->insn_opcode
+ |= ENCODE_RVV_VB_IMM (imm_expr->X_add_number);
+ imm_expr->X_op = O_absent;
+ s = expr_end;
+ continue;
+
+ case 'c': /* vtypei for vsetvli */
+ my_getVsetvliExpression (imm_expr, s);
+ check_absolute_expr (ip, imm_expr, FALSE);
+ if (!VALID_RVV_VC_IMM (imm_expr->X_add_number))
+ as_bad (_("bad value for vsetvli immediate field, "
+ "value must be 0..2047"));
+ ip->insn_opcode
+ |= ENCODE_RVV_VC_IMM (imm_expr->X_add_number);
+ imm_expr->X_op = O_absent;
+ s = expr_end;
+ continue;
+
+ case 'i': /* vector arith signed immediate */
+ my_getExpression (imm_expr, s);
+ check_absolute_expr (ip, imm_expr, FALSE);
+ if (imm_expr->X_add_number > 15
+ || imm_expr->X_add_number < -16)
+ as_bad (_("bad value for vector immediate field, "
+ "value must be -16...15"));
+ INSERT_OPERAND (VIMM, *ip, imm_expr->X_add_number);
+ imm_expr->X_op = O_absent;
+ s = expr_end;
+ continue;
+
+ case 'j': /* vector arith unsigned immediate */
+ my_getExpression (imm_expr, s);
+ check_absolute_expr (ip, imm_expr, FALSE);
+ if (imm_expr->X_add_number < 0
+ || imm_expr->X_add_number >= 32)
+ as_bad (_("bad value for vector immediate field, "
+ "value must be 0...31"));
+ INSERT_OPERAND (VIMM, *ip, imm_expr->X_add_number);
+ imm_expr->X_op = O_absent;
+ s = expr_end;
+ continue;
+
+ case 'k': /* vector arith signed immediate, minus 1 */
+ my_getExpression (imm_expr, s);
+ check_absolute_expr (ip, imm_expr, FALSE);
+ if (imm_expr->X_add_number > 16
+ || imm_expr->X_add_number < -15)
+ as_bad (_("bad value for vector immediate field, "
+ "value must be -15...16"));
+ INSERT_OPERAND (VIMM, *ip, imm_expr->X_add_number - 1);
+ imm_expr->X_op = O_absent;
+ s = expr_end;
+ continue;
+
+ case 'm': /* optional vector mask */
+ if (*s == '\0')
+ {
+ INSERT_OPERAND (VMASK, *ip, 1);
+ continue;
+ }
+ else if (*s == ',' && s++
+ && reg_lookup (&s, RCLASS_VECM, &regno)
+ && regno == 0)
+ {
+ INSERT_OPERAND (VMASK, *ip, 0);
+ continue;
+ }
+ break;
+
+ /* The following ones are only used in macros. */
+ case 'M': /* required vector mask */
+ if (reg_lookup (&s, RCLASS_VECM, &regno) && regno == 0)
+ {
+ INSERT_OPERAND (VMASK, *ip, 0);
+ continue;
+ }
+ break;
+
+ case 'T': /* vector macro temporary register */
+ if (!reg_lookup (&s, RCLASS_VECR, &regno) || regno == 0)
+ break;
+ /* Store it in the FUNCT6 field as we don't have anyplace
+ else to store it. */
+ INSERT_OPERAND (VFUNCT6, *ip, regno);
+ continue;
+ }
+ break;
+
default:
as_fatal (_("internal error: bad argument type %c"), *args);
}
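The 'Vm' case above makes the trailing mask operand optional: when the operand list ends, VMASK is set to 1 (unmasked); a trailing mask register after a comma selects masked execution with VMASK = 0. Illustrative syntax (the "v0.t" spelling comes from the riscv_vecm_names_numeric table, which this excerpt does not show):

    vadd.vv v4, v8, v12         # no mask operand, VMASK = 1
    vadd.vv v4, v8, v12, v0.t   # masked by v0, VMASK = 0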
diff --git a/gas/read.c b/gas/read.c
index be6e3e03d4..2d2acf9592 100644
--- a/gas/read.c
+++ b/gas/read.c
@@ -4827,6 +4827,11 @@ hex_float (int float_type, char *bytes)
switch (float_type)
{
+ case 'h':
+ case 'H':
+ length = 2;
+ break;
+
case 'f':
case 'F':
case 's':
diff --git a/include/opcode/riscv-opc.h b/include/opcode/riscv-opc.h
index 1a9e313fc3..5bb3803d03 100644
--- a/include/opcode/riscv-opc.h
+++ b/include/opcode/riscv-opc.h
@@ -323,6 +323,38 @@
#define MASK_FCVT_D_S 0xfff0007f
#define MATCH_FSQRT_D 0x5a000053
#define MASK_FSQRT_D 0xfff0007f
+#define MATCH_FADD_H 0x4000053
+#define MASK_FADD_H 0xfe00007f
+#define MATCH_FSUB_H 0xc000053
+#define MASK_FSUB_H 0xfe00007f
+#define MATCH_FMUL_H 0x14000053
+#define MASK_FMUL_H 0xfe00007f
+#define MATCH_FDIV_H 0x1c000053
+#define MASK_FDIV_H 0xfe00007f
+#define MATCH_FSGNJ_H 0x24000053
+#define MASK_FSGNJ_H 0xfe00707f
+#define MATCH_FSGNJN_H 0x24001053
+#define MASK_FSGNJN_H 0xfe00707f
+#define MATCH_FSGNJX_H 0x24002053
+#define MASK_FSGNJX_H 0xfe00707f
+#define MATCH_FMIN_H 0x2c000053
+#define MASK_FMIN_H 0xfe00707f
+#define MATCH_FMAX_H 0x2c001053
+#define MASK_FMAX_H 0xfe00707f
+#define MATCH_FCVT_S_H 0x40200053
+#define MASK_FCVT_S_H 0xfff0007f
+#define MATCH_FCVT_H_S 0x44000053
+#define MASK_FCVT_H_S 0xfff0007f
+#define MATCH_FCVT_D_H 0x42200053
+#define MASK_FCVT_D_H 0xfff0007f
+#define MATCH_FCVT_H_D 0x44100053
+#define MASK_FCVT_H_D 0xfff0007f
+#define MATCH_FCVT_Q_H 0x46300053
+#define MASK_FCVT_Q_H 0xfff0007f
+#define MATCH_FCVT_H_Q 0x44200053
+#define MASK_FCVT_H_Q 0xfff0007f
+#define MATCH_FSQRT_H 0x5c000053
+#define MASK_FSQRT_H 0xfff0007f
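These half-precision encodings follow the existing S/D/Q pattern in the OP-FP fmt field (instruction bits 26:25): 00 = S, 01 = D, 11 = Q, and 10 = H. A quick check against one define above:

    MATCH_FADD_H = 0x04000053
    # opcode[6:0] = 1010011 (OP-FP), funct7 = 0000010, so fmt = 10 (H)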
#define MATCH_FADD_Q 0x6000053
#define MASK_FADD_Q 0xfe00007f
#define MATCH_FSUB_Q 0xe000053
@@ -363,6 +395,12 @@
#define MASK_FLT_D 0xfe00707f
#define MATCH_FEQ_D 0xa2002053
#define MASK_FEQ_D 0xfe00707f
+#define MATCH_FLE_H 0xa4000053
+#define MASK_FLE_H 0xfe00707f
+#define MATCH_FLT_H 0xa4001053
+#define MASK_FLT_H 0xfe00707f
+#define MATCH_FEQ_H 0xa4002053
+#define MASK_FEQ_H 0xfe00707f
#define MATCH_FLE_Q 0xa6000053
#define MASK_FLE_Q 0xfe00707f
#define MATCH_FLT_Q 0xa6001053
@@ -393,6 +431,18 @@
#define MASK_FMV_X_D 0xfff0707f
#define MATCH_FCLASS_D 0xe2001053
#define MASK_FCLASS_D 0xfff0707f
+#define MATCH_FCVT_W_H 0xc4000053
+#define MASK_FCVT_W_H 0xfff0007f
+#define MATCH_FCVT_WU_H 0xc4100053
+#define MASK_FCVT_WU_H 0xfff0007f
+#define MATCH_FCVT_L_H 0xc4200053
+#define MASK_FCVT_L_H 0xfff0007f
+#define MATCH_FCVT_LU_H 0xc4300053
+#define MASK_FCVT_LU_H 0xfff0007f
+#define MATCH_FMV_X_H 0xe4000053
+#define MASK_FMV_X_H 0xfff0707f
+#define MATCH_FCLASS_H 0xe4001053
+#define MASK_FCLASS_H 0xfff0707f
#define MATCH_FCVT_W_Q 0xc6000053
#define MASK_FCVT_W_Q 0xfff0007f
#define MATCH_FCVT_WU_Q 0xc6100053
@@ -425,6 +475,16 @@
#define MASK_FCVT_D_LU 0xfff0007f
#define MATCH_FMV_D_X 0xf2000053
#define MASK_FMV_D_X 0xfff0707f
+#define MATCH_FCVT_H_W 0xd4000053
+#define MASK_FCVT_H_W 0xfff0007f
+#define MATCH_FCVT_H_WU 0xd4100053
+#define MASK_FCVT_H_WU 0xfff0007f
+#define MATCH_FCVT_H_L 0xd4200053
+#define MASK_FCVT_H_L 0xfff0007f
+#define MATCH_FCVT_H_LU 0xd4300053
+#define MASK_FCVT_H_LU 0xfff0007f
+#define MATCH_FMV_H_X 0xf4000053
+#define MASK_FMV_H_X 0xfff0707f
#define MATCH_FCVT_Q_W 0xd6000053
#define MASK_FCVT_Q_W 0xfff0007f
#define MATCH_FCVT_Q_WU 0xd6100053
@@ -625,12 +685,16 @@
#define MASK_CLMULH 0xfe00707f
#define MATCH_CLMULR 0xa002033
#define MASK_CLMULR 0xfe00707f
+#define MATCH_FLH 0x1007
+#define MASK_FLH 0x707f
#define MATCH_FLW 0x2007
#define MASK_FLW 0x707f
#define MATCH_FLD 0x3007
#define MASK_FLD 0x707f
#define MATCH_FLQ 0x4007
#define MASK_FLQ 0x707f
+#define MATCH_FSH 0x1027
+#define MASK_FSH 0x707f
#define MATCH_FSW 0x2027
#define MASK_FSW 0x707f
#define MATCH_FSD 0x3027
@@ -653,6 +717,14 @@
#define MASK_FNMSUB_D 0x600007f
#define MATCH_FNMADD_D 0x200004f
#define MASK_FNMADD_D 0x600007f
+#define MATCH_FMADD_H 0x4000043
+#define MASK_FMADD_H 0x600007f
+#define MATCH_FMSUB_H 0x4000047
+#define MASK_FMSUB_H 0x600007f
+#define MATCH_FNMSUB_H 0x400004b
+#define MASK_FNMSUB_H 0x600007f
+#define MATCH_FNMADD_H 0x400004f
+#define MASK_FNMADD_H 0x600007f
#define MATCH_FMADD_Q 0x6000043
#define MASK_FMADD_Q 0x600007f
#define MATCH_FMSUB_Q 0x6000047
@@ -751,6 +823,1854 @@
#define MASK_C_LDSP 0xe003
#define MATCH_C_SDSP 0xe002
#define MASK_C_SDSP 0xe003
+
+/* RVV */
+/* Version 1.0-draft-20210130. */
+
+/* Temporary configuration-setting encoding info
+
+`-` means zimm
+
+31 30 zimm RS2 RS1/uimm funct3 RD opcode
+1 0 00000 xxxxx xxxxx 111 xxxxx 1010111 vsetvl
+1 1 ----- ----- xxxxx 111 xxxxx 1010111 vsetivli
+0 - ----- ----- xxxxx 111 xxxxx 1010111 vsetvli
+*/
+
+#define MATCH_VSETVL 0x80007057
+#define MASK_VSETVL 0xfe00707f
+#define MATCH_VSETIVLI 0xc0007057
+#define MASK_VSETIVLI 0xc000707f
+#define MATCH_VSETVLI 0x00007057
+#define MASK_VSETVLI 0x8000707f
+
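Checking the three defines against the encoding table above:

    MATCH_VSETVLI  = 0x00007057   # bit 31 = 0, funct3 = 111, opcode = 1010111
    MATCH_VSETIVLI = 0xc0007057   # bits 31:30 = 11
    MATCH_VSETVL   = 0x80007057   # bits 31:30 = 10; MASK_VSETVL also pins
                                  # the zimm bits 29:25 to zero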
+/* Temporary Load/store encoding info
+
+MOP load
+00 unit-stride VLE<EEW>, VLE<EEW>FF, VL<nf>RE<EEW> (nf = 1, 2, 4, 8)
+01 indexed-unordered VLUXEI<EEW>
+10 strided VLSE<EEW>
+11 indexed-ordered VLOXEI<EEW>
+
+MOP store
+00 unit-stride VSE<EEW>, VS<nf>R (nf = 1, 2, 4, 8)
+01 indexed-unordered VSUXEI<EEW>
+10 strided VSSE<EEW>
+11 indexed-ordered VSOXEI<EEW>
+
+VM 0 masked
+VM 1 unmasked
+
+LUMOP
+00000 unit-stride load
+01000 unit-stride, whole registers load
+01011 unit-stride, mask load, EEW = 1
+10000 unit-stride first-fault
+xxxxx other encodings reserved, x != 0
+
+SUMOP
+00000 unit-stride store
+01000 unit-stride, whole registers store
+01011 unit-stride, mask store, EEW = 1
+0xxxx other encodings reserved, x != 0
+
+`-` means EEW
+MEW WIDTH
+x 001 FLH/FSH
+x 010 FLW/FSW
+x 011 FLD/FSD
+x 100 FLQ/FSQ
+0 000 VLxE8/VSxE8, VLxEI8/VSxEI8, VL<nf>RE8, VS<nf>R
+0 101 VLxE16/VSxE16, VLxEI16/VSxEI16, VL<nf>RE16
+0 110 VLxE32/VSxE32, VLxEI32/VSxEI32, VL<nf>RE32
+0 111 VLxE64/VSxE64, VLxEI64/VSxEI64, VL<nf>RE64
+1 000 Reserved (VLxE128/VSxE128, VL<nf>RE128)
+1 101 Reserved (VLxE256/VSxE256, VL<nf>RE256)
+1 110 Reserved (VLxE512/VSxE512, VL<nf>RE512)
+1 111 Reserved (VLxE1024/VSxE1024, VL<nf>RE1024)
+
+NF MEW MOP VM LUMOP/RS2 RS1 WIDTH VD opcode
+000 - 00 x 00000 xxxxx --- xxxxx 0000111 VLE<EEW>
+000 - 00 x 00000 xxxxx --- xxxxx 0100111 VSE<EEW>
+000 - 00 1 01011 xxxxx --- xxxxx 0000111 VLE, EEW = 1
+000 - 00 1 01011 xxxxx --- xxxxx 0100111 VSE, EEW = 1
+000 - 10 x xxxxx xxxxx --- xxxxx 0000111 VLSE<EEW>
+000 - 10 x xxxxx xxxxx --- xxxxx 0100111 VSSE<EEW>
+000 0 11 x xxxxx xxxxx --- xxxxx 0000111 VLOXE<EEW>I
+000 0 11 x xxxxx xxxxx --- xxxxx 0100111 VSOXE<EEW>I
+000 0 01 x xxxxx xxxxx --- xxxxx 0000111 VLUXE<EEW>I
+000 0 01 x xxxxx xxxxx --- xxxxx 0100111 VSUXE<EEW>I
+000 - 00 x 10000 xxxxx --- xxxxx 0000111 VLE<EEW>FF
+xxx - 00 1 01000 xxxxx --- xxxxx 0000111 VL<nf>RE<EEW>, nf = 1,2,4,8
+xxx 0 00 1 01000 xxxxx 000 xxxxx 0100111 VS<nf>R, nf = 1,2,4,8
+
+xxx - 00 x 00000 xxxxx --- xxxxx 0000111 VLSEG<nf>E<EEW>
+xxx - 00 x 00000 xxxxx --- xxxxx 0100111 VSSEG<nf>E<EEW>
+xxx - 10 x 00000 xxxxx --- xxxxx 0000111 VLSSEG<nf>E<EEW>
+xxx - 10 x 00000 xxxxx --- xxxxx 0100111 VSSSEG<nf>E<EEW>
+xxx - 11 x 00000 xxxxx --- xxxxx 0000111 VLOXSEG<nf>E<EEW>I
+xxx - 11 x 00000 xxxxx --- xxxxx 0100111 VSOXSEG<nf>E<EEW>I
+xxx - 01 x 00000 xxxxx --- xxxxx 0000111 VLUXSEG<nf>E<EEW>I
+xxx - 01 x 00000 xxxxx --- xxxxx 0100111 VSUXSEG<nf>E<EEW>I
+xxx - 00 x 10000 xxxxx --- xxxxx 0000111 VLSEG<nf>E<EEW>FF
+*/
+
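Decoding one of the unit-stride defines below against the table above:

    MATCH_VLE32V = 0x00006007   # opcode = 0000111, width = 110 (EEW = 32),
                                # mop = 00 (unit-stride), lumop = 00000
    MASK_VLE32V  = 0xfdf0707f   # leaves vm (bit 25), rs1 and vd variable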
+#define MATCH_VLE1V 0x02b00007
+#define MASK_VLE1V 0xfff0707f
+#define MATCH_VSE1V 0x02b00027
+#define MASK_VSE1V 0xfff0707f
+
+#define MATCH_VLE8V 0x00000007
+#define MASK_VLE8V 0xfdf0707f
+#define MATCH_VLE16V 0x00005007
+#define MASK_VLE16V 0xfdf0707f
+#define MATCH_VLE32V 0x00006007
+#define MASK_VLE32V 0xfdf0707f
+#define MATCH_VLE64V 0x00007007
+#define MASK_VLE64V 0xfdf0707f
+
+#define MATCH_VSE8V 0x00000027
+#define MASK_VSE8V 0xfdf0707f
+#define MATCH_VSE16V 0x00005027
+#define MASK_VSE16V 0xfdf0707f
+#define MATCH_VSE32V 0x00006027
+#define MASK_VSE32V 0xfdf0707f
+#define MATCH_VSE64V 0x00007027
+#define MASK_VSE64V 0xfdf0707f
+
+#define MATCH_VLSE8V 0x08000007
+#define MASK_VLSE8V 0xfc00707f
+#define MATCH_VLSE16V 0x08005007
+#define MASK_VLSE16V 0xfc00707f
+#define MATCH_VLSE32V 0x08006007
+#define MASK_VLSE32V 0xfc00707f
+#define MATCH_VLSE64V 0x08007007
+#define MASK_VLSE64V 0xfc00707f
+
+#define MATCH_VSSE8V 0x08000027
+#define MASK_VSSE8V 0xfc00707f
+#define MATCH_VSSE16V 0x08005027
+#define MASK_VSSE16V 0xfc00707f
+#define MATCH_VSSE32V 0x08006027
+#define MASK_VSSE32V 0xfc00707f
+#define MATCH_VSSE64V 0x08007027
+#define MASK_VSSE64V 0xfc00707f
+
+#define MATCH_VLOXEI8V 0x0c000007
+#define MASK_VLOXEI8V 0xfc00707f
+#define MATCH_VLOXEI16V 0x0c005007
+#define MASK_VLOXEI16V 0xfc00707f
+#define MATCH_VLOXEI32V 0x0c006007
+#define MASK_VLOXEI32V 0xfc00707f
+#define MATCH_VLOXEI64V 0x0c007007
+#define MASK_VLOXEI64V 0xfc00707f
+
+#define MATCH_VSOXEI8V 0x0c000027
+#define MASK_VSOXEI8V 0xfc00707f
+#define MATCH_VSOXEI16V 0x0c005027
+#define MASK_VSOXEI16V 0xfc00707f
+#define MATCH_VSOXEI32V 0x0c006027
+#define MASK_VSOXEI32V 0xfc00707f
+#define MATCH_VSOXEI64V 0x0c007027
+#define MASK_VSOXEI64V 0xfc00707f
+
+#define MATCH_VLUXEI8V 0x04000007
+#define MASK_VLUXEI8V 0xfc00707f
+#define MATCH_VLUXEI16V 0x04005007
+#define MASK_VLUXEI16V 0xfc00707f
+#define MATCH_VLUXEI32V 0x04006007
+#define MASK_VLUXEI32V 0xfc00707f
+#define MATCH_VLUXEI64V 0x04007007
+#define MASK_VLUXEI64V 0xfc00707f
+
+#define MATCH_VSUXEI8V 0x04000027
+#define MASK_VSUXEI8V 0xfc00707f
+#define MATCH_VSUXEI16V 0x04005027
+#define MASK_VSUXEI16V 0xfc00707f
+#define MATCH_VSUXEI32V 0x04006027
+#define MASK_VSUXEI32V 0xfc00707f
+#define MATCH_VSUXEI64V 0x04007027
+#define MASK_VSUXEI64V 0xfc00707f
+
+#define MATCH_VLE8FFV 0x01000007
+#define MASK_VLE8FFV 0xfdf0707f
+#define MATCH_VLE16FFV 0x01005007
+#define MASK_VLE16FFV 0xfdf0707f
+#define MATCH_VLE32FFV 0x01006007
+#define MASK_VLE32FFV 0xfdf0707f
+#define MATCH_VLE64FFV 0x01007007
+#define MASK_VLE64FFV 0xfdf0707f
+
+#define MATCH_VLSEG2E8V 0x20000007
+#define MASK_VLSEG2E8V 0xfdf0707f
+#define MATCH_VSSEG2E8V 0x20000027
+#define MASK_VSSEG2E8V 0xfdf0707f
+#define MATCH_VLSEG3E8V 0x40000007
+#define MASK_VLSEG3E8V 0xfdf0707f
+#define MATCH_VSSEG3E8V 0x40000027
+#define MASK_VSSEG3E8V 0xfdf0707f
+#define MATCH_VLSEG4E8V 0x60000007
+#define MASK_VLSEG4E8V 0xfdf0707f
+#define MATCH_VSSEG4E8V 0x60000027
+#define MASK_VSSEG4E8V 0xfdf0707f
+#define MATCH_VLSEG5E8V 0x80000007
+#define MASK_VLSEG5E8V 0xfdf0707f
+#define MATCH_VSSEG5E8V 0x80000027
+#define MASK_VSSEG5E8V 0xfdf0707f
+#define MATCH_VLSEG6E8V 0xa0000007
+#define MASK_VLSEG6E8V 0xfdf0707f
+#define MATCH_VSSEG6E8V 0xa0000027
+#define MASK_VSSEG6E8V 0xfdf0707f
+#define MATCH_VLSEG7E8V 0xc0000007
+#define MASK_VLSEG7E8V 0xfdf0707f
+#define MATCH_VSSEG7E8V 0xc0000027
+#define MASK_VSSEG7E8V 0xfdf0707f
+#define MATCH_VLSEG8E8V 0xe0000007
+#define MASK_VLSEG8E8V 0xfdf0707f
+#define MATCH_VSSEG8E8V 0xe0000027
+#define MASK_VSSEG8E8V 0xfdf0707f
+
+#define MATCH_VLSEG2E16V 0x20005007
+#define MASK_VLSEG2E16V 0xfdf0707f
+#define MATCH_VSSEG2E16V 0x20005027
+#define MASK_VSSEG2E16V 0xfdf0707f
+#define MATCH_VLSEG3E16V 0x40005007
+#define MASK_VLSEG3E16V 0xfdf0707f
+#define MATCH_VSSEG3E16V 0x40005027
+#define MASK_VSSEG3E16V 0xfdf0707f
+#define MATCH_VLSEG4E16V 0x60005007
+#define MASK_VLSEG4E16V 0xfdf0707f
+#define MATCH_VSSEG4E16V 0x60005027
+#define MASK_VSSEG4E16V 0xfdf0707f
+#define MATCH_VLSEG5E16V 0x80005007
+#define MASK_VLSEG5E16V 0xfdf0707f
+#define MATCH_VSSEG5E16V 0x80005027
+#define MASK_VSSEG5E16V 0xfdf0707f
+#define MATCH_VLSEG6E16V 0xa0005007
+#define MASK_VLSEG6E16V 0xfdf0707f
+#define MATCH_VSSEG6E16V 0xa0005027
+#define MASK_VSSEG6E16V 0xfdf0707f
+#define MATCH_VLSEG7E16V 0xc0005007
+#define MASK_VLSEG7E16V 0xfdf0707f
+#define MATCH_VSSEG7E16V 0xc0005027
+#define MASK_VSSEG7E16V 0xfdf0707f
+#define MATCH_VLSEG8E16V 0xe0005007
+#define MASK_VLSEG8E16V 0xfdf0707f
+#define MATCH_VSSEG8E16V 0xe0005027
+#define MASK_VSSEG8E16V 0xfdf0707f
+
+#define MATCH_VLSEG2E32V 0x20006007
+#define MASK_VLSEG2E32V 0xfdf0707f
+#define MATCH_VSSEG2E32V 0x20006027
+#define MASK_VSSEG2E32V 0xfdf0707f
+#define MATCH_VLSEG3E32V 0x40006007
+#define MASK_VLSEG3E32V 0xfdf0707f
+#define MATCH_VSSEG3E32V 0x40006027
+#define MASK_VSSEG3E32V 0xfdf0707f
+#define MATCH_VLSEG4E32V 0x60006007
+#define MASK_VLSEG4E32V 0xfdf0707f
+#define MATCH_VSSEG4E32V 0x60006027
+#define MASK_VSSEG4E32V 0xfdf0707f
+#define MATCH_VLSEG5E32V 0x80006007
+#define MASK_VLSEG5E32V 0xfdf0707f
+#define MATCH_VSSEG5E32V 0x80006027
+#define MASK_VSSEG5E32V 0xfdf0707f
+#define MATCH_VLSEG6E32V 0xa0006007
+#define MASK_VLSEG6E32V 0xfdf0707f
+#define MATCH_VSSEG6E32V 0xa0006027
+#define MASK_VSSEG6E32V 0xfdf0707f
+#define MATCH_VLSEG7E32V 0xc0006007
+#define MASK_VLSEG7E32V 0xfdf0707f
+#define MATCH_VSSEG7E32V 0xc0006027
+#define MASK_VSSEG7E32V 0xfdf0707f
+#define MATCH_VLSEG8E32V 0xe0006007
+#define MASK_VLSEG8E32V 0xfdf0707f
+#define MATCH_VSSEG8E32V 0xe0006027
+#define MASK_VSSEG8E32V 0xfdf0707f
+
+#define MATCH_VLSEG2E64V 0x20007007
+#define MASK_VLSEG2E64V 0xfdf0707f
+#define MATCH_VSSEG2E64V 0x20007027
+#define MASK_VSSEG2E64V 0xfdf0707f
+#define MATCH_VLSEG3E64V 0x40007007
+#define MASK_VLSEG3E64V 0xfdf0707f
+#define MATCH_VSSEG3E64V 0x40007027
+#define MASK_VSSEG3E64V 0xfdf0707f
+#define MATCH_VLSEG4E64V 0x60007007
+#define MASK_VLSEG4E64V 0xfdf0707f
+#define MATCH_VSSEG4E64V 0x60007027
+#define MASK_VSSEG4E64V 0xfdf0707f
+#define MATCH_VLSEG5E64V 0x80007007
+#define MASK_VLSEG5E64V 0xfdf0707f
+#define MATCH_VSSEG5E64V 0x80007027
+#define MASK_VSSEG5E64V 0xfdf0707f
+#define MATCH_VLSEG6E64V 0xa0007007
+#define MASK_VLSEG6E64V 0xfdf0707f
+#define MATCH_VSSEG6E64V 0xa0007027
+#define MASK_VSSEG6E64V 0xfdf0707f
+#define MATCH_VLSEG7E64V 0xc0007007
+#define MASK_VLSEG7E64V 0xfdf0707f
+#define MATCH_VSSEG7E64V 0xc0007027
+#define MASK_VSSEG7E64V 0xfdf0707f
+#define MATCH_VLSEG8E64V 0xe0007007
+#define MASK_VLSEG8E64V 0xfdf0707f
+#define MATCH_VSSEG8E64V 0xe0007027
+#define MASK_VSSEG8E64V 0xfdf0707f
+
+#define MATCH_VLSSEG2E8V 0x28000007
+#define MASK_VLSSEG2E8V 0xfc00707f
+#define MATCH_VSSSEG2E8V 0x28000027
+#define MASK_VSSSEG2E8V 0xfc00707f
+#define MATCH_VLSSEG3E8V 0x48000007
+#define MASK_VLSSEG3E8V 0xfc00707f
+#define MATCH_VSSSEG3E8V 0x48000027
+#define MASK_VSSSEG3E8V 0xfc00707f
+#define MATCH_VLSSEG4E8V 0x68000007
+#define MASK_VLSSEG4E8V 0xfc00707f
+#define MATCH_VSSSEG4E8V 0x68000027
+#define MASK_VSSSEG4E8V 0xfc00707f
+#define MATCH_VLSSEG5E8V 0x88000007
+#define MASK_VLSSEG5E8V 0xfc00707f
+#define MATCH_VSSSEG5E8V 0x88000027
+#define MASK_VSSSEG5E8V 0xfc00707f
+#define MATCH_VLSSEG6E8V 0xa8000007
+#define MASK_VLSSEG6E8V 0xfc00707f
+#define MATCH_VSSSEG6E8V 0xa8000027
+#define MASK_VSSSEG6E8V 0xfc00707f
+#define MATCH_VLSSEG7E8V 0xc8000007
+#define MASK_VLSSEG7E8V 0xfc00707f
+#define MATCH_VSSSEG7E8V 0xc8000027
+#define MASK_VSSSEG7E8V 0xfc00707f
+#define MATCH_VLSSEG8E8V 0xe8000007
+#define MASK_VLSSEG8E8V 0xfc00707f
+#define MATCH_VSSSEG8E8V 0xe8000027
+#define MASK_VSSSEG8E8V 0xfc00707f
+
+#define MATCH_VLSSEG2E16V 0x28005007
+#define MASK_VLSSEG2E16V 0xfc00707f
+#define MATCH_VSSSEG2E16V 0x28005027
+#define MASK_VSSSEG2E16V 0xfc00707f
+#define MATCH_VLSSEG3E16V 0x48005007
+#define MASK_VLSSEG3E16V 0xfc00707f
+#define MATCH_VSSSEG3E16V 0x48005027
+#define MASK_VSSSEG3E16V 0xfc00707f
+#define MATCH_VLSSEG4E16V 0x68005007
+#define MASK_VLSSEG4E16V 0xfc00707f
+#define MATCH_VSSSEG4E16V 0x68005027
+#define MASK_VSSSEG4E16V 0xfc00707f
+#define MATCH_VLSSEG5E16V 0x88005007
+#define MASK_VLSSEG5E16V 0xfc00707f
+#define MATCH_VSSSEG5E16V 0x88005027
+#define MASK_VSSSEG5E16V 0xfc00707f
+#define MATCH_VLSSEG6E16V 0xa8005007
+#define MASK_VLSSEG6E16V 0xfc00707f
+#define MATCH_VSSSEG6E16V 0xa8005027
+#define MASK_VSSSEG6E16V 0xfc00707f
+#define MATCH_VLSSEG7E16V 0xc8005007
+#define MASK_VLSSEG7E16V 0xfc00707f
+#define MATCH_VSSSEG7E16V 0xc8005027
+#define MASK_VSSSEG7E16V 0xfc00707f
+#define MATCH_VLSSEG8E16V 0xe8005007
+#define MASK_VLSSEG8E16V 0xfc00707f
+#define MATCH_VSSSEG8E16V 0xe8005027
+#define MASK_VSSSEG8E16V 0xfc00707f
+
+#define MATCH_VLSSEG2E32V 0x28006007
+#define MASK_VLSSEG2E32V 0xfc00707f
+#define MATCH_VSSSEG2E32V 0x28006027
+#define MASK_VSSSEG2E32V 0xfc00707f
+#define MATCH_VLSSEG3E32V 0x48006007
+#define MASK_VLSSEG3E32V 0xfc00707f
+#define MATCH_VSSSEG3E32V 0x48006027
+#define MASK_VSSSEG3E32V 0xfc00707f
+#define MATCH_VLSSEG4E32V 0x68006007
+#define MASK_VLSSEG4E32V 0xfc00707f
+#define MATCH_VSSSEG4E32V 0x68006027
+#define MASK_VSSSEG4E32V 0xfc00707f
+#define MATCH_VLSSEG5E32V 0x88006007
+#define MASK_VLSSEG5E32V 0xfc00707f
+#define MATCH_VSSSEG5E32V 0x88006027
+#define MASK_VSSSEG5E32V 0xfc00707f
+#define MATCH_VLSSEG6E32V 0xa8006007
+#define MASK_VLSSEG6E32V 0xfc00707f
+#define MATCH_VSSSEG6E32V 0xa8006027
+#define MASK_VSSSEG6E32V 0xfc00707f
+#define MATCH_VLSSEG7E32V 0xc8006007
+#define MASK_VLSSEG7E32V 0xfc00707f
+#define MATCH_VSSSEG7E32V 0xc8006027
+#define MASK_VSSSEG7E32V 0xfc00707f
+#define MATCH_VLSSEG8E32V 0xe8006007
+#define MASK_VLSSEG8E32V 0xfc00707f
+#define MATCH_VSSSEG8E32V 0xe8006027
+#define MASK_VSSSEG8E32V 0xfc00707f
+
+#define MATCH_VLSSEG2E64V 0x28007007
+#define MASK_VLSSEG2E64V 0xfc00707f
+#define MATCH_VSSSEG2E64V 0x28007027
+#define MASK_VSSSEG2E64V 0xfc00707f
+#define MATCH_VLSSEG3E64V 0x48007007
+#define MASK_VLSSEG3E64V 0xfc00707f
+#define MATCH_VSSSEG3E64V 0x48007027
+#define MASK_VSSSEG3E64V 0xfc00707f
+#define MATCH_VLSSEG4E64V 0x68007007
+#define MASK_VLSSEG4E64V 0xfc00707f
+#define MATCH_VSSSEG4E64V 0x68007027
+#define MASK_VSSSEG4E64V 0xfc00707f
+#define MATCH_VLSSEG5E64V 0x88007007
+#define MASK_VLSSEG5E64V 0xfc00707f
+#define MATCH_VSSSEG5E64V 0x88007027
+#define MASK_VSSSEG5E64V 0xfc00707f
+#define MATCH_VLSSEG6E64V 0xa8007007
+#define MASK_VLSSEG6E64V 0xfc00707f
+#define MATCH_VSSSEG6E64V 0xa8007027
+#define MASK_VSSSEG6E64V 0xfc00707f
+#define MATCH_VLSSEG7E64V 0xc8007007
+#define MASK_VLSSEG7E64V 0xfc00707f
+#define MATCH_VSSSEG7E64V 0xc8007027
+#define MASK_VSSSEG7E64V 0xfc00707f
+#define MATCH_VLSSEG8E64V 0xe8007007
+#define MASK_VLSSEG8E64V 0xfc00707f
+#define MATCH_VSSSEG8E64V 0xe8007027
+#define MASK_VSSSEG8E64V 0xfc00707f
+
+#define MATCH_VLOXSEG2EI8V 0x2c000007
+#define MASK_VLOXSEG2EI8V 0xfc00707f
+#define MATCH_VSOXSEG2EI8V 0x2c000027
+#define MASK_VSOXSEG2EI8V 0xfc00707f
+#define MATCH_VLOXSEG3EI8V 0x4c000007
+#define MASK_VLOXSEG3EI8V 0xfc00707f
+#define MATCH_VSOXSEG3EI8V 0x4c000027
+#define MASK_VSOXSEG3EI8V 0xfc00707f
+#define MATCH_VLOXSEG4EI8V 0x6c000007
+#define MASK_VLOXSEG4EI8V 0xfc00707f
+#define MATCH_VSOXSEG4EI8V 0x6c000027
+#define MASK_VSOXSEG4EI8V 0xfc00707f
+#define MATCH_VLOXSEG5EI8V 0x8c000007
+#define MASK_VLOXSEG5EI8V 0xfc00707f
+#define MATCH_VSOXSEG5EI8V 0x8c000027
+#define MASK_VSOXSEG5EI8V 0xfc00707f
+#define MATCH_VLOXSEG6EI8V 0xac000007
+#define MASK_VLOXSEG6EI8V 0xfc00707f
+#define MATCH_VSOXSEG6EI8V 0xac000027
+#define MASK_VSOXSEG6EI8V 0xfc00707f
+#define MATCH_VLOXSEG7EI8V 0xcc000007
+#define MASK_VLOXSEG7EI8V 0xfc00707f
+#define MATCH_VSOXSEG7EI8V 0xcc000027
+#define MASK_VSOXSEG7EI8V 0xfc00707f
+#define MATCH_VLOXSEG8EI8V 0xec000007
+#define MASK_VLOXSEG8EI8V 0xfc00707f
+#define MATCH_VSOXSEG8EI8V 0xec000027
+#define MASK_VSOXSEG8EI8V 0xfc00707f
+
+#define MATCH_VLUXSEG2EI8V 0x24000007
+#define MASK_VLUXSEG2EI8V 0xfc00707f
+#define MATCH_VSUXSEG2EI8V 0x24000027
+#define MASK_VSUXSEG2EI8V 0xfc00707f
+#define MATCH_VLUXSEG3EI8V 0x44000007
+#define MASK_VLUXSEG3EI8V 0xfc00707f
+#define MATCH_VSUXSEG3EI8V 0x44000027
+#define MASK_VSUXSEG3EI8V 0xfc00707f
+#define MATCH_VLUXSEG4EI8V 0x64000007
+#define MASK_VLUXSEG4EI8V 0xfc00707f
+#define MATCH_VSUXSEG4EI8V 0x64000027
+#define MASK_VSUXSEG4EI8V 0xfc00707f
+#define MATCH_VLUXSEG5EI8V 0x84000007
+#define MASK_VLUXSEG5EI8V 0xfc00707f
+#define MATCH_VSUXSEG5EI8V 0x84000027
+#define MASK_VSUXSEG5EI8V 0xfc00707f
+#define MATCH_VLUXSEG6EI8V 0xa4000007
+#define MASK_VLUXSEG6EI8V 0xfc00707f
+#define MATCH_VSUXSEG6EI8V 0xa4000027
+#define MASK_VSUXSEG6EI8V 0xfc00707f
+#define MATCH_VLUXSEG7EI8V 0xc4000007
+#define MASK_VLUXSEG7EI8V 0xfc00707f
+#define MATCH_VSUXSEG7EI8V 0xc4000027
+#define MASK_VSUXSEG7EI8V 0xfc00707f
+#define MATCH_VLUXSEG8EI8V 0xe4000007
+#define MASK_VLUXSEG8EI8V 0xfc00707f
+#define MATCH_VSUXSEG8EI8V 0xe4000027
+#define MASK_VSUXSEG8EI8V 0xfc00707f
+
+#define MATCH_VLOXSEG2EI16V 0x2c005007
+#define MASK_VLOXSEG2EI16V 0xfc00707f
+#define MATCH_VSOXSEG2EI16V 0x2c005027
+#define MASK_VSOXSEG2EI16V 0xfc00707f
  1174. +#define MATCH_VLOXSEG3EI16V 0x4c005007
  1175. +#define MASK_VLOXSEG3EI16V 0xfc00707f
  1176. +#define MATCH_VSOXSEG3EI16V 0x4c005027
  1177. +#define MASK_VSOXSEG3EI16V 0xfc00707f
  1178. +#define MATCH_VLOXSEG4EI16V 0x6c005007
  1179. +#define MASK_VLOXSEG4EI16V 0xfc00707f
  1180. +#define MATCH_VSOXSEG4EI16V 0x6c005027
  1181. +#define MASK_VSOXSEG4EI16V 0xfc00707f
  1182. +#define MATCH_VLOXSEG5EI16V 0x8c005007
  1183. +#define MASK_VLOXSEG5EI16V 0xfc00707f
  1184. +#define MATCH_VSOXSEG5EI16V 0x8c005027
  1185. +#define MASK_VSOXSEG5EI16V 0xfc00707f
  1186. +#define MATCH_VLOXSEG6EI16V 0xac005007
  1187. +#define MASK_VLOXSEG6EI16V 0xfc00707f
  1188. +#define MATCH_VSOXSEG6EI16V 0xac005027
  1189. +#define MASK_VSOXSEG6EI16V 0xfc00707f
  1190. +#define MATCH_VLOXSEG7EI16V 0xcc005007
  1191. +#define MASK_VLOXSEG7EI16V 0xfc00707f
  1192. +#define MATCH_VSOXSEG7EI16V 0xcc005027
  1193. +#define MASK_VSOXSEG7EI16V 0xfc00707f
  1194. +#define MATCH_VLOXSEG8EI16V 0xec005007
  1195. +#define MASK_VLOXSEG8EI16V 0xfc00707f
  1196. +#define MATCH_VSOXSEG8EI16V 0xec005027
  1197. +#define MASK_VSOXSEG8EI16V 0xfc00707f
  1198. +
  1199. +#define MATCH_VLUXSEG2EI16V 0x24005007
  1200. +#define MASK_VLUXSEG2EI16V 0xfc00707f
  1201. +#define MATCH_VSUXSEG2EI16V 0x24005027
  1202. +#define MASK_VSUXSEG2EI16V 0xfc00707f
  1203. +#define MATCH_VLUXSEG3EI16V 0x44005007
  1204. +#define MASK_VLUXSEG3EI16V 0xfc00707f
  1205. +#define MATCH_VSUXSEG3EI16V 0x44005027
  1206. +#define MASK_VSUXSEG3EI16V 0xfc00707f
  1207. +#define MATCH_VLUXSEG4EI16V 0x64005007
  1208. +#define MASK_VLUXSEG4EI16V 0xfc00707f
  1209. +#define MATCH_VSUXSEG4EI16V 0x64005027
  1210. +#define MASK_VSUXSEG4EI16V 0xfc00707f
  1211. +#define MATCH_VLUXSEG5EI16V 0x84005007
  1212. +#define MASK_VLUXSEG5EI16V 0xfc00707f
  1213. +#define MATCH_VSUXSEG5EI16V 0x84005027
  1214. +#define MASK_VSUXSEG5EI16V 0xfc00707f
  1215. +#define MATCH_VLUXSEG6EI16V 0xa4005007
  1216. +#define MASK_VLUXSEG6EI16V 0xfc00707f
  1217. +#define MATCH_VSUXSEG6EI16V 0xa4005027
  1218. +#define MASK_VSUXSEG6EI16V 0xfc00707f
  1219. +#define MATCH_VLUXSEG7EI16V 0xc4005007
  1220. +#define MASK_VLUXSEG7EI16V 0xfc00707f
  1221. +#define MATCH_VSUXSEG7EI16V 0xc4005027
  1222. +#define MASK_VSUXSEG7EI16V 0xfc00707f
  1223. +#define MATCH_VLUXSEG8EI16V 0xe4005007
  1224. +#define MASK_VLUXSEG8EI16V 0xfc00707f
  1225. +#define MATCH_VSUXSEG8EI16V 0xe4005027
  1226. +#define MASK_VSUXSEG8EI16V 0xfc00707f
  1227. +
  1228. +#define MATCH_VLOXSEG2EI32V 0x2c006007
  1229. +#define MASK_VLOXSEG2EI32V 0xfc00707f
  1230. +#define MATCH_VSOXSEG2EI32V 0x2c006027
  1231. +#define MASK_VSOXSEG2EI32V 0xfc00707f
  1232. +#define MATCH_VLOXSEG3EI32V 0x4c006007
  1233. +#define MASK_VLOXSEG3EI32V 0xfc00707f
  1234. +#define MATCH_VSOXSEG3EI32V 0x4c006027
  1235. +#define MASK_VSOXSEG3EI32V 0xfc00707f
  1236. +#define MATCH_VLOXSEG4EI32V 0x6c006007
  1237. +#define MASK_VLOXSEG4EI32V 0xfc00707f
  1238. +#define MATCH_VSOXSEG4EI32V 0x6c006027
  1239. +#define MASK_VSOXSEG4EI32V 0xfc00707f
  1240. +#define MATCH_VLOXSEG5EI32V 0x8c006007
  1241. +#define MASK_VLOXSEG5EI32V 0xfc00707f
  1242. +#define MATCH_VSOXSEG5EI32V 0x8c006027
  1243. +#define MASK_VSOXSEG5EI32V 0xfc00707f
  1244. +#define MATCH_VLOXSEG6EI32V 0xac006007
  1245. +#define MASK_VLOXSEG6EI32V 0xfc00707f
  1246. +#define MATCH_VSOXSEG6EI32V 0xac006027
  1247. +#define MASK_VSOXSEG6EI32V 0xfc00707f
  1248. +#define MATCH_VLOXSEG7EI32V 0xcc006007
  1249. +#define MASK_VLOXSEG7EI32V 0xfc00707f
  1250. +#define MATCH_VSOXSEG7EI32V 0xcc006027
  1251. +#define MASK_VSOXSEG7EI32V 0xfc00707f
  1252. +#define MATCH_VLOXSEG8EI32V 0xec006007
  1253. +#define MASK_VLOXSEG8EI32V 0xfc00707f
  1254. +#define MATCH_VSOXSEG8EI32V 0xec006027
  1255. +#define MASK_VSOXSEG8EI32V 0xfc00707f
  1256. +
  1257. +#define MATCH_VLUXSEG2EI32V 0x24006007
  1258. +#define MASK_VLUXSEG2EI32V 0xfc00707f
  1259. +#define MATCH_VSUXSEG2EI32V 0x24006027
  1260. +#define MASK_VSUXSEG2EI32V 0xfc00707f
  1261. +#define MATCH_VLUXSEG3EI32V 0x44006007
  1262. +#define MASK_VLUXSEG3EI32V 0xfc00707f
  1263. +#define MATCH_VSUXSEG3EI32V 0x44006027
  1264. +#define MASK_VSUXSEG3EI32V 0xfc00707f
  1265. +#define MATCH_VLUXSEG4EI32V 0x64006007
  1266. +#define MASK_VLUXSEG4EI32V 0xfc00707f
  1267. +#define MATCH_VSUXSEG4EI32V 0x64006027
  1268. +#define MASK_VSUXSEG4EI32V 0xfc00707f
  1269. +#define MATCH_VLUXSEG5EI32V 0x84006007
  1270. +#define MASK_VLUXSEG5EI32V 0xfc00707f
  1271. +#define MATCH_VSUXSEG5EI32V 0x84006027
  1272. +#define MASK_VSUXSEG5EI32V 0xfc00707f
  1273. +#define MATCH_VLUXSEG6EI32V 0xa4006007
  1274. +#define MASK_VLUXSEG6EI32V 0xfc00707f
  1275. +#define MATCH_VSUXSEG6EI32V 0xa4006027
  1276. +#define MASK_VSUXSEG6EI32V 0xfc00707f
  1277. +#define MATCH_VLUXSEG7EI32V 0xc4006007
  1278. +#define MASK_VLUXSEG7EI32V 0xfc00707f
  1279. +#define MATCH_VSUXSEG7EI32V 0xc4006027
  1280. +#define MASK_VSUXSEG7EI32V 0xfc00707f
  1281. +#define MATCH_VLUXSEG8EI32V 0xe4006007
  1282. +#define MASK_VLUXSEG8EI32V 0xfc00707f
  1283. +#define MATCH_VSUXSEG8EI32V 0xe4006027
  1284. +#define MASK_VSUXSEG8EI32V 0xfc00707f
  1285. +
  1286. +#define MATCH_VLOXSEG2EI64V 0x2c007007
  1287. +#define MASK_VLOXSEG2EI64V 0xfc00707f
  1288. +#define MATCH_VSOXSEG2EI64V 0x2c007027
  1289. +#define MASK_VSOXSEG2EI64V 0xfc00707f
  1290. +#define MATCH_VLOXSEG3EI64V 0x4c007007
  1291. +#define MASK_VLOXSEG3EI64V 0xfc00707f
  1292. +#define MATCH_VSOXSEG3EI64V 0x4c007027
  1293. +#define MASK_VSOXSEG3EI64V 0xfc00707f
  1294. +#define MATCH_VLOXSEG4EI64V 0x6c007007
  1295. +#define MASK_VLOXSEG4EI64V 0xfc00707f
  1296. +#define MATCH_VSOXSEG4EI64V 0x6c007027
  1297. +#define MASK_VSOXSEG4EI64V 0xfc00707f
  1298. +#define MATCH_VLOXSEG5EI64V 0x8c007007
  1299. +#define MASK_VLOXSEG5EI64V 0xfc00707f
  1300. +#define MATCH_VSOXSEG5EI64V 0x8c007027
  1301. +#define MASK_VSOXSEG5EI64V 0xfc00707f
  1302. +#define MATCH_VLOXSEG6EI64V 0xac007007
  1303. +#define MASK_VLOXSEG6EI64V 0xfc00707f
  1304. +#define MATCH_VSOXSEG6EI64V 0xac007027
  1305. +#define MASK_VSOXSEG6EI64V 0xfc00707f
  1306. +#define MATCH_VLOXSEG7EI64V 0xcc007007
  1307. +#define MASK_VLOXSEG7EI64V 0xfc00707f
  1308. +#define MATCH_VSOXSEG7EI64V 0xcc007027
  1309. +#define MASK_VSOXSEG7EI64V 0xfc00707f
  1310. +#define MATCH_VLOXSEG8EI64V 0xec007007
  1311. +#define MASK_VLOXSEG8EI64V 0xfc00707f
  1312. +#define MATCH_VSOXSEG8EI64V 0xec007027
  1313. +#define MASK_VSOXSEG8EI64V 0xfc00707f
  1314. +
  1315. +#define MATCH_VLUXSEG2EI64V 0x24007007
  1316. +#define MASK_VLUXSEG2EI64V 0xfc00707f
  1317. +#define MATCH_VSUXSEG2EI64V 0x24007027
  1318. +#define MASK_VSUXSEG2EI64V 0xfc00707f
  1319. +#define MATCH_VLUXSEG3EI64V 0x44007007
  1320. +#define MASK_VLUXSEG3EI64V 0xfc00707f
  1321. +#define MATCH_VSUXSEG3EI64V 0x44007027
  1322. +#define MASK_VSUXSEG3EI64V 0xfc00707f
  1323. +#define MATCH_VLUXSEG4EI64V 0x64007007
  1324. +#define MASK_VLUXSEG4EI64V 0xfc00707f
  1325. +#define MATCH_VSUXSEG4EI64V 0x64007027
  1326. +#define MASK_VSUXSEG4EI64V 0xfc00707f
  1327. +#define MATCH_VLUXSEG5EI64V 0x84007007
  1328. +#define MASK_VLUXSEG5EI64V 0xfc00707f
  1329. +#define MATCH_VSUXSEG5EI64V 0x84007027
  1330. +#define MASK_VSUXSEG5EI64V 0xfc00707f
  1331. +#define MATCH_VLUXSEG6EI64V 0xa4007007
  1332. +#define MASK_VLUXSEG6EI64V 0xfc00707f
  1333. +#define MATCH_VSUXSEG6EI64V 0xa4007027
  1334. +#define MASK_VSUXSEG6EI64V 0xfc00707f
  1335. +#define MATCH_VLUXSEG7EI64V 0xc4007007
  1336. +#define MASK_VLUXSEG7EI64V 0xfc00707f
  1337. +#define MATCH_VSUXSEG7EI64V 0xc4007027
  1338. +#define MASK_VSUXSEG7EI64V 0xfc00707f
  1339. +#define MATCH_VLUXSEG8EI64V 0xe4007007
  1340. +#define MASK_VLUXSEG8EI64V 0xfc00707f
  1341. +#define MATCH_VSUXSEG8EI64V 0xe4007027
  1342. +#define MASK_VSUXSEG8EI64V 0xfc00707f
  1343. +
  1344. +#define MATCH_VLSEG2E8FFV 0x21000007
  1345. +#define MASK_VLSEG2E8FFV 0xfdf0707f
  1346. +#define MATCH_VLSEG3E8FFV 0x41000007
  1347. +#define MASK_VLSEG3E8FFV 0xfdf0707f
  1348. +#define MATCH_VLSEG4E8FFV 0x61000007
  1349. +#define MASK_VLSEG4E8FFV 0xfdf0707f
  1350. +#define MATCH_VLSEG5E8FFV 0x81000007
  1351. +#define MASK_VLSEG5E8FFV 0xfdf0707f
  1352. +#define MATCH_VLSEG6E8FFV 0xa1000007
  1353. +#define MASK_VLSEG6E8FFV 0xfdf0707f
  1354. +#define MATCH_VLSEG7E8FFV 0xc1000007
  1355. +#define MASK_VLSEG7E8FFV 0xfdf0707f
  1356. +#define MATCH_VLSEG8E8FFV 0xe1000007
  1357. +#define MASK_VLSEG8E8FFV 0xfdf0707f
  1358. +
  1359. +#define MATCH_VLSEG2E16FFV 0x21005007
  1360. +#define MASK_VLSEG2E16FFV 0xfdf0707f
  1361. +#define MATCH_VLSEG3E16FFV 0x41005007
  1362. +#define MASK_VLSEG3E16FFV 0xfdf0707f
  1363. +#define MATCH_VLSEG4E16FFV 0x61005007
  1364. +#define MASK_VLSEG4E16FFV 0xfdf0707f
  1365. +#define MATCH_VLSEG5E16FFV 0x81005007
  1366. +#define MASK_VLSEG5E16FFV 0xfdf0707f
  1367. +#define MATCH_VLSEG6E16FFV 0xa1005007
  1368. +#define MASK_VLSEG6E16FFV 0xfdf0707f
  1369. +#define MATCH_VLSEG7E16FFV 0xc1005007
  1370. +#define MASK_VLSEG7E16FFV 0xfdf0707f
  1371. +#define MATCH_VLSEG8E16FFV 0xe1005007
  1372. +#define MASK_VLSEG8E16FFV 0xfdf0707f
  1373. +
  1374. +#define MATCH_VLSEG2E32FFV 0x21006007
  1375. +#define MASK_VLSEG2E32FFV 0xfdf0707f
  1376. +#define MATCH_VLSEG3E32FFV 0x41006007
  1377. +#define MASK_VLSEG3E32FFV 0xfdf0707f
  1378. +#define MATCH_VLSEG4E32FFV 0x61006007
  1379. +#define MASK_VLSEG4E32FFV 0xfdf0707f
  1380. +#define MATCH_VLSEG5E32FFV 0x81006007
  1381. +#define MASK_VLSEG5E32FFV 0xfdf0707f
  1382. +#define MATCH_VLSEG6E32FFV 0xa1006007
  1383. +#define MASK_VLSEG6E32FFV 0xfdf0707f
  1384. +#define MATCH_VLSEG7E32FFV 0xc1006007
  1385. +#define MASK_VLSEG7E32FFV 0xfdf0707f
  1386. +#define MATCH_VLSEG8E32FFV 0xe1006007
  1387. +#define MASK_VLSEG8E32FFV 0xfdf0707f
  1388. +
  1389. +#define MATCH_VLSEG2E64FFV 0x21007007
  1390. +#define MASK_VLSEG2E64FFV 0xfdf0707f
  1391. +#define MATCH_VLSEG3E64FFV 0x41007007
  1392. +#define MASK_VLSEG3E64FFV 0xfdf0707f
  1393. +#define MATCH_VLSEG4E64FFV 0x61007007
  1394. +#define MASK_VLSEG4E64FFV 0xfdf0707f
  1395. +#define MATCH_VLSEG5E64FFV 0x81007007
  1396. +#define MASK_VLSEG5E64FFV 0xfdf0707f
  1397. +#define MATCH_VLSEG6E64FFV 0xa1007007
  1398. +#define MASK_VLSEG6E64FFV 0xfdf0707f
  1399. +#define MATCH_VLSEG7E64FFV 0xc1007007
  1400. +#define MASK_VLSEG7E64FFV 0xfdf0707f
  1401. +#define MATCH_VLSEG8E64FFV 0xe1007007
  1402. +#define MASK_VLSEG8E64FFV 0xfdf0707f
  1403. +
  1404. +#define MATCH_VL1RE8V 0x02800007
  1405. +#define MASK_VL1RE8V 0xfff0707f
  1406. +#define MATCH_VL1RE16V 0x02805007
  1407. +#define MASK_VL1RE16V 0xfff0707f
  1408. +#define MATCH_VL1RE32V 0x02806007
  1409. +#define MASK_VL1RE32V 0xfff0707f
  1410. +#define MATCH_VL1RE64V 0x02807007
  1411. +#define MASK_VL1RE64V 0xfff0707f
  1412. +
  1413. +#define MATCH_VL2RE8V 0x22800007
  1414. +#define MASK_VL2RE8V 0xfff0707f
  1415. +#define MATCH_VL2RE16V 0x22805007
  1416. +#define MASK_VL2RE16V 0xfff0707f
  1417. +#define MATCH_VL2RE32V 0x22806007
  1418. +#define MASK_VL2RE32V 0xfff0707f
  1419. +#define MATCH_VL2RE64V 0x22807007
  1420. +#define MASK_VL2RE64V 0xfff0707f
  1421. +
  1422. +#define MATCH_VL4RE8V 0x62800007
  1423. +#define MASK_VL4RE8V 0xfff0707f
  1424. +#define MATCH_VL4RE16V 0x62805007
  1425. +#define MASK_VL4RE16V 0xfff0707f
  1426. +#define MATCH_VL4RE32V 0x62806007
  1427. +#define MASK_VL4RE32V 0xfff0707f
  1428. +#define MATCH_VL4RE64V 0x62807007
  1429. +#define MASK_VL4RE64V 0xfff0707f
  1430. +
  1431. +#define MATCH_VL8RE8V 0xe2800007
  1432. +#define MASK_VL8RE8V 0xfff0707f
  1433. +#define MATCH_VL8RE16V 0xe2805007
  1434. +#define MASK_VL8RE16V 0xfff0707f
  1435. +#define MATCH_VL8RE32V 0xe2806007
  1436. +#define MASK_VL8RE32V 0xfff0707f
  1437. +#define MATCH_VL8RE64V 0xe2807007
  1438. +#define MASK_VL8RE64V 0xfff0707f
  1439. +
  1440. +#define MATCH_VS1RV 0x02800027
  1441. +#define MASK_VS1RV 0xfff0707f
  1442. +#define MATCH_VS2RV 0x22800027
  1443. +#define MASK_VS2RV 0xfff0707f
  1444. +#define MATCH_VS4RV 0x62800027
  1445. +#define MASK_VS4RV 0xfff0707f
  1446. +#define MATCH_VS8RV 0xe2800027
  1447. +#define MASK_VS8RV 0xfff0707f
  1448. +
  1449. +/* Temporary AMO encoding info
  1450. +
  1451. +width
  1452. +010 AMO*.W
  1453. +011 AMO*.D
  1454. +100 AMO*.Q
  1455. +000 VAMO*EI8.V
  1456. +101 VAMO*EI16.V
  1457. +110 VAMO*EI32.V
  1458. +111 VAMO*EI64.V
  1459. +
  1460. +amoop
  1461. +00001 vamoswap
  1462. +00000 vamoadd
  1463. +00100 vamoxor
  1464. +01100 vamoand
  1465. +01000 vamoor
  1466. +10000 vamomin
  1467. +10100 vamomax
  1468. +11000 vamominu
  1469. +11100 vamomaxu
  1470. +
  1471. + 31-27 26 25 24-20 19-15 14-12 11-7 6-0
  1472. + amoop wd vm vs2 rs1 width vs3/vd opcode
  1473. + 00001 x x xxxxx xxxxx 110 xxxxx 0101111
  1474. + 0000 1xxx xxxx xxxx x110 xxxx x010 1111
  1475. + 1111 1000 0000 0000 0111 0000 0111 1111 */
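/* Illustrative sketch, not from the branch: the MATCH_VAMO* values
   below decompose into the amoop/width fields tabulated above.  The
   two extractors are hypothetical helpers for checking that
   decomposition.  */

#include <stdint.h>

static inline uint32_t rvv_amo_amoop (uint32_t insn)
{
  return (insn >> 27) & 0x1f;   /* amoop, bits 31-27 */
}

static inline uint32_t rvv_amo_width (uint32_t insn)
{
  return (insn >> 12) & 0x7;    /* width, bits 14-12 */
}

/* For example, rvv_amo_amoop (0x0800602f) == 0x01 (vamoswap) and
   rvv_amo_width (0x0800602f) == 0x6 (VAMO*EI32.V), agreeing with
   MATCH_VAMOSWAPEI32V below.  */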
  1476. +
  1477. +#define MATCH_VAMOADDEI8V 0x0000002f
  1478. +#define MASK_VAMOADDEI8V 0xf800707f
  1479. +#define MATCH_VAMOSWAPEI8V 0x0800002f
  1480. +#define MASK_VAMOSWAPEI8V 0xf800707f
  1481. +#define MATCH_VAMOXOREI8V 0x2000002f
  1482. +#define MASK_VAMOXOREI8V 0xf800707f
  1483. +#define MATCH_VAMOANDEI8V 0x6000002f
  1484. +#define MASK_VAMOANDEI8V 0xf800707f
  1485. +#define MATCH_VAMOOREI8V 0x4000002f
  1486. +#define MASK_VAMOOREI8V 0xf800707f
  1487. +#define MATCH_VAMOMINEI8V 0x8000002f
  1488. +#define MASK_VAMOMINEI8V 0xf800707f
  1489. +#define MATCH_VAMOMAXEI8V 0xa000002f
  1490. +#define MASK_VAMOMAXEI8V 0xf800707f
  1491. +#define MATCH_VAMOMINUEI8V 0xc000002f
  1492. +#define MASK_VAMOMINUEI8V 0xf800707f
  1493. +#define MATCH_VAMOMAXUEI8V 0xe000002f
  1494. +#define MASK_VAMOMAXUEI8V 0xf800707f
  1495. +
  1496. +#define MATCH_VAMOADDEI16V 0x0000502f
  1497. +#define MASK_VAMOADDEI16V 0xf800707f
  1498. +#define MATCH_VAMOSWAPEI16V 0x0800502f
  1499. +#define MASK_VAMOSWAPEI16V 0xf800707f
  1500. +#define MATCH_VAMOXOREI16V 0x2000502f
  1501. +#define MASK_VAMOXOREI16V 0xf800707f
  1502. +#define MATCH_VAMOANDEI16V 0x6000502f
  1503. +#define MASK_VAMOANDEI16V 0xf800707f
  1504. +#define MATCH_VAMOOREI16V 0x4000502f
  1505. +#define MASK_VAMOOREI16V 0xf800707f
  1506. +#define MATCH_VAMOMINEI16V 0x8000502f
  1507. +#define MASK_VAMOMINEI16V 0xf800707f
  1508. +#define MATCH_VAMOMAXEI16V 0xa000502f
  1509. +#define MASK_VAMOMAXEI16V 0xf800707f
  1510. +#define MATCH_VAMOMINUEI16V 0xc000502f
  1511. +#define MASK_VAMOMINUEI16V 0xf800707f
  1512. +#define MATCH_VAMOMAXUEI16V 0xe000502f
  1513. +#define MASK_VAMOMAXUEI16V 0xf800707f
  1514. +
  1515. +#define MATCH_VAMOADDEI32V 0x0000602f
  1516. +#define MASK_VAMOADDEI32V 0xf800707f
  1517. +#define MATCH_VAMOSWAPEI32V 0x0800602f
  1518. +#define MASK_VAMOSWAPEI32V 0xf800707f
  1519. +#define MATCH_VAMOXOREI32V 0x2000602f
  1520. +#define MASK_VAMOXOREI32V 0xf800707f
  1521. +#define MATCH_VAMOANDEI32V 0x6000602f
  1522. +#define MASK_VAMOANDEI32V 0xf800707f
  1523. +#define MATCH_VAMOOREI32V 0x4000602f
  1524. +#define MASK_VAMOOREI32V 0xf800707f
  1525. +#define MATCH_VAMOMINEI32V 0x8000602f
  1526. +#define MASK_VAMOMINEI32V 0xf800707f
  1527. +#define MATCH_VAMOMAXEI32V 0xa000602f
  1528. +#define MASK_VAMOMAXEI32V 0xf800707f
  1529. +#define MATCH_VAMOMINUEI32V 0xc000602f
  1530. +#define MASK_VAMOMINUEI32V 0xf800707f
  1531. +#define MATCH_VAMOMAXUEI32V 0xe000602f
  1532. +#define MASK_VAMOMAXUEI32V 0xf800707f
  1533. +
  1534. +#define MATCH_VAMOADDEI64V 0x0000702f
  1535. +#define MASK_VAMOADDEI64V 0xf800707f
  1536. +#define MATCH_VAMOSWAPEI64V 0x0800702f
  1537. +#define MASK_VAMOSWAPEI64V 0xf800707f
  1538. +#define MATCH_VAMOXOREI64V 0x2000702f
  1539. +#define MASK_VAMOXOREI64V 0xf800707f
  1540. +#define MATCH_VAMOANDEI64V 0x6000702f
  1541. +#define MASK_VAMOANDEI64V 0xf800707f
  1542. +#define MATCH_VAMOOREI64V 0x4000702f
  1543. +#define MASK_VAMOOREI64V 0xf800707f
  1544. +#define MATCH_VAMOMINEI64V 0x8000702f
  1545. +#define MASK_VAMOMINEI64V 0xf800707f
  1546. +#define MATCH_VAMOMAXEI64V 0xa000702f
  1547. +#define MASK_VAMOMAXEI64V 0xf800707f
  1548. +#define MATCH_VAMOMINUEI64V 0xc000702f
  1549. +#define MASK_VAMOMINUEI64V 0xf800707f
  1550. +#define MATCH_VAMOMAXUEI64V 0xe000702f
  1551. +#define MASK_VAMOMAXUEI64V 0xf800707f
  1552. +
  1553. +/* Temporary ALU encoding info
  1554. +
  1555. +funct3
  1556. +000 OPIVV vv
  1557. +001 OPFVV vv
  1558. +010 OPMVV vv
  1559. +011 OPIVI vi simm[4:0]
  1560. +100 OPIVX vx GPR x-reg rs1
  1561. +101 OPFVF vf FP f-reg rs1
  1562. +110 OPMVX vx GPR x-reg rs1
  1563. +111 OPCFG si GPR x-reg rs1 & rs2/imm
  1564. +
  1565. +INT OPI
  1566. +funct6
  1567. +000000 vadd
  1568. +000001
  1569. +000010 vsub
  1570. +000011 vrsub
  1571. +000100 vminu
  1572. +000101 vmin
  1573. +000110 vmaxu
  1574. +000111 vmax
  1575. +001000
  1576. +001001 vand
  1577. +001010 vor
  1578. +001011 vxor
  1579. +001100 vrgather
  1580. +001101
  1581. +001110 vslideup, vrgatherei16
  1582. +001111 vslidedown
  1583. +010000 vadc
  1584. +010001 vmadc
  1585. +010010 vsbc
  1586. +010011 vmsbc
  1587. +010100
  1588. +010101
  1589. +010110
  1590. +010111 vmerge/vmv
  1591. +011000 vmseq
  1592. +011001 vmsne
  1593. +011010 vmsltu
  1594. +011011 vmslt
  1595. +011100 vmsleu
  1596. +011101 vmsle
  1597. +011110 vmsgtu
  1598. +011111 vmsgt
  1599. +100000 vsaddu
  1600. +100001 vsadd
  1601. +100010 vssubu
  1602. +100011 vssub
  1603. +100100
  1604. +100101 vsll
  1605. +100110
  1606. +100111 vmv<nf>r (nf = 1, 2, 4, 8)
  1607. +101000 vsrl
  1608. +101001 vsra
  1609. +101010 vssrl
  1610. +101011 vssra
  1611. +101100 vnsrl
  1612. +101101 vnsra
  1613. +101110 vnclipu
  1614. +101111 vnclip
  1615. +110000 vwredsumu
  1616. +110001 vwredsum
  1617. +110010
  1618. +110011
  1619. +110100
  1620. +110101
  1621. +110110
  1622. +110111
  1623. +111000 vdotu **
  1624. +111001 vdot **
  1625. +111010
  1626. +111011
  1627. +111100 vqmaccu
  1628. +111101 vqmacc
  1629. +111110 vqmaccus
  1630. +111111 vqmaccsu
  1631. +
  1632. +INT OPM
  1633. +funct6
  1634. +000000 vredsum
  1635. +000001 vredand
  1636. +000010 vredor
  1637. +000011 vredxor
  1638. +000100 vredminu
  1639. +000101 vredmin
  1640. +000110 vredmaxu
  1641. +000111 vredmax
  1642. +001000 vaaddu
  1643. +001001 vaadd
  1644. +001010 vasubu
  1645. +001011 vasub
  1646. +001100
  1647. +001101
  1648. +001110 vslide1up
  1649. +001111 vslide1down
  1650. +010000 VRXUNARY0/VWXUNARY0
  1651. +010001
  1652. +010010 VXUNARY0
  1653. +010011
  1654. +010100 VMUNARY0
  1655. +010101
  1656. +010110
  1657. +010111 vcompress
  1658. +011000 vmandnot
  1659. +011001 vmand
  1660. +011010 vmor
  1661. +011011 vmxor
  1662. +011100 vmornot
  1663. +011101 vmnand
  1664. +011110 vmnor
  1665. +011111 vmxnor
  1666. +100000 vdivu
  1667. +100001 vdiv
  1668. +100010 vremu
  1669. +100011 vrem
  1670. +100100 vmulhu
  1671. +100101 vmul
  1672. +100110 vmulhsu
  1673. +100111 vmulh
  1674. +101000
  1675. +101001 vmadd
  1676. +101010
  1677. +101011 vnmsub
  1678. +101100
  1679. +101101 vmacc
  1680. +101110
  1681. +101111 vnmsac
  1682. +110000 vwaddu
  1683. +110001 vwadd
  1684. +110010 vwsubu
  1685. +110011 vwsub
  1686. +110100 vwaddu.w
  1687. +110101 vwadd.w
  1688. +110110 vwsubu.w
  1689. +110111 vwsub.w
  1690. +111000 vwmulu
  1691. +111001
  1692. +111010 vwmulsu
  1693. +111011 vwmul
  1694. +111100 vwmaccu
  1695. +111101 vwmacc
  1696. +111110 vwmaccus
  1697. +111111 vwmaccsu
  1698. +
  1699. +VRXUNARY0
  1700. +vs2, funct3=X
  1701. +00000 vmv.s.x
  1702. +
  1703. +VWXUNARY0
  1704. +vs1, funct3=V
  1705. +00000 vmv.x.s
  1706. +10000 vpopc
  1707. +10001 vfirst
  1708. +
  1709. +VXUNARY0
  1710. +vs1, funct3=V
  1711. +00010 vzext.vf8
  1712. +00011 vsext.vf8
  1713. +00100 vzext.vf4
  1714. +00101 vsext.vf4
  1715. +00110 vzext.vf2
  1716. +00111 vsext.vf2
  1717. +
  1718. +VMUNARY0
  1719. +rs1
  1720. +00001 vmsbf
  1721. +00010 vmsof
  1722. +00011 vmsif
  1723. +10000 viota
  1724. +10001 vid
  1725. +
  1726. +VFLOAT
  1727. +funct6
  1728. +000000 vfadd
  1729. +000001 vfredsum
  1730. +000010 vfsub
  1731. +000011 vfredosum
  1732. +000100 vfmin
  1733. +000101 vfredmin
  1734. +000110 vfmax
  1735. +000111 vfredmax
  1736. +001000 vfsgnj
  1737. +001001 vfsgnjn
  1738. +001010 vfsgnjx
  1739. +001011
  1740. +001100
  1741. +001101
  1742. +001110 vfslide1up
  1743. +001111 vfslide1down
  1744. +010000 VRFUNARY0/VWFUNARY0
  1745. +010001
  1746. +010010 VFUNARY0
  1747. +010011 VFUNARY1
  1748. +010100
  1749. +010101
  1750. +010110
  1751. +010111 vfmerge/vfmv
  1752. +011000 vmfeq
  1753. +011001 vmfle
  1754. +011010
  1755. +011011 vmflt
  1756. +011100 vmfne
  1757. +011101 vmfgt
  1758. +011110
  1759. +011111 vmfge
  1760. +100000 vfdiv
  1761. +100001 vfrdiv
  1762. +100010
  1763. +100011
  1764. +100100 vfmul
  1765. +100101
  1766. +100110
  1767. +100111 vfrsub
  1768. +101000 vfmadd
  1769. +101001 vfnmadd
  1770. +101010 vfmsub
  1771. +101011 vfnmsub
  1772. +101100 vfmacc
  1773. +101101 vfnmacc
  1774. +101110 vfmsac
  1775. +101111 vfnmsac
  1776. +110000 vfwadd
  1777. +110001 vfwredsum
  1778. +110010 vfwsub
  1779. +110011 vfwredosum
  1780. +110100 vfwadd.w
  1781. +110101
  1782. +110110 vfwsub.w
  1783. +110111
  1784. +111000 vfwmul
  1785. +111001 vfdot
  1786. +111010
  1787. +111011
  1788. +111100 vfwmacc
  1789. +111101 vfwnmacc
  1790. +111110 vfwmsac
  1791. +111111 vfwnmsac
  1792. +
  1793. +VRFUNARY0
  1794. +vs2, funct3=F
  1795. +00000 vfmv.s.f
  1796. +
  1797. +VWFUNARY0
  1798. +vs1, funct3=V
  1799. +00000 vfmv.f.s
  1800. +
  1801. +VFUNARY0
  1802. +vs1
  1803. +00000 vfcvt.xu.f.v
  1804. +00001 vfcvt.x.f.v
  1805. +00010 vfcvt.f.xu.v
  1806. +00011 vfcvt.f.x.v
  1807. +00110 vfcvt.rtz.xu.f.v
  1808. +00111 vfcvt.rtz.x.f.v
  1809. +
  1810. +01000 vfwcvt.xu.f.v
  1811. +01001 vfwcvt.x.f.v
  1812. +01010 vfwcvt.f.xu.v
  1813. +01011 vfwcvt.f.x.v
  1814. +01100 vfwcvt.f.f.v
  1815. +01110 vfwcvt.rtz.xu.f.v
  1816. +01111 vfwcvt.rtz.x.f.v
  1817. +
  1818. +10000 vfncvt.xu.f.w
  1819. +10001 vfncvt.x.f.w
  1820. +10010 vfncvt.f.xu.w
  1821. +10011 vfncvt.f.x.w
  1822. +10100 vfncvt.f.f.w
  1823. +10101 vfncvt.rod.f.f.w
  1824. +10110 vfncvt.rtz.xu.f.w
  1825. +10111 vfncvt.rtz.x.f.w
  1826. +
  1827. +VFUNARY1
  1828. +vs1
  1829. +00000 vfsqrt.v
  1830. +00100 vfrsqrt7.v
  1831. +00101 vfrec7.v
  1832. +10000 vfclass.v
  1833. +
  1834. +31-26 25 24-20 19-15 14-12 11-7 6-0
  1835. +funct6 VM VS2 VS1/RS1/IMM funct3 VD opcode
  1836. +010000 x xxxxx 00000 001 xxxxx 1010111
  1837. +0100 00xx xxxx 0000 0001 xxxx x101 0111
  1838. +*/
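/* Illustrative sketch, hypothetical helper: an OP-V word (opcode
   1010111 = 0x57) is assembled from the fields laid out above.
   MATCH_VADDVV below is exactly this encoding with funct6 = 000000,
   funct3 = 000 (OPIVV) and all register fields zero, while
   MASK_VADDVV keeps only funct6, funct3 and the opcode.  */

#include <stdint.h>

static inline uint32_t
rvv_encode_opv (uint32_t funct6, uint32_t vm, uint32_t vs2,
                uint32_t vs1_rs1_imm, uint32_t funct3, uint32_t vd)
{
  return (funct6 << 26) | (vm << 25) | (vs2 << 20)
         | (vs1_rs1_imm << 15) | (funct3 << 12) | (vd << 7) | 0x57;
}

/* vadd.vv v4,v2,v3 with vm = 1 (unmasked):
   uint32_t insn = rvv_encode_opv (0x00, 1, 2, 3, 0x0, 4);
   then (insn & MASK_VADDVV) == MATCH_VADDVV holds.  */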
  1839. +
  1840. +#define MATCH_VADDVV 0x00000057
  1841. +#define MASK_VADDVV 0xfc00707f
  1842. +#define MATCH_VADDVX 0x00004057
  1843. +#define MASK_VADDVX 0xfc00707f
  1844. +#define MATCH_VADDVI 0x00003057
  1845. +#define MASK_VADDVI 0xfc00707f
  1846. +#define MATCH_VSUBVV 0x08000057
  1847. +#define MASK_VSUBVV 0xfc00707f
  1848. +#define MATCH_VSUBVX 0x08004057
  1849. +#define MASK_VSUBVX 0xfc00707f
  1850. +#define MATCH_VRSUBVX 0x0c004057
  1851. +#define MASK_VRSUBVX 0xfc00707f
  1852. +#define MATCH_VRSUBVI 0x0c003057
  1853. +#define MASK_VRSUBVI 0xfc00707f
  1854. +
  1855. +#define MATCH_VWCVTXXV 0xc4006057
  1856. +#define MASK_VWCVTXXV 0xfc0ff07f
  1857. +#define MATCH_VWCVTUXXV 0xc0006057
  1858. +#define MASK_VWCVTUXXV 0xfc0ff07f
  1859. +
  1860. +#define MATCH_VWADDVV 0xc4002057
  1861. +#define MASK_VWADDVV 0xfc00707f
  1862. +#define MATCH_VWADDVX 0xc4006057
  1863. +#define MASK_VWADDVX 0xfc00707f
  1864. +#define MATCH_VWSUBVV 0xcc002057
  1865. +#define MASK_VWSUBVV 0xfc00707f
  1866. +#define MATCH_VWSUBVX 0xcc006057
  1867. +#define MASK_VWSUBVX 0xfc00707f
  1868. +#define MATCH_VWADDWV 0xd4002057
  1869. +#define MASK_VWADDWV 0xfc00707f
  1870. +#define MATCH_VWADDWX 0xd4006057
  1871. +#define MASK_VWADDWX 0xfc00707f
  1872. +#define MATCH_VWSUBWV 0xdc002057
  1873. +#define MASK_VWSUBWV 0xfc00707f
  1874. +#define MATCH_VWSUBWX 0xdc006057
  1875. +#define MASK_VWSUBWX 0xfc00707f
  1876. +#define MATCH_VWADDUVV 0xc0002057
  1877. +#define MASK_VWADDUVV 0xfc00707f
  1878. +#define MATCH_VWADDUVX 0xc0006057
  1879. +#define MASK_VWADDUVX 0xfc00707f
  1880. +#define MATCH_VWSUBUVV 0xc8002057
  1881. +#define MASK_VWSUBUVV 0xfc00707f
  1882. +#define MATCH_VWSUBUVX 0xc8006057
  1883. +#define MASK_VWSUBUVX 0xfc00707f
  1884. +#define MATCH_VWADDUWV 0xd0002057
  1885. +#define MASK_VWADDUWV 0xfc00707f
  1886. +#define MATCH_VWADDUWX 0xd0006057
  1887. +#define MASK_VWADDUWX 0xfc00707f
  1888. +#define MATCH_VWSUBUWV 0xd8002057
  1889. +#define MASK_VWSUBUWV 0xfc00707f
  1890. +#define MATCH_VWSUBUWX 0xd8006057
  1891. +#define MASK_VWSUBUWX 0xfc00707f
  1892. +
  1893. +#define MATCH_VZEXT_VF8 0x48012057
  1894. +#define MASK_VZEXT_VF8 0xfc0ff07f
  1895. +#define MATCH_VSEXT_VF8 0x4801a057
  1896. +#define MASK_VSEXT_VF8 0xfc0ff07f
  1897. +#define MATCH_VZEXT_VF4 0x48022057
  1898. +#define MASK_VZEXT_VF4 0xfc0ff07f
  1899. +#define MATCH_VSEXT_VF4 0x4802a057
  1900. +#define MASK_VSEXT_VF4 0xfc0ff07f
  1901. +#define MATCH_VZEXT_VF2 0x48032057
  1902. +#define MASK_VZEXT_VF2 0xfc0ff07f
  1903. +#define MATCH_VSEXT_VF2 0x4803a057
  1904. +#define MASK_VSEXT_VF2 0xfc0ff07f
  1905. +
  1906. +#define MATCH_VADCVVM 0x40000057
  1907. +#define MASK_VADCVVM 0xfe00707f
  1908. +#define MATCH_VADCVXM 0x40004057
  1909. +#define MASK_VADCVXM 0xfe00707f
  1910. +#define MATCH_VADCVIM 0x40003057
  1911. +#define MASK_VADCVIM 0xfe00707f
  1912. +#define MATCH_VMADCVVM 0x44000057
  1913. +#define MASK_VMADCVVM 0xfe00707f
  1914. +#define MATCH_VMADCVXM 0x44004057
  1915. +#define MASK_VMADCVXM 0xfe00707f
  1916. +#define MATCH_VMADCVIM 0x44003057
  1917. +#define MASK_VMADCVIM 0xfe00707f
  1918. +#define MATCH_VMADCVV 0x46000057
  1919. +#define MASK_VMADCVV 0xfe00707f
  1920. +#define MATCH_VMADCVX 0x46004057
  1921. +#define MASK_VMADCVX 0xfe00707f
  1922. +#define MATCH_VMADCVI 0x46003057
  1923. +#define MASK_VMADCVI 0xfe00707f
  1924. +#define MATCH_VSBCVVM 0x48000057
  1925. +#define MASK_VSBCVVM 0xfe00707f
  1926. +#define MATCH_VSBCVXM 0x48004057
  1927. +#define MASK_VSBCVXM 0xfe00707f
  1928. +#define MATCH_VMSBCVVM 0x4c000057
  1929. +#define MASK_VMSBCVVM 0xfe00707f
  1930. +#define MATCH_VMSBCVXM 0x4c004057
  1931. +#define MASK_VMSBCVXM 0xfe00707f
  1932. +#define MATCH_VMSBCVV 0x4e000057
  1933. +#define MASK_VMSBCVV 0xfe00707f
  1934. +#define MATCH_VMSBCVX 0x4e004057
  1935. +#define MASK_VMSBCVX 0xfe00707f
  1936. +
  1937. +#define MATCH_VNOTV 0x2c0fb057
  1938. +#define MASK_VNOTV 0xfc0ff07f
  1939. +
  1940. +#define MATCH_VANDVV 0x24000057
  1941. +#define MASK_VANDVV 0xfc00707f
  1942. +#define MATCH_VANDVX 0x24004057
  1943. +#define MASK_VANDVX 0xfc00707f
  1944. +#define MATCH_VANDVI 0x24003057
  1945. +#define MASK_VANDVI 0xfc00707f
  1946. +#define MATCH_VORVV 0x28000057
  1947. +#define MASK_VORVV 0xfc00707f
  1948. +#define MATCH_VORVX 0x28004057
  1949. +#define MASK_VORVX 0xfc00707f
  1950. +#define MATCH_VORVI 0x28003057
  1951. +#define MASK_VORVI 0xfc00707f
  1952. +#define MATCH_VXORVV 0x2c000057
  1953. +#define MASK_VXORVV 0xfc00707f
  1954. +#define MATCH_VXORVX 0x2c004057
  1955. +#define MASK_VXORVX 0xfc00707f
  1956. +#define MATCH_VXORVI 0x2c003057
  1957. +#define MASK_VXORVI 0xfc00707f
  1958. +
  1959. +#define MATCH_VSLLVV 0x94000057
  1960. +#define MASK_VSLLVV 0xfc00707f
  1961. +#define MATCH_VSLLVX 0x94004057
  1962. +#define MASK_VSLLVX 0xfc00707f
  1963. +#define MATCH_VSLLVI 0x94003057
  1964. +#define MASK_VSLLVI 0xfc00707f
  1965. +#define MATCH_VSRLVV 0xa0000057
  1966. +#define MASK_VSRLVV 0xfc00707f
  1967. +#define MATCH_VSRLVX 0xa0004057
  1968. +#define MASK_VSRLVX 0xfc00707f
  1969. +#define MATCH_VSRLVI 0xa0003057
  1970. +#define MASK_VSRLVI 0xfc00707f
  1971. +#define MATCH_VSRAVV 0xa4000057
  1972. +#define MASK_VSRAVV 0xfc00707f
  1973. +#define MATCH_VSRAVX 0xa4004057
  1974. +#define MASK_VSRAVX 0xfc00707f
  1975. +#define MATCH_VSRAVI 0xa4003057
  1976. +#define MASK_VSRAVI 0xfc00707f
  1977. +
  1978. +#define MATCH_VNCVTXXW 0xb0004057
  1979. +#define MASK_VNCVTXXW 0xfc0ff07f
  1980. +
  1981. +#define MATCH_VNSRLWV 0xb0000057
  1982. +#define MASK_VNSRLWV 0xfc00707f
  1983. +#define MATCH_VNSRLWX 0xb0004057
  1984. +#define MASK_VNSRLWX 0xfc00707f
  1985. +#define MATCH_VNSRLWI 0xb0003057
  1986. +#define MASK_VNSRLWI 0xfc00707f
  1987. +#define MATCH_VNSRAWV 0xb4000057
  1988. +#define MASK_VNSRAWV 0xfc00707f
  1989. +#define MATCH_VNSRAWX 0xb4004057
  1990. +#define MASK_VNSRAWX 0xfc00707f
  1991. +#define MATCH_VNSRAWI 0xb4003057
  1992. +#define MASK_VNSRAWI 0xfc00707f
  1993. +
  1994. +#define MATCH_VMSEQVV 0x60000057
  1995. +#define MASK_VMSEQVV 0xfc00707f
  1996. +#define MATCH_VMSEQVX 0x60004057
  1997. +#define MASK_VMSEQVX 0xfc00707f
  1998. +#define MATCH_VMSEQVI 0x60003057
  1999. +#define MASK_VMSEQVI 0xfc00707f
  2000. +#define MATCH_VMSNEVV 0x64000057
  2001. +#define MASK_VMSNEVV 0xfc00707f
  2002. +#define MATCH_VMSNEVX 0x64004057
  2003. +#define MASK_VMSNEVX 0xfc00707f
  2004. +#define MATCH_VMSNEVI 0x64003057
  2005. +#define MASK_VMSNEVI 0xfc00707f
  2006. +#define MATCH_VMSLTVV 0x6c000057
  2007. +#define MASK_VMSLTVV 0xfc00707f
  2008. +#define MATCH_VMSLTVX 0x6c004057
  2009. +#define MASK_VMSLTVX 0xfc00707f
  2010. +#define MATCH_VMSLTUVV 0x68000057
  2011. +#define MASK_VMSLTUVV 0xfc00707f
  2012. +#define MATCH_VMSLTUVX 0x68004057
  2013. +#define MASK_VMSLTUVX 0xfc00707f
  2014. +#define MATCH_VMSLEVV 0x74000057
  2015. +#define MASK_VMSLEVV 0xfc00707f
  2016. +#define MATCH_VMSLEVX 0x74004057
  2017. +#define MASK_VMSLEVX 0xfc00707f
  2018. +#define MATCH_VMSLEVI 0x74003057
  2019. +#define MASK_VMSLEVI 0xfc00707f
  2020. +#define MATCH_VMSLEUVV 0x70000057
  2021. +#define MASK_VMSLEUVV 0xfc00707f
  2022. +#define MATCH_VMSLEUVX 0x70004057
  2023. +#define MASK_VMSLEUVX 0xfc00707f
  2024. +#define MATCH_VMSLEUVI 0x70003057
  2025. +#define MASK_VMSLEUVI 0xfc00707f
  2026. +#define MATCH_VMSGTVX 0x7c004057
  2027. +#define MASK_VMSGTVX 0xfc00707f
  2028. +#define MATCH_VMSGTVI 0x7c003057
  2029. +#define MASK_VMSGTVI 0xfc00707f
  2030. +#define MATCH_VMSGTUVX 0x78004057
  2031. +#define MASK_VMSGTUVX 0xfc00707f
  2032. +#define MATCH_VMSGTUVI 0x78003057
  2033. +#define MASK_VMSGTUVI 0xfc00707f
  2034. +
  2035. +#define MATCH_VMINVV 0x14000057
  2036. +#define MASK_VMINVV 0xfc00707f
  2037. +#define MATCH_VMINVX 0x14004057
  2038. +#define MASK_VMINVX 0xfc00707f
  2039. +#define MATCH_VMAXVV 0x1c000057
  2040. +#define MASK_VMAXVV 0xfc00707f
  2041. +#define MATCH_VMAXVX 0x1c004057
  2042. +#define MASK_VMAXVX 0xfc00707f
  2043. +#define MATCH_VMINUVV 0x10000057
  2044. +#define MASK_VMINUVV 0xfc00707f
  2045. +#define MATCH_VMINUVX 0x10004057
  2046. +#define MASK_VMINUVX 0xfc00707f
  2047. +#define MATCH_VMAXUVV 0x18000057
  2048. +#define MASK_VMAXUVV 0xfc00707f
  2049. +#define MATCH_VMAXUVX 0x18004057
  2050. +#define MASK_VMAXUVX 0xfc00707f
  2051. +
  2052. +#define MATCH_VMULVV 0x94002057
  2053. +#define MASK_VMULVV 0xfc00707f
  2054. +#define MATCH_VMULVX 0x94006057
  2055. +#define MASK_VMULVX 0xfc00707f
  2056. +#define MATCH_VMULHVV 0x9c002057
  2057. +#define MASK_VMULHVV 0xfc00707f
  2058. +#define MATCH_VMULHVX 0x9c006057
  2059. +#define MASK_VMULHVX 0xfc00707f
  2060. +#define MATCH_VMULHUVV 0x90002057
  2061. +#define MASK_VMULHUVV 0xfc00707f
  2062. +#define MATCH_VMULHUVX 0x90006057
  2063. +#define MASK_VMULHUVX 0xfc00707f
  2064. +#define MATCH_VMULHSUVV 0x98002057
  2065. +#define MASK_VMULHSUVV 0xfc00707f
  2066. +#define MATCH_VMULHSUVX 0x98006057
  2067. +#define MASK_VMULHSUVX 0xfc00707f
  2068. +
  2069. +#define MATCH_VWMULVV 0xec002057
  2070. +#define MASK_VWMULVV 0xfc00707f
  2071. +#define MATCH_VWMULVX 0xec006057
  2072. +#define MASK_VWMULVX 0xfc00707f
  2073. +#define MATCH_VWMULUVV 0xe0002057
  2074. +#define MASK_VWMULUVV 0xfc00707f
  2075. +#define MATCH_VWMULUVX 0xe0006057
  2076. +#define MASK_VWMULUVX 0xfc00707f
  2077. +#define MATCH_VWMULSUVV 0xe8002057
  2078. +#define MASK_VWMULSUVV 0xfc00707f
  2079. +#define MATCH_VWMULSUVX 0xe8006057
  2080. +#define MASK_VWMULSUVX 0xfc00707f
  2081. +
  2082. +#define MATCH_VMACCVV 0xb4002057
  2083. +#define MASK_VMACCVV 0xfc00707f
  2084. +#define MATCH_VMACCVX 0xb4006057
  2085. +#define MASK_VMACCVX 0xfc00707f
  2086. +#define MATCH_VNMSACVV 0xbc002057
  2087. +#define MASK_VNMSACVV 0xfc00707f
  2088. +#define MATCH_VNMSACVX 0xbc006057
  2089. +#define MASK_VNMSACVX 0xfc00707f
  2090. +#define MATCH_VMADDVV 0xa4002057
  2091. +#define MASK_VMADDVV 0xfc00707f
  2092. +#define MATCH_VMADDVX 0xa4006057
  2093. +#define MASK_VMADDVX 0xfc00707f
  2094. +#define MATCH_VNMSUBVV 0xac002057
  2095. +#define MASK_VNMSUBVV 0xfc00707f
  2096. +#define MATCH_VNMSUBVX 0xac006057
  2097. +#define MASK_VNMSUBVX 0xfc00707f
  2098. +
  2099. +#define MATCH_VWMACCUVV 0xf0002057
  2100. +#define MASK_VWMACCUVV 0xfc00707f
  2101. +#define MATCH_VWMACCUVX 0xf0006057
  2102. +#define MASK_VWMACCUVX 0xfc00707f
  2103. +#define MATCH_VWMACCVV 0xf4002057
  2104. +#define MASK_VWMACCVV 0xfc00707f
  2105. +#define MATCH_VWMACCVX 0xf4006057
  2106. +#define MASK_VWMACCVX 0xfc00707f
  2107. +#define MATCH_VWMACCSUVV 0xfc002057
  2108. +#define MASK_VWMACCSUVV 0xfc00707f
  2109. +#define MATCH_VWMACCSUVX 0xfc006057
  2110. +#define MASK_VWMACCSUVX 0xfc00707f
  2111. +#define MATCH_VWMACCUSVX 0xf8006057
  2112. +#define MASK_VWMACCUSVX 0xfc00707f
  2113. +
  2114. +#define MATCH_VQMACCUVV 0xf0000057
  2115. +#define MASK_VQMACCUVV 0xfc00707f
  2116. +#define MATCH_VQMACCUVX 0xf0004057
  2117. +#define MASK_VQMACCUVX 0xfc00707f
  2118. +#define MATCH_VQMACCVV 0xf4000057
  2119. +#define MASK_VQMACCVV 0xfc00707f
  2120. +#define MATCH_VQMACCVX 0xf4004057
  2121. +#define MASK_VQMACCVX 0xfc00707f
  2122. +#define MATCH_VQMACCSUVV 0xfc000057
  2123. +#define MASK_VQMACCSUVV 0xfc00707f
  2124. +#define MATCH_VQMACCSUVX 0xfc004057
  2125. +#define MASK_VQMACCSUVX 0xfc00707f
  2126. +#define MATCH_VQMACCUSVX 0xf8004057
  2127. +#define MASK_VQMACCUSVX 0xfc00707f
  2128. +
  2129. +#define MATCH_VDIVVV 0x84002057
  2130. +#define MASK_VDIVVV 0xfc00707f
  2131. +#define MATCH_VDIVVX 0x84006057
  2132. +#define MASK_VDIVVX 0xfc00707f
  2133. +#define MATCH_VDIVUVV 0x80002057
  2134. +#define MASK_VDIVUVV 0xfc00707f
  2135. +#define MATCH_VDIVUVX 0x80006057
  2136. +#define MASK_VDIVUVX 0xfc00707f
  2137. +#define MATCH_VREMVV 0x8c002057
  2138. +#define MASK_VREMVV 0xfc00707f
  2139. +#define MATCH_VREMVX 0x8c006057
  2140. +#define MASK_VREMVX 0xfc00707f
  2141. +#define MATCH_VREMUVV 0x88002057
  2142. +#define MASK_VREMUVV 0xfc00707f
  2143. +#define MATCH_VREMUVX 0x88006057
  2144. +#define MASK_VREMUVX 0xfc00707f
  2145. +
  2146. +#define MATCH_VMERGEVVM 0x5c000057
  2147. +#define MASK_VMERGEVVM 0xfe00707f
  2148. +#define MATCH_VMERGEVXM 0x5c004057
  2149. +#define MASK_VMERGEVXM 0xfe00707f
  2150. +#define MATCH_VMERGEVIM 0x5c003057
  2151. +#define MASK_VMERGEVIM 0xfe00707f
  2152. +
  2153. +#define MATCH_VMVVV 0x5e000057
  2154. +#define MASK_VMVVV 0xfff0707f
  2155. +#define MATCH_VMVVX 0x5e004057
  2156. +#define MASK_VMVVX 0xfff0707f
  2157. +#define MATCH_VMVVI 0x5e003057
  2158. +#define MASK_VMVVI 0xfff0707f
  2159. +
  2160. +#define MATCH_VSADDUVV 0x80000057
  2161. +#define MASK_VSADDUVV 0xfc00707f
  2162. +#define MATCH_VSADDUVX 0x80004057
  2163. +#define MASK_VSADDUVX 0xfc00707f
  2164. +#define MATCH_VSADDUVI 0x80003057
  2165. +#define MASK_VSADDUVI 0xfc00707f
  2166. +#define MATCH_VSADDVV 0x84000057
  2167. +#define MASK_VSADDVV 0xfc00707f
  2168. +#define MATCH_VSADDVX 0x84004057
  2169. +#define MASK_VSADDVX 0xfc00707f
  2170. +#define MATCH_VSADDVI 0x84003057
  2171. +#define MASK_VSADDVI 0xfc00707f
  2172. +#define MATCH_VSSUBUVV 0x88000057
  2173. +#define MASK_VSSUBUVV 0xfc00707f
  2174. +#define MATCH_VSSUBUVX 0x88004057
  2175. +#define MASK_VSSUBUVX 0xfc00707f
  2176. +#define MATCH_VSSUBVV 0x8c000057
  2177. +#define MASK_VSSUBVV 0xfc00707f
  2178. +#define MATCH_VSSUBVX 0x8c004057
  2179. +#define MASK_VSSUBVX 0xfc00707f
  2180. +
  2181. +#define MATCH_VAADDUVV 0x20002057
  2182. +#define MASK_VAADDUVV 0xfc00707f
  2183. +#define MATCH_VAADDUVX 0x20006057
  2184. +#define MASK_VAADDUVX 0xfc00707f
  2185. +#define MATCH_VAADDVV 0x24002057
  2186. +#define MASK_VAADDVV 0xfc00707f
  2187. +#define MATCH_VAADDVX 0x24006057
  2188. +#define MASK_VAADDVX 0xfc00707f
  2189. +#define MATCH_VASUBUVV 0x28002057
  2190. +#define MASK_VASUBUVV 0xfc00707f
  2191. +#define MATCH_VASUBUVX 0x28006057
  2192. +#define MASK_VASUBUVX 0xfc00707f
  2193. +#define MATCH_VASUBVV 0x2c002057
  2194. +#define MASK_VASUBVV 0xfc00707f
  2195. +#define MATCH_VASUBVX 0x2c006057
  2196. +#define MASK_VASUBVX 0xfc00707f
  2197. +
  2198. +#define MATCH_VSMULVV 0x9c000057
  2199. +#define MASK_VSMULVV 0xfc00707f
  2200. +#define MATCH_VSMULVX 0x9c004057
  2201. +#define MASK_VSMULVX 0xfc00707f
  2202. +
  2203. +#define MATCH_VSSRLVV 0xa8000057
  2204. +#define MASK_VSSRLVV 0xfc00707f
  2205. +#define MATCH_VSSRLVX 0xa8004057
  2206. +#define MASK_VSSRLVX 0xfc00707f
  2207. +#define MATCH_VSSRLVI 0xa8003057
  2208. +#define MASK_VSSRLVI 0xfc00707f
  2209. +#define MATCH_VSSRAVV 0xac000057
  2210. +#define MASK_VSSRAVV 0xfc00707f
  2211. +#define MATCH_VSSRAVX 0xac004057
  2212. +#define MASK_VSSRAVX 0xfc00707f
  2213. +#define MATCH_VSSRAVI 0xac003057
  2214. +#define MASK_VSSRAVI 0xfc00707f
  2215. +
  2216. +#define MATCH_VNCLIPUWV 0xb8000057
  2217. +#define MASK_VNCLIPUWV 0xfc00707f
  2218. +#define MATCH_VNCLIPUWX 0xb8004057
  2219. +#define MASK_VNCLIPUWX 0xfc00707f
  2220. +#define MATCH_VNCLIPUWI 0xb8003057
  2221. +#define MASK_VNCLIPUWI 0xfc00707f
  2222. +#define MATCH_VNCLIPWV 0xbc000057
  2223. +#define MASK_VNCLIPWV 0xfc00707f
  2224. +#define MATCH_VNCLIPWX 0xbc004057
  2225. +#define MASK_VNCLIPWX 0xfc00707f
  2226. +#define MATCH_VNCLIPWI 0xbc003057
  2227. +#define MASK_VNCLIPWI 0xfc00707f
  2228. +
  2229. +#define MATCH_VFADDVV 0x00001057
  2230. +#define MASK_VFADDVV 0xfc00707f
  2231. +#define MATCH_VFADDVF 0x00005057
  2232. +#define MASK_VFADDVF 0xfc00707f
  2233. +#define MATCH_VFSUBVV 0x08001057
  2234. +#define MASK_VFSUBVV 0xfc00707f
  2235. +#define MATCH_VFSUBVF 0x08005057
  2236. +#define MASK_VFSUBVF 0xfc00707f
  2237. +#define MATCH_VFRSUBVF 0x9c005057
  2238. +#define MASK_VFRSUBVF 0xfc00707f
  2239. +
  2240. +#define MATCH_VFWADDVV 0xc0001057
  2241. +#define MASK_VFWADDVV 0xfc00707f
  2242. +#define MATCH_VFWADDVF 0xc0005057
  2243. +#define MASK_VFWADDVF 0xfc00707f
  2244. +#define MATCH_VFWSUBVV 0xc8001057
  2245. +#define MASK_VFWSUBVV 0xfc00707f
  2246. +#define MATCH_VFWSUBVF 0xc8005057
  2247. +#define MASK_VFWSUBVF 0xfc00707f
  2248. +#define MATCH_VFWADDWV 0xd0001057
  2249. +#define MASK_VFWADDWV 0xfc00707f
  2250. +#define MATCH_VFWADDWF 0xd0005057
  2251. +#define MASK_VFWADDWF 0xfc00707f
  2252. +#define MATCH_VFWSUBWV 0xd8001057
  2253. +#define MASK_VFWSUBWV 0xfc00707f
  2254. +#define MATCH_VFWSUBWF 0xd8005057
  2255. +#define MASK_VFWSUBWF 0xfc00707f
  2256. +
  2257. +#define MATCH_VFMULVV 0x90001057
  2258. +#define MASK_VFMULVV 0xfc00707f
  2259. +#define MATCH_VFMULVF 0x90005057
  2260. +#define MASK_VFMULVF 0xfc00707f
  2261. +#define MATCH_VFDIVVV 0x80001057
  2262. +#define MASK_VFDIVVV 0xfc00707f
  2263. +#define MATCH_VFDIVVF 0x80005057
  2264. +#define MASK_VFDIVVF 0xfc00707f
  2265. +#define MATCH_VFRDIVVF 0x84005057
  2266. +#define MASK_VFRDIVVF 0xfc00707f
  2267. +
  2268. +#define MATCH_VFWMULVV 0xe0001057
  2269. +#define MASK_VFWMULVV 0xfc00707f
  2270. +#define MATCH_VFWMULVF 0xe0005057
  2271. +#define MASK_VFWMULVF 0xfc00707f
  2272. +
  2273. +#define MATCH_VFMADDVV 0xa0001057
  2274. +#define MASK_VFMADDVV 0xfc00707f
  2275. +#define MATCH_VFMADDVF 0xa0005057
  2276. +#define MASK_VFMADDVF 0xfc00707f
  2277. +#define MATCH_VFNMADDVV 0xa4001057
  2278. +#define MASK_VFNMADDVV 0xfc00707f
  2279. +#define MATCH_VFNMADDVF 0xa4005057
  2280. +#define MASK_VFNMADDVF 0xfc00707f
  2281. +#define MATCH_VFMSUBVV 0xa8001057
  2282. +#define MASK_VFMSUBVV 0xfc00707f
  2283. +#define MATCH_VFMSUBVF 0xa8005057
  2284. +#define MASK_VFMSUBVF 0xfc00707f
  2285. +#define MATCH_VFNMSUBVV 0xac001057
  2286. +#define MASK_VFNMSUBVV 0xfc00707f
  2287. +#define MATCH_VFNMSUBVF 0xac005057
  2288. +#define MASK_VFNMSUBVF 0xfc00707f
  2289. +#define MATCH_VFMACCVV 0xb0001057
  2290. +#define MASK_VFMACCVV 0xfc00707f
  2291. +#define MATCH_VFMACCVF 0xb0005057
  2292. +#define MASK_VFMACCVF 0xfc00707f
  2293. +#define MATCH_VFNMACCVV 0xb4001057
  2294. +#define MASK_VFNMACCVV 0xfc00707f
  2295. +#define MATCH_VFNMACCVF 0xb4005057
  2296. +#define MASK_VFNMACCVF 0xfc00707f
  2297. +#define MATCH_VFMSACVV 0xb8001057
  2298. +#define MASK_VFMSACVV 0xfc00707f
  2299. +#define MATCH_VFMSACVF 0xb8005057
  2300. +#define MASK_VFMSACVF 0xfc00707f
  2301. +#define MATCH_VFNMSACVV 0xbc001057
  2302. +#define MASK_VFNMSACVV 0xfc00707f
  2303. +#define MATCH_VFNMSACVF 0xbc005057
  2304. +#define MASK_VFNMSACVF 0xfc00707f
  2305. +
  2306. +#define MATCH_VFWMACCVV 0xf0001057
  2307. +#define MASK_VFWMACCVV 0xfc00707f
  2308. +#define MATCH_VFWMACCVF 0xf0005057
  2309. +#define MASK_VFWMACCVF 0xfc00707f
  2310. +#define MATCH_VFWNMACCVV 0xf4001057
  2311. +#define MASK_VFWNMACCVV 0xfc00707f
  2312. +#define MATCH_VFWNMACCVF 0xf4005057
  2313. +#define MASK_VFWNMACCVF 0xfc00707f
  2314. +#define MATCH_VFWMSACVV 0xf8001057
  2315. +#define MASK_VFWMSACVV 0xfc00707f
  2316. +#define MATCH_VFWMSACVF 0xf8005057
  2317. +#define MASK_VFWMSACVF 0xfc00707f
  2318. +#define MATCH_VFWNMSACVV 0xfc001057
  2319. +#define MASK_VFWNMSACVV 0xfc00707f
  2320. +#define MATCH_VFWNMSACVF 0xfc005057
  2321. +#define MASK_VFWNMSACVF 0xfc00707f
  2322. +
  2323. +#define MATCH_VFSQRTV 0x4c001057
  2324. +#define MASK_VFSQRTV 0xfc0ff07f
  2325. +#define MATCH_VFRSQRT7V 0x4c021057
  2326. +#define MASK_VFRSQRT7V 0xfc0ff07f
  2327. +#define MATCH_VFREC7V 0x4c029057
  2328. +#define MASK_VFREC7V 0xfc0ff07f
  2329. +#define MATCH_VFCLASSV 0x4c081057
  2330. +#define MASK_VFCLASSV 0xfc0ff07f
  2331. +
  2332. +#define MATCH_VFMINVV 0x10001057
  2333. +#define MASK_VFMINVV 0xfc00707f
  2334. +#define MATCH_VFMINVF 0x10005057
  2335. +#define MASK_VFMINVF 0xfc00707f
  2336. +#define MATCH_VFMAXVV 0x18001057
  2337. +#define MASK_VFMAXVV 0xfc00707f
  2338. +#define MATCH_VFMAXVF 0x18005057
  2339. +#define MASK_VFMAXVF 0xfc00707f
  2340. +
  2341. +#define MATCH_VFSGNJVV 0x20001057
  2342. +#define MASK_VFSGNJVV 0xfc00707f
  2343. +#define MATCH_VFSGNJVF 0x20005057
  2344. +#define MASK_VFSGNJVF 0xfc00707f
  2345. +#define MATCH_VFSGNJNVV 0x24001057
  2346. +#define MASK_VFSGNJNVV 0xfc00707f
  2347. +#define MATCH_VFSGNJNVF 0x24005057
  2348. +#define MASK_VFSGNJNVF 0xfc00707f
  2349. +#define MATCH_VFSGNJXVV 0x28001057
  2350. +#define MASK_VFSGNJXVV 0xfc00707f
  2351. +#define MATCH_VFSGNJXVF 0x28005057
  2352. +#define MASK_VFSGNJXVF 0xfc00707f
  2353. +
  2354. +#define MATCH_VMFEQVV 0x60001057
  2355. +#define MASK_VMFEQVV 0xfc00707f
  2356. +#define MATCH_VMFEQVF 0x60005057
  2357. +#define MASK_VMFEQVF 0xfc00707f
  2358. +#define MATCH_VMFNEVV 0x70001057
  2359. +#define MASK_VMFNEVV 0xfc00707f
  2360. +#define MATCH_VMFNEVF 0x70005057
  2361. +#define MASK_VMFNEVF 0xfc00707f
  2362. +#define MATCH_VMFLTVV 0x6c001057
  2363. +#define MASK_VMFLTVV 0xfc00707f
  2364. +#define MATCH_VMFLTVF 0x6c005057
  2365. +#define MASK_VMFLTVF 0xfc00707f
  2366. +#define MATCH_VMFLEVV 0x64001057
  2367. +#define MASK_VMFLEVV 0xfc00707f
  2368. +#define MATCH_VMFLEVF 0x64005057
  2369. +#define MASK_VMFLEVF 0xfc00707f
  2370. +#define MATCH_VMFGTVF 0x74005057
  2371. +#define MASK_VMFGTVF 0xfc00707f
  2372. +#define MATCH_VMFGEVF 0x7c005057
  2373. +#define MASK_VMFGEVF 0xfc00707f
  2374. +
  2375. +#define MATCH_VFMERGEVFM 0x5c005057
  2376. +#define MASK_VFMERGEVFM 0xfe00707f
  2377. +#define MATCH_VFMVVF 0x5e005057
  2378. +#define MASK_VFMVVF 0xfff0707f
  2379. +
  2380. +#define MATCH_VFCVTXUFV 0x48001057
  2381. +#define MASK_VFCVTXUFV 0xfc0ff07f
  2382. +#define MATCH_VFCVTXFV 0x48009057
  2383. +#define MASK_VFCVTXFV 0xfc0ff07f
  2384. +#define MATCH_VFCVTFXUV 0x48011057
  2385. +#define MASK_VFCVTFXUV 0xfc0ff07f
  2386. +#define MATCH_VFCVTFXV 0x48019057
  2387. +#define MASK_VFCVTFXV 0xfc0ff07f
  2388. +#define MATCH_VFCVTRTZXUFV 0x48031057
  2389. +#define MASK_VFCVTRTZXUFV 0xfc0ff07f
  2390. +#define MATCH_VFCVTRTZXFV 0x48039057
  2391. +#define MASK_VFCVTRTZXFV 0xfc0ff07f
  2392. +#define MATCH_VFWCVTXUFV 0x48041057
  2393. +#define MASK_VFWCVTXUFV 0xfc0ff07f
  2394. +#define MATCH_VFWCVTXFV 0x48049057
  2395. +#define MASK_VFWCVTXFV 0xfc0ff07f
  2396. +#define MATCH_VFWCVTFXUV 0x48051057
  2397. +#define MASK_VFWCVTFXUV 0xfc0ff07f
  2398. +#define MATCH_VFWCVTFXV 0x48059057
  2399. +#define MASK_VFWCVTFXV 0xfc0ff07f
  2400. +#define MATCH_VFWCVTFFV 0x48061057
  2401. +#define MASK_VFWCVTFFV 0xfc0ff07f
  2402. +#define MATCH_VFWCVTRTZXUFV 0x48071057
  2403. +#define MASK_VFWCVTRTZXUFV 0xfc0ff07f
  2404. +#define MATCH_VFWCVTRTZXFV 0x48079057
  2405. +#define MASK_VFWCVTRTZXFV 0xfc0ff07f
  2406. +#define MATCH_VFNCVTXUFW 0x48081057
  2407. +#define MASK_VFNCVTXUFW 0xfc0ff07f
  2408. +#define MATCH_VFNCVTXFW 0x48089057
  2409. +#define MASK_VFNCVTXFW 0xfc0ff07f
  2410. +#define MATCH_VFNCVTFXUW 0x48091057
  2411. +#define MASK_VFNCVTFXUW 0xfc0ff07f
  2412. +#define MATCH_VFNCVTFXW 0x48099057
  2413. +#define MASK_VFNCVTFXW 0xfc0ff07f
  2414. +#define MATCH_VFNCVTFFW 0x480a1057
  2415. +#define MASK_VFNCVTFFW 0xfc0ff07f
  2416. +#define MATCH_VFNCVTRODFFW 0x480a9057
  2417. +#define MASK_VFNCVTRODFFW 0xfc0ff07f
  2418. +#define MATCH_VFNCVTRTZXUFW 0x480b1057
  2419. +#define MASK_VFNCVTRTZXUFW 0xfc0ff07f
  2420. +#define MATCH_VFNCVTRTZXFW 0x480b9057
  2421. +#define MASK_VFNCVTRTZXFW 0xfc0ff07f
  2422. +
  2423. +#define MATCH_VREDSUMVS 0x00002057
  2424. +#define MASK_VREDSUMVS 0xfc00707f
  2425. +#define MATCH_VREDMAXVS 0x1c002057
  2426. +#define MASK_VREDMAXVS 0xfc00707f
  2427. +#define MATCH_VREDMAXUVS 0x18002057
  2428. +#define MASK_VREDMAXUVS 0xfc00707f
  2429. +#define MATCH_VREDMINVS 0x14002057
  2430. +#define MASK_VREDMINVS 0xfc00707f
  2431. +#define MATCH_VREDMINUVS 0x10002057
  2432. +#define MASK_VREDMINUVS 0xfc00707f
  2433. +#define MATCH_VREDANDVS 0x04002057
  2434. +#define MASK_VREDANDVS 0xfc00707f
  2435. +#define MATCH_VREDORVS 0x08002057
  2436. +#define MASK_VREDORVS 0xfc00707f
  2437. +#define MATCH_VREDXORVS 0x0c002057
  2438. +#define MASK_VREDXORVS 0xfc00707f
  2439. +
  2440. +#define MATCH_VWREDSUMUVS 0xc0000057
  2441. +#define MASK_VWREDSUMUVS 0xfc00707f
  2442. +#define MATCH_VWREDSUMVS 0xc4000057
  2443. +#define MASK_VWREDSUMVS 0xfc00707f
  2444. +
  2445. +#define MATCH_VFREDOSUMVS 0x0c001057
  2446. +#define MASK_VFREDOSUMVS 0xfc00707f
  2447. +#define MATCH_VFREDSUMVS 0x04001057
  2448. +#define MASK_VFREDSUMVS 0xfc00707f
  2449. +#define MATCH_VFREDMAXVS 0x1c001057
  2450. +#define MASK_VFREDMAXVS 0xfc00707f
  2451. +#define MATCH_VFREDMINVS 0x14001057
  2452. +#define MASK_VFREDMINVS 0xfc00707f
  2453. +
  2454. +#define MATCH_VFWREDOSUMVS 0xcc001057
  2455. +#define MASK_VFWREDOSUMVS 0xfc00707f
  2456. +#define MATCH_VFWREDSUMVS 0xc4001057
  2457. +#define MASK_VFWREDSUMVS 0xfc00707f
  2458. +
  2459. +#define MATCH_VMANDMM 0x66002057
  2460. +#define MASK_VMANDMM 0xfe00707f
  2461. +#define MATCH_VMNANDMM 0x76002057
  2462. +#define MASK_VMNANDMM 0xfe00707f
  2463. +#define MATCH_VMANDNOTMM 0x62002057
  2464. +#define MASK_VMANDNOTMM 0xfe00707f
  2465. +#define MATCH_VMXORMM 0x6e002057
  2466. +#define MASK_VMXORMM 0xfe00707f
  2467. +#define MATCH_VMORMM 0x6a002057
  2468. +#define MASK_VMORMM 0xfe00707f
  2469. +#define MATCH_VMNORMM 0x7a002057
  2470. +#define MASK_VMNORMM 0xfe00707f
  2471. +#define MATCH_VMORNOTMM 0x72002057
  2472. +#define MASK_VMORNOTMM 0xfe00707f
  2473. +#define MATCH_VMXNORMM 0x7e002057
  2474. +#define MASK_VMXNORMM 0xfe00707f
  2475. +
  2476. +#define MATCH_VPOPCM 0x40082057
  2477. +#define MASK_VPOPCM 0xfc0ff07f
  2478. +#define MATCH_VFIRSTM 0x4008a057
  2479. +#define MASK_VFIRSTM 0xfc0ff07f
  2480. +
  2481. +#define MATCH_VMSBFM 0x5000a057
  2482. +#define MASK_VMSBFM 0xfc0ff07f
  2483. +#define MATCH_VMSIFM 0x5001a057
  2484. +#define MASK_VMSIFM 0xfc0ff07f
  2485. +#define MATCH_VMSOFM 0x50012057
  2486. +#define MASK_VMSOFM 0xfc0ff07f
  2487. +#define MATCH_VIOTAM 0x50082057
  2488. +#define MASK_VIOTAM 0xfc0ff07f
  2489. +#define MATCH_VIDV 0x5008a057
  2490. +#define MASK_VIDV 0xfdfff07f
  2491. +
  2492. +#define MATCH_VMVXS 0x42002057
  2493. +#define MASK_VMVXS 0xfe0ff07f
  2494. +#define MATCH_VMVSX 0x42006057
  2495. +#define MASK_VMVSX 0xfff0707f
  2496. +
  2497. +#define MATCH_VFMVFS 0x42001057
  2498. +#define MASK_VFMVFS 0xfe0ff07f
  2499. +#define MATCH_VFMVSF 0x42005057
  2500. +#define MASK_VFMVSF 0xfff0707f
  2501. +
  2502. +#define MATCH_VSLIDEUPVX 0x38004057
  2503. +#define MASK_VSLIDEUPVX 0xfc00707f
  2504. +#define MATCH_VSLIDEUPVI 0x38003057
  2505. +#define MASK_VSLIDEUPVI 0xfc00707f
  2506. +#define MATCH_VSLIDEDOWNVX 0x3c004057
  2507. +#define MASK_VSLIDEDOWNVX 0xfc00707f
  2508. +#define MATCH_VSLIDEDOWNVI 0x3c003057
  2509. +#define MASK_VSLIDEDOWNVI 0xfc00707f
  2510. +
  2511. +#define MATCH_VSLIDE1UPVX 0x38006057
  2512. +#define MASK_VSLIDE1UPVX 0xfc00707f
  2513. +#define MATCH_VSLIDE1DOWNVX 0x3c006057
  2514. +#define MASK_VSLIDE1DOWNVX 0xfc00707f
  2515. +
  2516. +#define MATCH_VFSLIDE1UPVF 0x38005057
  2517. +#define MASK_VFSLIDE1UPVF 0xfc00707f
  2518. +#define MATCH_VFSLIDE1DOWNVF 0x3c005057
  2519. +#define MASK_VFSLIDE1DOWNVF 0xfc00707f
  2520. +
  2521. +#define MATCH_VRGATHERVV 0x30000057
  2522. +#define MASK_VRGATHERVV 0xfc00707f
  2523. +#define MATCH_VRGATHERVX 0x30004057
  2524. +#define MASK_VRGATHERVX 0xfc00707f
  2525. +#define MATCH_VRGATHERVI 0x30003057
  2526. +#define MASK_VRGATHERVI 0xfc00707f
  2527. +#define MATCH_VRGATHEREI16VV 0x38000057
  2528. +#define MASK_VRGATHEREI16VV 0xfc00707f
  2529. +
  2530. +#define MATCH_VCOMPRESSVM 0x5e002057
  2531. +#define MASK_VCOMPRESSVM 0xfe00707f
  2532. +
  2533. +#define MATCH_VMV1RV 0x9e003057
  2534. +#define MASK_VMV1RV 0xfe0ff07f
  2535. +#define MATCH_VMV2RV 0x9e00b057
  2536. +#define MASK_VMV2RV 0xfe0ff07f
  2537. +#define MATCH_VMV4RV 0x9e01b057
  2538. +#define MASK_VMV4RV 0xfe0ff07f
  2539. +#define MATCH_VMV8RV 0x9e03b057
  2540. +#define MASK_VMV8RV 0xfe0ff07f
  2541. +
  2542. +#define MATCH_VDOTVV 0xe4000057
  2543. +#define MASK_VDOTVV 0xfc00707f
  2544. +#define MATCH_VDOTUVV 0xe0000057
  2545. +#define MASK_VDOTUVV 0xfc00707f
  2546. +#define MATCH_VFDOTVV 0xe4001057
  2547. +#define MASK_VFDOTVV 0xfc00707f
  2548. +/* END RVV */
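/* Minimal sketch of how all of the MATCH/MASK pairs above are meant
   to be consumed: an instruction word belongs to an entry exactly
   when (insn & MASK) == MATCH.  The table and lookup below are
   hypothetical, for illustration only.  */

#include <stddef.h>
#include <stdint.h>

struct rvv_entry { const char *name; uint32_t match, mask; };

static const struct rvv_entry rvv_table[] = {
  { "vadd.vv",  MATCH_VADDVV,  MASK_VADDVV  },
  { "vfadd.vv", MATCH_VFADDVV, MASK_VFADDVV },
};

static inline const char *
rvv_lookup (uint32_t insn)
{
  size_t i;
  for (i = 0; i < sizeof rvv_table / sizeof rvv_table[0]; i++)
    if ((insn & rvv_table[i].mask) == rvv_table[i].match)
      return rvv_table[i].name;
  return NULL;   /* not a recognized RVV instruction */
}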
  2549. +
  2550. #define MATCH_CUSTOM0 0xb
  2551. #define MASK_CUSTOM0 0x707f
  2552. #define MATCH_CUSTOM0_RS1 0x200b
  2553. @@ -1034,6 +2954,13 @@
  2554. #define CSR_FFLAGS 0x1
  2555. #define CSR_FRM 0x2
  2556. #define CSR_FCSR 0x3
  2557. +#define CSR_VSTART 0x008
  2558. +#define CSR_VXSAT 0x009
  2559. +#define CSR_VXRM 0x00a
  2560. +#define CSR_VCSR 0x00f
  2561. +#define CSR_VL 0xc20
  2562. +#define CSR_VTYPE 0xc21
  2563. +#define CSR_VLENB 0xc22
  2564. #define CSR_DCSR 0x7b0
  2565. #define CSR_DPC 0x7b1
  2566. #define CSR_DSCRATCH0 0x7b2
  2567. @@ -1192,6 +3119,22 @@ DECLARE_INSN(fmax_d, MATCH_FMAX_D, MASK_FMAX_D)
  2568. DECLARE_INSN(fcvt_s_d, MATCH_FCVT_S_D, MASK_FCVT_S_D)
  2569. DECLARE_INSN(fcvt_d_s, MATCH_FCVT_D_S, MASK_FCVT_D_S)
  2570. DECLARE_INSN(fsqrt_d, MATCH_FSQRT_D, MASK_FSQRT_D)
  2571. +DECLARE_INSN(fadd_h, MATCH_FADD_H, MASK_FADD_H)
  2572. +DECLARE_INSN(fsub_h, MATCH_FSUB_H, MASK_FSUB_H)
  2573. +DECLARE_INSN(fmul_h, MATCH_FMUL_H, MASK_FMUL_H)
  2574. +DECLARE_INSN(fdiv_h, MATCH_FDIV_H, MASK_FDIV_H)
  2575. +DECLARE_INSN(fsgnj_h, MATCH_FSGNJ_H, MASK_FSGNJ_H)
  2576. +DECLARE_INSN(fsgnjn_h, MATCH_FSGNJN_H, MASK_FSGNJN_H)
  2577. +DECLARE_INSN(fsgnjx_h, MATCH_FSGNJX_H, MASK_FSGNJX_H)
  2578. +DECLARE_INSN(fmin_h, MATCH_FMIN_H, MASK_FMIN_H)
  2579. +DECLARE_INSN(fmax_h, MATCH_FMAX_H, MASK_FMAX_H)
  2580. +DECLARE_INSN(fcvt_s_h, MATCH_FCVT_S_H, MASK_FCVT_S_H)
  2581. +DECLARE_INSN(fcvt_h_s, MATCH_FCVT_H_S, MASK_FCVT_H_S)
  2582. +DECLARE_INSN(fcvt_d_h, MATCH_FCVT_D_H, MASK_FCVT_D_H)
  2583. +DECLARE_INSN(fcvt_h_d, MATCH_FCVT_H_D, MASK_FCVT_H_D)
  2584. +DECLARE_INSN(fcvt_q_h, MATCH_FCVT_Q_H, MASK_FCVT_Q_H)
  2585. +DECLARE_INSN(fcvt_h_q, MATCH_FCVT_H_Q, MASK_FCVT_H_Q)
  2586. +DECLARE_INSN(fsqrt_h, MATCH_FSQRT_H, MASK_FSQRT_H)
  2587. DECLARE_INSN(fadd_q, MATCH_FADD_Q, MASK_FADD_Q)
  2588. DECLARE_INSN(fsub_q, MATCH_FSUB_Q, MASK_FSUB_Q)
  2589. DECLARE_INSN(fmul_q, MATCH_FMUL_Q, MASK_FMUL_Q)
  2590. @@ -1212,6 +3155,9 @@ DECLARE_INSN(feq_s, MATCH_FEQ_S, MASK_FEQ_S)
  2591. DECLARE_INSN(fle_d, MATCH_FLE_D, MASK_FLE_D)
  2592. DECLARE_INSN(flt_d, MATCH_FLT_D, MASK_FLT_D)
  2593. DECLARE_INSN(feq_d, MATCH_FEQ_D, MASK_FEQ_D)
  2594. +DECLARE_INSN(fle_h, MATCH_FLE_H, MASK_FLE_H)
  2595. +DECLARE_INSN(flt_h, MATCH_FLT_H, MASK_FLT_H)
  2596. +DECLARE_INSN(feq_h, MATCH_FEQ_H, MASK_FEQ_H)
  2597. DECLARE_INSN(fle_q, MATCH_FLE_Q, MASK_FLE_Q)
  2598. DECLARE_INSN(flt_q, MATCH_FLT_Q, MASK_FLT_Q)
  2599. DECLARE_INSN(feq_q, MATCH_FEQ_Q, MASK_FEQ_Q)
  2600. @@ -1227,6 +3173,12 @@ DECLARE_INSN(fcvt_l_d, MATCH_FCVT_L_D, MASK_FCVT_L_D)
  2601. DECLARE_INSN(fcvt_lu_d, MATCH_FCVT_LU_D, MASK_FCVT_LU_D)
  2602. DECLARE_INSN(fmv_x_d, MATCH_FMV_X_D, MASK_FMV_X_D)
  2603. DECLARE_INSN(fclass_d, MATCH_FCLASS_D, MASK_FCLASS_D)
  2604. +DECLARE_INSN(fcvt_w_h, MATCH_FCVT_W_H, MASK_FCVT_W_H)
  2605. +DECLARE_INSN(fcvt_wu_h, MATCH_FCVT_WU_H, MASK_FCVT_WU_H)
  2606. +DECLARE_INSN(fcvt_l_h, MATCH_FCVT_L_H, MASK_FCVT_L_H)
  2607. +DECLARE_INSN(fcvt_lu_h, MATCH_FCVT_LU_H, MASK_FCVT_LU_H)
  2608. +DECLARE_INSN(fmv_x_h, MATCH_FMV_X_H, MASK_FMV_X_H)
  2609. +DECLARE_INSN(fclass_h, MATCH_FCLASS_H, MASK_FCLASS_H)
  2610. DECLARE_INSN(fcvt_w_q, MATCH_FCVT_W_Q, MASK_FCVT_W_Q)
  2611. DECLARE_INSN(fcvt_wu_q, MATCH_FCVT_WU_Q, MASK_FCVT_WU_Q)
  2612. DECLARE_INSN(fcvt_l_q, MATCH_FCVT_L_Q, MASK_FCVT_L_Q)
  2613. @@ -1243,6 +3195,11 @@ DECLARE_INSN(fcvt_d_wu, MATCH_FCVT_D_WU, MASK_FCVT_D_WU)
  2614. DECLARE_INSN(fcvt_d_l, MATCH_FCVT_D_L, MASK_FCVT_D_L)
  2615. DECLARE_INSN(fcvt_d_lu, MATCH_FCVT_D_LU, MASK_FCVT_D_LU)
  2616. DECLARE_INSN(fmv_d_x, MATCH_FMV_D_X, MASK_FMV_D_X)
  2617. +DECLARE_INSN(fcvt_h_w, MATCH_FCVT_H_W, MASK_FCVT_H_W)
  2618. +DECLARE_INSN(fcvt_h_wu, MATCH_FCVT_H_WU, MASK_FCVT_H_WU)
  2619. +DECLARE_INSN(fcvt_h_l, MATCH_FCVT_H_L, MASK_FCVT_H_L)
  2620. +DECLARE_INSN(fcvt_h_lu, MATCH_FCVT_H_LU, MASK_FCVT_H_LU)
  2621. +DECLARE_INSN(fmv_h_x, MATCH_FMV_H_X, MASK_FMV_H_X)
  2622. DECLARE_INSN(fcvt_q_w, MATCH_FCVT_Q_W, MASK_FCVT_Q_W)
  2623. DECLARE_INSN(fcvt_q_wu, MATCH_FCVT_Q_WU, MASK_FCVT_Q_WU)
  2624. DECLARE_INSN(fcvt_q_l, MATCH_FCVT_Q_L, MASK_FCVT_Q_L)
  2625. @@ -1349,9 +3306,11 @@ DECLARE_INSN(fsri, MATCH_FSRI, MASK_FSRI)
  2626. DECLARE_INSN(fslw, MATCH_FSLW, MASK_FSLW)
  2627. DECLARE_INSN(fsrw, MATCH_FSRW, MASK_FSRW)
  2628. DECLARE_INSN(fsriw, MATCH_FSRIW, MASK_FSRIW)
  2629. +DECLARE_INSN(flh, MATCH_FLH, MASK_FLH)
  2630. DECLARE_INSN(flw, MATCH_FLW, MASK_FLW)
  2631. DECLARE_INSN(fld, MATCH_FLD, MASK_FLD)
  2632. DECLARE_INSN(flq, MATCH_FLQ, MASK_FLQ)
  2633. +DECLARE_INSN(fsh, MATCH_FSH, MASK_FSH)
  2634. DECLARE_INSN(fsw, MATCH_FSW, MASK_FSW)
  2635. DECLARE_INSN(fsd, MATCH_FSD, MASK_FSD)
  2636. DECLARE_INSN(fsq, MATCH_FSQ, MASK_FSQ)
  2637. @@ -1363,6 +3322,10 @@ DECLARE_INSN(fmadd_d, MATCH_FMADD_D, MASK_FMADD_D)
  2638. DECLARE_INSN(fmsub_d, MATCH_FMSUB_D, MASK_FMSUB_D)
  2639. DECLARE_INSN(fnmsub_d, MATCH_FNMSUB_D, MASK_FNMSUB_D)
  2640. DECLARE_INSN(fnmadd_d, MATCH_FNMADD_D, MASK_FNMADD_D)
  2641. +DECLARE_INSN(fmadd_h, MATCH_FMADD_H, MASK_FMADD_H)
  2642. +DECLARE_INSN(fmsub_h, MATCH_FMSUB_H, MASK_FMSUB_H)
  2643. +DECLARE_INSN(fnmsub_h, MATCH_FNMSUB_H, MASK_FNMSUB_H)
  2644. +DECLARE_INSN(fnmadd_h, MATCH_FNMADD_H, MASK_FNMADD_H)
  2645. DECLARE_INSN(fmadd_q, MATCH_FMADD_Q, MASK_FMADD_Q)
  2646. DECLARE_INSN(fmsub_q, MATCH_FMSUB_Q, MASK_FMSUB_Q)
  2647. DECLARE_INSN(fnmsub_q, MATCH_FNMSUB_Q, MASK_FNMSUB_Q)
  2648. @@ -1671,6 +3634,13 @@ DECLARE_CSR(mhcounteren, CSR_MHCOUNTEREN, CSR_CLASS_I, PRIV_SPEC_CLASS_1P9P1, PR
  2649. DECLARE_CSR(fflags, CSR_FFLAGS, CSR_CLASS_F, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2650. DECLARE_CSR(frm, CSR_FRM, CSR_CLASS_F, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2651. DECLARE_CSR(fcsr, CSR_FCSR, CSR_CLASS_F, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2652. +DECLARE_CSR(vstart, CSR_VSTART, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2653. +DECLARE_CSR(vxsat, CSR_VXSAT, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2654. +DECLARE_CSR(vxrm, CSR_VXRM, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2655. +DECLARE_CSR(vcsr, CSR_VCSR, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2656. +DECLARE_CSR(vl, CSR_VL, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2657. +DECLARE_CSR(vtype, CSR_VTYPE, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2658. +DECLARE_CSR(vlenb, CSR_VLENB, CSR_CLASS_V, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2659. DECLARE_CSR(dcsr, CSR_DCSR, CSR_CLASS_DEBUG, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2660. DECLARE_CSR(dpc, CSR_DPC, CSR_CLASS_DEBUG, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2661. DECLARE_CSR(dscratch0, CSR_DSCRATCH0, CSR_CLASS_DEBUG, PRIV_SPEC_CLASS_NONE, PRIV_SPEC_CLASS_NONE)
  2662. diff --git a/include/opcode/riscv.h b/include/opcode/riscv.h
  2663. index 87d1aedb40..524637a27b 100644
  2664. --- a/include/opcode/riscv.h
  2665. +++ b/include/opcode/riscv.h
  2666. @@ -52,6 +52,28 @@ static const char * const riscv_pred_succ[16] =
  2667. "i", "iw", "ir", "irw", "io", "iow", "ior", "iorw"
  2668. };
  2669. +/* List of vsetvli vsew constants. */
  2670. +static const char * const riscv_vsew[8] =
  2671. +{
  2672. + "e8", "e16", "e32", "e64", "e128", "e256", "e512", "e1024"
  2673. +};
  2674. +
  2675. +/* List of vsetvli vlmul constants. */
  2676. +static const char * const riscv_vlmul[8] =
  2677. +{
  2678. + "m1", "m2", "m4", "m8", 0, "mf8", "mf4", "mf2"
  2679. +};
  2680. +
  2681. +static const char * const riscv_vta[2] =
  2682. +{
  2683. + "tu", "ta"
  2684. +};
  2685. +
  2686. +static const char * const riscv_vma[2] =
  2687. +{
  2688. + "mu", "ma"
  2689. +};
  2690. +
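The four tables above render the vtype fields that vsetvli takes: vlmul sits in bits [2:0] of the immediate, vsew in bits [5:3], vta in bit 6, and vma in bit 7 (see the OP_SH_VLMUL/VSEW/VTA/VMA definitions later in this header). A minimal stand-alone C sketch of that decode, assuming the same bit layout; print_vtype is an illustrative helper, not part of the patch:

    #include <stdio.h>

    /* Render a vtype immediate the way the disassembler below does,
       e.g. 0xd8 -> "e64,m1,ta,ma".  */
    static void
    print_vtype (unsigned int imm, char *buf)
    {
      static const char *const vsew[8] =
        { "e8", "e16", "e32", "e64", "e128", "e256", "e512", "e1024" };
      static const char *const vlmul[8] =
        { "m1", "m2", "m4", "m8", 0, "mf8", "mf4", "mf2" };
      const char *lmul = vlmul[imm & 0x7];        /* vlmul, bits 2:0 */
      sprintf (buf, "%s,%s,%s,%s",
               vsew[(imm >> 3) & 0x7],            /* vsew, bits 5:3 */
               lmul ? lmul : "reserved",          /* vlmul encoding 4 is reserved */
               ((imm >> 6) & 1) ? "ta" : "tu",    /* vta, bit 6 */
               ((imm >> 7) & 1) ? "ma" : "mu");   /* vma, bit 7 */
    }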
  2691. #define RVC_JUMP_BITS 11
  2692. #define RVC_JUMP_REACH ((1ULL << RVC_JUMP_BITS) * RISCV_JUMP_ALIGN)
  2693. @@ -99,6 +121,16 @@ static const char * const riscv_pred_succ[16] =
  2694. ((RV_X(x, 3, 2) << 1) | (RV_X(x, 10, 2) << 3) | (RV_X(x, 2, 1) << 5) | (RV_X(x, 5, 2) << 6) | (-RV_X(x, 12, 1) << 8))
  2695. #define EXTRACT_RVC_J_IMM(x) \
  2696. ((RV_X(x, 3, 3) << 1) | (RV_X(x, 11, 1) << 4) | (RV_X(x, 2, 1) << 5) | (RV_X(x, 7, 1) << 6) | (RV_X(x, 6, 1) << 7) | (RV_X(x, 9, 2) << 8) | (RV_X(x, 8, 1) << 10) | (-RV_X(x, 12, 1) << 11))
  2697. +#define EXTRACT_RVV_VI_IMM(x) \
  2698. + (RV_X(x, 15, 5) | (-RV_X(x, 19, 1) << 5))
  2699. +#define EXTRACT_RVV_VI_UIMM(x) \
  2700. + (RV_X(x, 15, 5))
  2701. +#define EXTRACT_RVV_OFFSET(x) \
  2702. + (RV_X(x, 29, 3))
  2703. +#define EXTRACT_RVV_VB_IMM(x) \
  2704. + (RV_X(x, 20, 10))
  2705. +#define EXTRACT_RVV_VC_IMM(x) \
  2706. + (RV_X(x, 20, 11))
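Of these, only EXTRACT_RVV_VI_IMM sign-extends: bit 19 is replicated into the upper bits. A worked example (not from the patch) with the simm5 field, bits 19:15, set to all ones:

    /* RV_X(x, s, n) == ((x >> s) & ((1 << n) - 1)), so for an insn
       word whose bits 19:15 are 0b11111:
         EXTRACT_RVV_VI_IMM  == 31 | (-1 << 5) == -1   (signed)
         EXTRACT_RVV_VI_UIMM == 31                     (unsigned)  */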
  2707. #define ENCODE_ITYPE_IMM(x) \
  2708. (RV_X(x, 0, 12) << 20)
  2709. @@ -138,6 +170,10 @@ static const char * const riscv_pred_succ[16] =
  2710. ((RV_X(x, 1, 2) << 3) | (RV_X(x, 3, 2) << 10) | (RV_X(x, 5, 1) << 2) | (RV_X(x, 6, 2) << 5) | (RV_X(x, 8, 1) << 12))
  2711. #define ENCODE_RVC_J_IMM(x) \
  2712. ((RV_X(x, 1, 3) << 3) | (RV_X(x, 4, 1) << 11) | (RV_X(x, 5, 1) << 2) | (RV_X(x, 6, 1) << 7) | (RV_X(x, 7, 1) << 6) | (RV_X(x, 8, 2) << 9) | (RV_X(x, 10, 1) << 8) | (RV_X(x, 11, 1) << 12))
  2713. +#define ENCODE_RVV_VB_IMM(x) \
  2714. + (RV_X(x, 0, 10) << 20)
  2715. +#define ENCODE_RVV_VC_IMM(x) \
  2716. + (RV_X(x, 0, 11) << 20)
  2717. #define VALID_ITYPE_IMM(x) (EXTRACT_ITYPE_IMM(ENCODE_ITYPE_IMM(x)) == (x))
  2718. #define VALID_STYPE_IMM(x) (EXTRACT_STYPE_IMM(ENCODE_STYPE_IMM(x)) == (x))
  2719. @@ -158,6 +194,8 @@ static const char * const riscv_pred_succ[16] =
  2720. #define VALID_RVC_SDSP_IMM(x) (EXTRACT_RVC_SDSP_IMM(ENCODE_RVC_SDSP_IMM(x)) == (x))
  2721. #define VALID_RVC_B_IMM(x) (EXTRACT_RVC_B_IMM(ENCODE_RVC_B_IMM(x)) == (x))
  2722. #define VALID_RVC_J_IMM(x) (EXTRACT_RVC_J_IMM(ENCODE_RVC_J_IMM(x)) == (x))
  2723. +#define VALID_RVV_VB_IMM(x) (EXTRACT_RVV_VB_IMM(ENCODE_RVV_VB_IMM(x)) == (x))
  2724. +#define VALID_RVV_VC_IMM(x) (EXTRACT_RVV_VC_IMM(ENCODE_RVV_VC_IMM(x)) == (x))
  2725. #define RISCV_RTYPE(insn, rd, rs1, rs2) \
  2726. ((MATCH_ ## insn) | ((rd) << OP_SH_RD) | ((rs1) << OP_SH_RS1) | ((rs2) << OP_SH_RS2))
  2727. @@ -256,6 +294,35 @@ static const char * const riscv_pred_succ[16] =
  2728. #define OP_MASK_CFUNCT2 0x3
  2729. #define OP_SH_CFUNCT2 5
  2730. +/* RVV fields. */
  2731. +
  2732. +#define OP_MASK_VD 0x1f
  2733. +#define OP_SH_VD 7
  2734. +#define OP_MASK_VS1 0x1f
  2735. +#define OP_SH_VS1 15
  2736. +#define OP_MASK_VS2 0x1f
  2737. +#define OP_SH_VS2 20
  2738. +#define OP_MASK_VIMM 0x1f
  2739. +#define OP_SH_VIMM 15
  2740. +#define OP_MASK_VMASK 0x1
  2741. +#define OP_SH_VMASK 25
  2742. +#define OP_MASK_VFUNCT6 0x3f
  2743. +#define OP_SH_VFUNCT6 26
  2744. +
  2745. +#define OP_MASK_VLMUL 0x7
  2746. +#define OP_SH_VLMUL 0
  2747. +#define OP_MASK_VSEW 0x7
  2748. +#define OP_SH_VSEW 3
  2749. +#define OP_MASK_VTA 0x1
  2750. +#define OP_SH_VTA 6
  2751. +#define OP_MASK_VMA 0x1
  2752. +#define OP_SH_VMA 7
  2753. +#define OP_MASK_VTYPE_RES 0x1
  2754. +#define OP_SH_VTYPE_RES 10
  2755. +
  2756. +#define OP_MASK_VWD 0x1
  2757. +#define OP_SH_VWD 26
  2758. +
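These field definitions are consumed through the EXTRACT_OPERAND helper already in this header, which expands to ((insn) >> OP_SH_FIELD) & OP_MASK_FIELD. For instance, pulling the register fields out of a vector insn word:

    unsigned int vd = EXTRACT_OPERAND (VD, insn);    /* bits 11:7  */
    unsigned int vs1 = EXTRACT_OPERAND (VS1, insn);  /* bits 19:15 */
    unsigned int vs2 = EXTRACT_OPERAND (VS2, insn);  /* bits 24:20 */
    unsigned int vm = EXTRACT_OPERAND (VMASK, insn); /* bit 25; 0 means masked by v0.t */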
  2759. /* ABI names for selected x-registers. */
  2760. #define X_RA 1
  2761. @@ -269,6 +336,8 @@ static const char * const riscv_pred_succ[16] =
  2762. #define NGPR 32
  2763. #define NFPR 32
  2764. +#define NVECR 32
  2765. +#define NVECM 1
  2766. /* These fake label defines are use by both the assembler, and
  2767. libopcodes. The assembler uses this when it needs to generate a fake
  2768. @@ -327,6 +396,13 @@ enum riscv_insn_class
  2769. INSN_CLASS_ZBT,
  2770. INSN_CLASS_ZBP_OR_ZBE_OR_ZBF,
  2771. INSN_CLASS_ZBP_OR_ZBE_OR_ZBF_OR_ZBM,
  2772. + INSN_CLASS_V,
  2773. + INSN_CLASS_F_AND_ZFH,
  2774. + INSN_CLASS_D_AND_ZFH,
  2775. + INSN_CLASS_Q_AND_ZFH,
  2776. + INSN_CLASS_V_AND_F,
  2777. + INSN_CLASS_V_OR_ZVAMO,
  2778. + INSN_CLASS_V_OR_ZVLSSEG,
  2779. };
  2780. /* This structure holds information for a particular instruction. */
  2781. @@ -394,6 +470,7 @@ enum riscv_csr_class
  2782. CSR_CLASS_I,
  2783. CSR_CLASS_I_32, /* rv32 only */
  2784. CSR_CLASS_F, /* f-ext only */
  2785. + CSR_CLASS_V, /* v-ext only */
  2786. CSR_CLASS_DEBUG /* debug CSR */
  2787. };
  2788. @@ -488,9 +565,11 @@ enum
  2789. M_SH,
  2790. M_SW,
  2791. M_SD,
  2792. + M_FLH,
  2793. M_FLW,
  2794. M_FLD,
  2795. M_FLQ,
  2796. + M_FSH,
  2797. M_FSW,
  2798. M_FSD,
  2799. M_FSQ,
  2800. @@ -501,6 +580,8 @@ enum
  2801. M_ZEXTW,
  2802. M_SEXTB,
  2803. M_SEXTH,
  2804. + M_VMSGE,
  2805. + M_VMSGEU,
  2806. M_NUM_MACROS
  2807. };
  2808. @@ -509,6 +590,8 @@ extern const char * const riscv_gpr_names_numeric[NGPR];
  2809. extern const char * const riscv_gpr_names_abi[NGPR];
  2810. extern const char * const riscv_fpr_names_numeric[NFPR];
  2811. extern const char * const riscv_fpr_names_abi[NFPR];
  2812. +extern const char * const riscv_vecr_names_numeric[NVECR];
  2813. +extern const char * const riscv_vecm_names_numeric[NVECM];
  2814. extern const struct riscv_opcode riscv_opcodes[];
  2815. extern const struct riscv_opcode riscv_insn_types[];
  2816. diff --git a/opcodes/riscv-dis.c b/opcodes/riscv-dis.c
  2817. index a169d013c4..065ea75b82 100644
  2818. --- a/opcodes/riscv-dis.c
  2819. +++ b/opcodes/riscv-dis.c
  2820. @@ -43,6 +43,7 @@ struct riscv_private_data
  2821. static const char * const *riscv_gpr_names;
  2822. static const char * const *riscv_fpr_names;
  2823. +static const char * const *riscv_vecr_names;
  2824. /* Other options. */
  2825. static int no_aliases; /* If set disassemble as most general inst. */
  2826. @@ -52,6 +53,7 @@ set_default_riscv_dis_options (void)
  2827. {
  2828. riscv_gpr_names = riscv_gpr_names_abi;
  2829. riscv_fpr_names = riscv_fpr_names_abi;
  2830. + riscv_vecr_names = riscv_vecr_names_numeric;
  2831. no_aliases = 0;
  2832. }
  2833. @@ -413,6 +415,88 @@ print_insn_args (const char *d, insn_t l, bfd_vma pc, disassemble_info *info)
  2834. print (info->stream, "%d", rs1);
  2835. break;
  2836. + case 'V': /* RVV */
  2837. + switch (*++d)
  2838. + {
  2839. + case 'd':
  2840. + case 'f':
  2841. + print (info->stream, "%s",
  2842. + riscv_vecr_names[EXTRACT_OPERAND (VD, l)]);
  2843. + break;
  2844. +
  2845. + case 'e':
  2846. + if (!EXTRACT_OPERAND (VWD, l))
  2847. + print (info->stream, "%s", riscv_gpr_names[0]);
  2848. + else
  2849. + print (info->stream, "%s",
  2850. + riscv_vecr_names[EXTRACT_OPERAND (VD, l)]);
  2851. + break;
  2852. +
  2853. + case 's':
  2854. + print (info->stream, "%s",
  2855. + riscv_vecr_names[EXTRACT_OPERAND (VS1, l)]);
  2856. + break;
  2857. +
  2858. + case 't':
  2859. + case 'u': /* VS1 == VS2 already verified at this point. */
  2860. + case 'v': /* VD == VS1 == VS2 already verified at this point. */
  2861. + print (info->stream, "%s",
  2862. + riscv_vecr_names[EXTRACT_OPERAND (VS2, l)]);
  2863. + break;
  2864. +
  2865. + case '0':
  2866. + print (info->stream, "%s", riscv_vecr_names[0]);
  2867. + break;
  2868. +
  2869. + case 'b':
  2870. + case 'c':
  2871. + {
  2872. + int imm = (*d == 'b') ? EXTRACT_RVV_VB_IMM (l)
  2873. + : EXTRACT_RVV_VC_IMM (l);
  2874. + unsigned int imm_vlmul = EXTRACT_OPERAND (VLMUL, imm);
  2875. + unsigned int imm_vsew = EXTRACT_OPERAND (VSEW, imm);
  2876. + unsigned int imm_vta = EXTRACT_OPERAND (VTA, imm);
  2877. + unsigned int imm_vma = EXTRACT_OPERAND (VMA, imm);
  2878. + unsigned int imm_vtype_res = EXTRACT_OPERAND (VTYPE_RES, imm);
  2879. +
+ if (imm_vsew < ARRAY_SIZE (riscv_vsew)
+ && imm_vlmul < ARRAY_SIZE (riscv_vlmul)
+ /* vlmul encoding 4 is reserved, so riscv_vlmul holds a null there. */
+ && riscv_vlmul[imm_vlmul] != NULL
+ && imm_vta < ARRAY_SIZE (riscv_vta)
+ && imm_vma < ARRAY_SIZE (riscv_vma)
+ && ! imm_vtype_res)
  2885. + print (info->stream, "%s,%s,%s,%s", riscv_vsew[imm_vsew],
  2886. + riscv_vlmul[imm_vlmul], riscv_vta[imm_vta],
  2887. + riscv_vma[imm_vma]);
  2888. + else
  2889. + print (info->stream, "%d", imm);
  2890. + }
  2891. + break;
  2892. +
  2893. + case 'i':
  2894. + print (info->stream, "%d", (int)EXTRACT_RVV_VI_IMM (l));
  2895. + break;
  2896. +
  2897. + case 'j':
  2898. + print (info->stream, "%d", (int)EXTRACT_RVV_VI_UIMM (l));
  2899. + break;
  2900. +
  2901. + case 'k':
  2902. + print (info->stream, "%d", (int)EXTRACT_RVV_OFFSET (l));
  2903. + break;
  2904. +
  2905. + case 'm':
  2906. + if (! EXTRACT_OPERAND (VMASK, l))
  2907. + print (info->stream, ",%s", riscv_vecm_names_numeric[0]);
  2908. + break;
  2909. +
  2910. + default:
  2911. + /* xgettext:c-format */
  2912. + print (info->stream, _("# internal error, undefined modifier (V%c)"),
  2913. + *d);
  2914. + return;
  2915. + }
  2916. + break;
  2917. +
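Put together, the new 'V' operand letters produce disassembly along these lines (illustrative output, matching the tables and cases above):

    vsetvli a0,a1,e32,m4,ta,ma    # 'Vc' decodes the vtype immediate
    vadd.vv v4,v8,v12,v0.t        # 'Vd'/'Vt'/'Vs' registers; 'Vm' appends ",v0.t" when vm is 0
    vadd.vi v4,v8,-5              # 'Vi' prints the sign-extended simm5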
  2918. default:
  2919. /* xgettext:c-format */
  2920. print (info->stream, _("# internal error, undefined modifier (%c)"),
  2921. diff --git a/opcodes/riscv-opc.c b/opcodes/riscv-opc.c
  2922. index d73c980acd..b51e876977 100644
  2923. --- a/opcodes/riscv-opc.c
  2924. +++ b/opcodes/riscv-opc.c
  2925. @@ -61,6 +61,19 @@ const char * const riscv_fpr_names_abi[NFPR] = {
  2926. "fs8", "fs9", "fs10", "fs11", "ft8", "ft9", "ft10", "ft11"
  2927. };
  2928. +const char * const riscv_vecr_names_numeric[NVECR] =
  2929. +{
  2930. + "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7",
  2931. + "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15",
  2932. + "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23",
  2933. + "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
  2934. +};
  2935. +
  2936. +const char * const riscv_vecm_names_numeric[NVECM] =
  2937. +{
  2938. + "v0.t"
  2939. +};
  2940. +
  2941. /* The order of overloaded instructions matters. Label arguments and
  2942. register arguments look the same. Instructions that can have either
  2943. for arguments must apear in the correct order in this table for the
  2944. @@ -84,6 +97,10 @@ const char * const riscv_fpr_names_abi[NFPR] = {
  2945. #define MASK_AQ (OP_MASK_AQ << OP_SH_AQ)
  2946. #define MASK_RL (OP_MASK_RL << OP_SH_RL)
  2947. #define MASK_AQRL (MASK_AQ | MASK_RL)
  2948. +#define MASK_VD (OP_MASK_VD << OP_SH_VD)
  2949. +#define MASK_VS1 (OP_MASK_VS1 << OP_SH_VS1)
  2950. +#define MASK_VS2 (OP_MASK_VS2 << OP_SH_VS2)
  2951. +#define MASK_VMASK (OP_MASK_VMASK << OP_SH_VMASK)
  2952. static int
  2953. match_opcode (const struct riscv_opcode *op, insn_t insn)
  2954. @@ -106,6 +123,53 @@ match_rs1_eq_rs2 (const struct riscv_opcode *op, insn_t insn)
  2955. return match_opcode (op, insn) && rs1 == rs2;
  2956. }
  2957. +static int
  2958. +match_vs1_eq_vs2 (const struct riscv_opcode *op,
  2959. + insn_t insn,
  2960. + int constraints ATTRIBUTE_UNUSED,
  2961. + const char **error ATTRIBUTE_UNUSED)
  2962. +{
  2963. + int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
  2964. + int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
  2965. +
  2966. + return match_opcode (op, insn) && vs1 == vs2;
  2967. +}
  2968. +
  2969. +static int
+match_vs1_eq_vs2_neq_vm (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints,
+ const char **error)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ if (!constraints || error == NULL)
+ return match_opcode (op, insn) && vs1 == vs2;
+
+ if (!vm && vm == vd)
+ *error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn) && vs1 == vs2;
+ return 0;
+}
  2989. +
  2990. +static int
+match_vd_eq_vs1_eq_vs2 (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints ATTRIBUTE_UNUSED,
+ const char **error ATTRIBUTE_UNUSED)
  2995. +{
  2996. + int vd = (insn & MASK_VD) >> OP_SH_VD;
  2997. + int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
  2998. + int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
  2999. +
  3000. + return match_opcode (op, insn) && vd == vs1 && vs1 == vs2;
  3001. +}
+
  3004. static int
  3005. match_rd_nonzero (const struct riscv_opcode *op, insn_t insn)
  3006. {
  3007. @@ -200,6 +264,274 @@ match_srxi_as_c_srxi (const struct riscv_opcode *op, insn_t insn)
  3008. return match_opcode (op, insn) && EXTRACT_RVC_IMM (insn) != 0;
  3009. }
  3010. +
  3011. +/* These are used to check the vector constraints. */
  3012. +
  3013. +static int
+match_widen_vd_neq_vs1_neq_vs2_neq_vm (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints,
+ const char **error)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ if (!constraints || error == NULL)
+ return match_opcode (op, insn);
+
+ if ((vd % 2) != 0)
+ *error = "illegal operands vd must be multiple of 2";
+ else if (vs1 >= vd && vs1 <= (vd + 1))
+ *error = "illegal operands vd cannot overlap vs1";
+ else if (vs2 >= vd && vs2 <= (vd + 1))
+ *error = "illegal operands vd cannot overlap vs2";
+ else if (!vm && vm >= vd && vm <= (vd + 1))
+ *error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
  3043. +
  3044. +static int
+match_widen_vd_neq_vs1_neq_vm (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints,
+ const char **error)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ if (!constraints || error == NULL)
+ return match_opcode (op, insn);
+
+ if ((vd % 2) != 0)
+ *error = "illegal operands vd must be multiple of 2";
+ else if ((vs2 % 2) != 0)
+ *error = "illegal operands vs2 must be multiple of 2";
+ else if (vs1 >= vd && vs1 <= (vd + 1))
+ *error = "illegal operands vd cannot overlap vs1";
+ else if (!vm && vm >= vd && vm <= (vd + 1))
+ *error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
  3070. +
  3071. +static int
+match_widen_vd_neq_vs2_neq_vm (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints,
+ const char **error)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ if (!constraints || error == NULL)
+ return match_opcode (op, insn);
+
+ if ((vd % 2) != 0)
+ *error = "illegal operands vd must be multiple of 2";
+ else if (vs2 >= vd && vs2 <= (vd + 1))
+ *error = "illegal operands vd cannot overlap vs2";
+ else if (!vm && vm >= vd && vm <= (vd + 1))
+ *error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
  3094. +
  3095. +static int
+match_widen_vd_neq_vm (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints,
+ const char **error)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ if (!constraints || error == NULL)
+ return match_opcode (op, insn);
+
+ if ((vd % 2) != 0)
+ *error = "illegal operands vd must be multiple of 2";
+ else if ((vs2 % 2) != 0)
+ *error = "illegal operands vs2 must be multiple of 2";
+ else if (!vm && vm >= vd && vm <= (vd + 1))
+ *error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
  3118. +
  3119. +static int
+match_narrow_vd_neq_vs2_neq_vm (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints,
+ const char **error)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ if (!constraints || error == NULL)
+ return match_opcode (op, insn);
+
+ if ((vs2 % 2) != 0)
+ *error = "illegal operands vs2 must be multiple of 2";
+ else if (vd >= vs2 && vd <= (vs2 + 1))
+ *error = "illegal operands vd cannot overlap vs2";
+ else if (!vm && vd >= vm && vd <= (vm + 1))
+ *error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
  3142. +
  3143. +static int
+match_vd_neq_vs1_neq_vs2 (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints,
+ const char **error)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+
+ if (!constraints || error == NULL)
+ return match_opcode (op, insn);
+
+ if (vs1 == vd)
+ *error = "illegal operands vd cannot overlap vs1";
+ else if (vs2 == vd)
+ *error = "illegal operands vd cannot overlap vs2";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
  3164. +
  3165. +static int
  3166. +match_vd_neq_vs1_neq_vs2_neq_vm (const struct riscv_opcode *op,
  3167. + insn_t insn,
  3168. + int constraints,
  3169. + const char **error)
  3170. +{
  3171. + int vd = (insn & MASK_VD) >> OP_SH_VD;
  3172. + int vs1 = (insn & MASK_VS1) >> OP_SH_VS1;
  3173. + int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
  3174. + int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
  3175. +
  3176. + if (!constraints || error == NULL)
  3177. + return match_opcode (op, insn);
  3178. +
  3179. + if (vs1 == vd)
  3180. + *error = "illegal operands vd cannot overlap vs1";
  3181. + else if (vs2 == vd)
  3182. + *error = "illegal operands vd cannot overlap vs2";
  3183. + else if (!vm && vm == vd)
  3184. + *error = "illegal operands vd cannot overlap vm";
  3185. + else
  3186. + return match_opcode (op, insn);
  3187. + return 0;
  3188. +}
  3189. +
  3190. +static int
+match_vd_neq_vs2_neq_vm (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints,
+ const char **error)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ if (!constraints || error == NULL)
+ return match_opcode (op, insn);
+
+ if (vs2 == vd)
+ *error = "illegal operands vd cannot overlap vs2";
+ else if (!vm && vm == vd)
+ *error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
  3211. +
+/* v[m]adc and v[m]sbc use the vm field to encode the carry-in
+ v0 register, and that carry-in register must not overlap vd
+ either, so the same match_vd_neq_vm covers this overlap
+ constraint as well. */
  3216. +
  3217. +static int
+match_vd_neq_vm (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints,
+ const char **error)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vm = (insn & MASK_VMASK) >> OP_SH_VMASK;
+
+ if (!constraints || error == NULL)
+ return match_opcode (op, insn);
+
+ if (!vm && vm == vd)
+ *error = "illegal operands vd cannot overlap vm";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
  3235. +
  3236. +static int
+match_vls_nf_rv (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints,
+ const char **error)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int nf = ((insn & (0x7 << 29)) >> 29) + 1;
+
+ if (!constraints || error == NULL)
+ return match_opcode (op, insn);
+
+ if ((vd % nf) != 0)
+ *error = "illegal operands vd must be multiple of nf";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
  3254. +
  3255. +static int
+match_vmv_nf_rv (const struct riscv_opcode *op,
+ insn_t insn,
+ int constraints,
+ const char **error)
+{
+ int vd = (insn & MASK_VD) >> OP_SH_VD;
+ int vs2 = (insn & MASK_VS2) >> OP_SH_VS2;
+ int nf = ((insn & (0x7 << 15)) >> 15) + 1;
+
+ if (!constraints || error == NULL)
+ return match_opcode (op, insn);
+
+ if ((vd % nf) != 0)
+ *error = "illegal operands vd must be multiple of nf";
+ else if ((vs2 % nf) != 0)
+ *error = "illegal operands vs2 must be multiple of nf";
+ else
+ return match_opcode (op, insn);
+ return 0;
+}
  3276. +
  3277. +
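All of these helpers share the four-argument shape (op, insn, constraints, error): with constraints clear they behave like plain match_opcode for the disassembler, and with constraints set they report the violated rule through error. A minimal sketch of the assembler-side call, assuming the riscv_opcode match_func pointer was widened to the same signature elsewhere in this patch; validate_vector_operands is an illustrative name, not part of the patch:

    /* Returns nonzero when INSN both matches OP and satisfies the
       vector overlap rules; otherwise *ERROR holds the diagnostic,
       e.g. "illegal operands vd cannot overlap vm" for
       "vadc.vvm v0,v2,v1,v0".  */
    static int
    validate_vector_operands (const struct riscv_opcode *op, insn_t insn,
                              const char **error)
    {
      *error = NULL;
      return op->match_func (op, insn, 1, error);
    }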
  3278. const struct riscv_opcode riscv_opcodes[] =
  3279. {
  3280. /* name, xlen, isa, operands, match, mask, match_func, pinfo. */
  3281. @@ -243,10 +575,7 @@ const struct riscv_opcode riscv_opcodes[] =
  3282. {"mv", 0, INSN_CLASS_I, "d,s", MATCH_ADDI, MASK_ADDI | MASK_IMM, match_opcode, INSN_ALIAS },
  3283. {"move", 0, INSN_CLASS_C, "d,CV", MATCH_C_MV, MASK_C_MV, match_c_add, INSN_ALIAS },
  3284. {"move", 0, INSN_CLASS_I, "d,s", MATCH_ADDI, MASK_ADDI | MASK_IMM, match_opcode, INSN_ALIAS },
  3285. -{"sext.b", 0, INSN_CLASS_I, "d,s", 0, (int) M_SEXTB, match_never, INSN_MACRO },
  3286. -{"sext.h", 0, INSN_CLASS_I, "d,s", 0, (int) M_SEXTH, match_never, INSN_MACRO },
  3287. {"zext.b", 0, INSN_CLASS_I, "d,s", MATCH_ANDI | ENCODE_ITYPE_IMM (255), MASK_ANDI | MASK_IMM, match_opcode, INSN_ALIAS },
  3288. -{"zext.h", 0, INSN_CLASS_I, "d,s", 0, (int) M_ZEXTH, match_never, INSN_MACRO },
  3289. {"andi", 0, INSN_CLASS_C, "Cs,Cw,Co", MATCH_C_ANDI, MASK_C_ANDI, match_opcode, INSN_ALIAS },
  3290. {"andi", 0, INSN_CLASS_I, "d,s,j", MATCH_ANDI, MASK_ANDI, match_opcode, 0 },
  3291. {"and", 0, INSN_CLASS_C, "Cs,Cw,Ct", MATCH_C_AND, MASK_C_AND, match_opcode, INSN_ALIAS },
  3292. @@ -380,7 +709,6 @@ const struct riscv_opcode riscv_opcodes[] =
  3293. {"sd", 64, INSN_CLASS_C, "Ct,Cl(Cs)", MATCH_C_SD, MASK_C_SD, match_opcode, INSN_ALIAS|INSN_DREF|INSN_8_BYTE },
  3294. {"sd", 64, INSN_CLASS_I, "t,q(s)", MATCH_SD, MASK_SD, match_opcode, INSN_DREF|INSN_8_BYTE },
  3295. {"sd", 64, INSN_CLASS_I, "t,A,s", 0, (int) M_SD, match_never, INSN_MACRO },
  3296. -{"zext.w", 64, INSN_CLASS_I, "d,s", 0, (int) M_ZEXTW, match_never, INSN_MACRO },
  3297. {"sext.w", 64, INSN_CLASS_C, "d,CU", MATCH_C_ADDIW, MASK_C_ADDIW | MASK_RVC_IMM, match_rd_nonzero, INSN_ALIAS },
  3298. {"sext.w", 64, INSN_CLASS_I, "d,s", MATCH_ADDIW, MASK_ADDIW | MASK_IMM, match_opcode, INSN_ALIAS },
  3299. {"addiw", 64, INSN_CLASS_C, "d,CU,Co", MATCH_C_ADDIW, MASK_C_ADDIW, match_rd_nonzero, INSN_ALIAS },
  3300. @@ -508,6 +836,71 @@ const struct riscv_opcode riscv_opcodes[] =
  3301. {"remw", 64, INSN_CLASS_M, "d,s,t", MATCH_REMW, MASK_REMW, match_opcode, 0 },
  3302. {"remuw", 64, INSN_CLASS_M, "d,s,t", MATCH_REMUW, MASK_REMUW, match_opcode, 0 },
  3303. +/* Half-precision floating-point instruction subset */
  3304. +{"flh", 0, INSN_CLASS_F_AND_ZFH, "D,o(s)", MATCH_FLH, MASK_FLH, match_opcode, INSN_DREF|INSN_2_BYTE },
  3305. +{"flh", 0, INSN_CLASS_F_AND_ZFH, "D,A,s", 0, (int) M_FLH, match_never, INSN_MACRO },
  3306. +{"fsh", 0, INSN_CLASS_F_AND_ZFH, "T,q(s)", MATCH_FSH, MASK_FSH, match_opcode, INSN_DREF|INSN_2_BYTE },
  3307. +{"fsh", 0, INSN_CLASS_F_AND_ZFH, "T,A,s", 0, (int) M_FSH, match_never, INSN_MACRO },
  3308. +{"fmv.h", 0, INSN_CLASS_F_AND_ZFH, "D,U", MATCH_FSGNJ_H, MASK_FSGNJ_H, match_rs1_eq_rs2, INSN_ALIAS },
  3309. +{"fneg.h", 0, INSN_CLASS_F_AND_ZFH, "D,U", MATCH_FSGNJN_H, MASK_FSGNJN_H, match_rs1_eq_rs2, INSN_ALIAS },
  3310. +{"fabs.h", 0, INSN_CLASS_F_AND_ZFH, "D,U", MATCH_FSGNJX_H, MASK_FSGNJX_H, match_rs1_eq_rs2, INSN_ALIAS },
  3311. +{"fsgnj.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T", MATCH_FSGNJ_H, MASK_FSGNJ_H, match_opcode, 0 },
  3312. +{"fsgnjn.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T", MATCH_FSGNJN_H, MASK_FSGNJN_H, match_opcode, 0 },
  3313. +{"fsgnjx.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T", MATCH_FSGNJX_H, MASK_FSGNJX_H, match_opcode, 0 },
  3314. +{"fadd.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T", MATCH_FADD_H | MASK_RM, MASK_FADD_H | MASK_RM, match_opcode, 0 },
  3315. +{"fadd.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,m", MATCH_FADD_H, MASK_FADD_H, match_opcode, 0 },
  3316. +{"fsub.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T", MATCH_FSUB_H | MASK_RM, MASK_FSUB_H | MASK_RM, match_opcode, 0 },
  3317. +{"fsub.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,m", MATCH_FSUB_H, MASK_FSUB_H, match_opcode, 0 },
  3318. +{"fmul.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T", MATCH_FMUL_H | MASK_RM, MASK_FMUL_H | MASK_RM, match_opcode, 0 },
  3319. +{"fmul.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,m", MATCH_FMUL_H, MASK_FMUL_H, match_opcode, 0 },
  3320. +{"fdiv.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T", MATCH_FDIV_H | MASK_RM, MASK_FDIV_H | MASK_RM, match_opcode, 0 },
  3321. +{"fdiv.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,m", MATCH_FDIV_H, MASK_FDIV_H, match_opcode, 0 },
  3322. +{"fsqrt.h", 0, INSN_CLASS_F_AND_ZFH, "D,S", MATCH_FSQRT_H | MASK_RM, MASK_FSQRT_H | MASK_RM, match_opcode, 0 },
  3323. +{"fsqrt.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,m", MATCH_FSQRT_H, MASK_FSQRT_H, match_opcode, 0 },
  3324. +{"fmin.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T", MATCH_FMIN_H, MASK_FMIN_H, match_opcode, 0 },
  3325. +{"fmax.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T", MATCH_FMAX_H, MASK_FMAX_H, match_opcode, 0 },
  3326. +{"fmadd.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,R", MATCH_FMADD_H | MASK_RM, MASK_FMADD_H | MASK_RM, match_opcode, 0 },
  3327. +{"fmadd.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,R,m", MATCH_FMADD_H, MASK_FMADD_H, match_opcode, 0 },
  3328. +{"fnmadd.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,R", MATCH_FNMADD_H | MASK_RM, MASK_FNMADD_H | MASK_RM, match_opcode, 0 },
  3329. +{"fnmadd.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,R,m", MATCH_FNMADD_H, MASK_FNMADD_H, match_opcode, 0 },
  3330. +{"fmsub.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,R", MATCH_FMSUB_H | MASK_RM, MASK_FMSUB_H | MASK_RM, match_opcode, 0 },
  3331. +{"fmsub.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,R,m", MATCH_FMSUB_H, MASK_FMSUB_H, match_opcode, 0 },
  3332. +{"fnmsub.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,R", MATCH_FNMSUB_H | MASK_RM, MASK_FNMSUB_H | MASK_RM, match_opcode, 0 },
  3333. +{"fnmsub.h", 0, INSN_CLASS_F_AND_ZFH, "D,S,T,R,m", MATCH_FNMSUB_H, MASK_FNMSUB_H, match_opcode, 0 },
  3334. +{"fcvt.w.h", 0, INSN_CLASS_F_AND_ZFH, "d,S", MATCH_FCVT_W_H | MASK_RM, MASK_FCVT_W_H | MASK_RM, match_opcode, 0 },
  3335. +{"fcvt.w.h", 0, INSN_CLASS_F_AND_ZFH, "d,S,m", MATCH_FCVT_W_H, MASK_FCVT_W_H, match_opcode, 0 },
  3336. +{"fcvt.wu.h", 0, INSN_CLASS_F_AND_ZFH, "d,S", MATCH_FCVT_WU_H | MASK_RM, MASK_FCVT_WU_H | MASK_RM, match_opcode, 0 },
  3337. +{"fcvt.wu.h", 0, INSN_CLASS_F_AND_ZFH, "d,S,m", MATCH_FCVT_WU_H, MASK_FCVT_WU_H, match_opcode, 0 },
  3338. +{"fcvt.h.w", 0, INSN_CLASS_F_AND_ZFH, "D,s", MATCH_FCVT_H_W | MASK_RM, MASK_FCVT_H_W | MASK_RM, match_opcode, 0 },
  3339. +{"fcvt.h.w", 0, INSN_CLASS_F_AND_ZFH, "D,s,m", MATCH_FCVT_H_W, MASK_FCVT_H_W, match_opcode, 0 },
  3340. +{"fcvt.h.wu", 0, INSN_CLASS_F_AND_ZFH, "D,s", MATCH_FCVT_H_WU | MASK_RM, MASK_FCVT_H_WU | MASK_RM, match_opcode, 0 },
  3341. +{"fcvt.h.wu", 0, INSN_CLASS_F_AND_ZFH, "D,s,m", MATCH_FCVT_H_WU, MASK_FCVT_H_WU, match_opcode, 0 },
  3342. +{"fcvt.s.h", 0, INSN_CLASS_F_AND_ZFH, "D,S", MATCH_FCVT_S_H, MASK_FCVT_S_H | MASK_RM, match_opcode, 0 },
  3343. +{"fcvt.d.h", 0, INSN_CLASS_D_AND_ZFH, "D,S", MATCH_FCVT_D_H, MASK_FCVT_D_H | MASK_RM, match_opcode, 0 },
  3344. +{"fcvt.q.h", 0, INSN_CLASS_Q_AND_ZFH, "D,S", MATCH_FCVT_Q_H, MASK_FCVT_Q_H | MASK_RM, match_opcode, 0 },
  3345. +{"fcvt.h.s", 0, INSN_CLASS_F_AND_ZFH, "D,S", MATCH_FCVT_H_S | MASK_RM, MASK_FCVT_H_S | MASK_RM, match_opcode, 0 },
  3346. +{"fcvt.h.s", 0, INSN_CLASS_F_AND_ZFH, "D,S,m", MATCH_FCVT_H_S, MASK_FCVT_H_S, match_opcode, 0 },
  3347. +{"fcvt.h.d", 0, INSN_CLASS_D_AND_ZFH, "D,S", MATCH_FCVT_H_D | MASK_RM, MASK_FCVT_H_D | MASK_RM, match_opcode, 0 },
  3348. +{"fcvt.h.d", 0, INSN_CLASS_D_AND_ZFH, "D,S,m", MATCH_FCVT_H_D, MASK_FCVT_H_D, match_opcode, 0 },
  3349. +{"fcvt.h.q", 0, INSN_CLASS_Q_AND_ZFH, "D,S", MATCH_FCVT_H_Q | MASK_RM, MASK_FCVT_H_Q | MASK_RM, match_opcode, 0 },
  3350. +{"fcvt.h.q", 0, INSN_CLASS_Q_AND_ZFH, "D,S,m", MATCH_FCVT_H_Q, MASK_FCVT_H_Q, match_opcode, 0 },
  3351. +{"fclass.h", 0, INSN_CLASS_F_AND_ZFH, "d,S", MATCH_FCLASS_H, MASK_FCLASS_H, match_opcode, 0 },
  3352. +{"feq.h", 0, INSN_CLASS_F_AND_ZFH, "d,S,T", MATCH_FEQ_H, MASK_FEQ_H, match_opcode, 0 },
  3353. +{"flt.h", 0, INSN_CLASS_F_AND_ZFH, "d,S,T", MATCH_FLT_H, MASK_FLT_H, match_opcode, 0 },
  3354. +{"fle.h", 0, INSN_CLASS_F_AND_ZFH, "d,S,T", MATCH_FLE_H, MASK_FLE_H, match_opcode, 0 },
  3355. +{"fgt.h", 0, INSN_CLASS_F_AND_ZFH, "d,T,S", MATCH_FLT_H, MASK_FLT_H, match_opcode, 0 },
  3356. +{"fge.h", 0, INSN_CLASS_F_AND_ZFH, "d,T,S", MATCH_FLE_H, MASK_FLE_H, match_opcode, 0 },
  3357. +{"fmv.x.h", 0, INSN_CLASS_F_AND_ZFH, "d,S", MATCH_FMV_X_H, MASK_FMV_X_H, match_opcode, 0 },
  3358. +{"fmv.h.x", 0, INSN_CLASS_F_AND_ZFH, "D,s", MATCH_FMV_H_X, MASK_FMV_H_X, match_opcode, 0 },
  3359. +{"fcvt.l.h", 64, INSN_CLASS_F_AND_ZFH, "d,S", MATCH_FCVT_L_H | MASK_RM, MASK_FCVT_L_H | MASK_RM, match_opcode, 0 },
  3360. +{"fcvt.l.h", 64, INSN_CLASS_F_AND_ZFH, "d,S,m", MATCH_FCVT_L_H, MASK_FCVT_L_H, match_opcode, 0 },
  3361. +{"fcvt.lu.h", 64, INSN_CLASS_F_AND_ZFH, "d,S", MATCH_FCVT_LU_H | MASK_RM, MASK_FCVT_LU_H | MASK_RM, match_opcode, 0 },
  3362. +{"fcvt.lu.h", 64, INSN_CLASS_F_AND_ZFH, "d,S,m", MATCH_FCVT_LU_H, MASK_FCVT_LU_H, match_opcode, 0 },
  3363. +{"fcvt.h.l", 64, INSN_CLASS_F_AND_ZFH, "D,s", MATCH_FCVT_H_L | MASK_RM, MASK_FCVT_H_L | MASK_RM, match_opcode, 0 },
  3364. +{"fcvt.h.l", 64, INSN_CLASS_F_AND_ZFH, "D,s,m", MATCH_FCVT_H_L, MASK_FCVT_H_L, match_opcode, 0 },
  3365. +{"fcvt.h.lu", 64, INSN_CLASS_F_AND_ZFH, "D,s", MATCH_FCVT_H_LU | MASK_RM, MASK_FCVT_H_L | MASK_RM, match_opcode, 0 },
  3366. +{"fcvt.h.lu", 64, INSN_CLASS_F_AND_ZFH, "D,s,m", MATCH_FCVT_H_LU, MASK_FCVT_H_LU, match_opcode, 0 },
  3367. +
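The zfh entries above deliberately mirror the existing single/double patterns: each arithmetic op gets a rounding-mode-less form (rm field forced to DYN via MASK_RM) plus an explicit-rm form, and fgt.h/fge.h are operand-swapped aliases of flt.h/fle.h. Illustrative assembly these entries accept (register choices are arbitrary):

    flh      fa0, 0(a0)            # 16-bit load (INSN_2_BYTE)
    fadd.h   fa0, fa0, fa1         # rm omitted: rounding comes from frm (rm = DYN)
    fadd.h   fa0, fa0, fa1, rtz    # explicit rounding-mode form
    fcvt.s.h fa2, fa0              # widen half to single
    fsh      fa0, 2(a0)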
  3368. /* Single-precision floating-point instruction subset */
  3369. {"frcsr", 0, INSN_CLASS_F, "d", MATCH_FRCSR, MASK_FRCSR, match_opcode, INSN_ALIAS },
  3370. {"frsr", 0, INSN_CLASS_F, "d", MATCH_FRCSR, MASK_FRCSR, match_opcode, INSN_ALIAS },
  3371. @@ -912,6 +1305,804 @@ const struct riscv_opcode riscv_opcodes[] =
  3372. {"fslw", 64, INSN_CLASS_ZBT, "d,s,<,r", MATCH_FSLW, MASK_FSLW, match_opcode, 0 },
  3373. {"fsrw", 64, INSN_CLASS_ZBT, "d,s,<,r", MATCH_FSRW, MASK_FSRW, match_opcode, 0 },
  3374. {"fsriw", 64, INSN_CLASS_ZBT, "d,s,<,r", MATCH_FSRIW, MASK_FSRIW, match_opcode, 0 },
  3375. +
  3376. +/* RVV */
  3377. +{"vsetvl", 0, INSN_CLASS_V, "d,s,t", MATCH_VSETVL, MASK_VSETVL, match_opcode, 0},
  3378. +{"vsetvli", 0, INSN_CLASS_V, "d,s,Vc", MATCH_VSETVLI, MASK_VSETVLI, match_opcode, 0},
  3379. +{"vsetivli", 0, INSN_CLASS_V, "d,Z,Vb", MATCH_VSETIVLI, MASK_VSETIVLI, match_opcode, 0},
  3380. +
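In these operand strings, d/s/t are the usual GPR fields, Z is the 5-bit immediate AVL of vsetivli, and Vc/Vb are the 11- and 10-bit vtype immediates rendered through the riscv_vsew/riscv_vlmul tables above. Worked examples of the three forms (illustrative):

    vsetvli  a0, a1, e32,m4,ta,ma   # "d,s,Vc": AVL in a1, vtype as immediate
    vsetivli a0, 16, e8,m2,tu,mu    # "d,Z,Vb": 5-bit immediate AVL
    vsetvl   a0, a1, a2             # "d,s,t": vtype taken from register a2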
  3381. +{"vle1.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VLE1V, MASK_VLE1V, match_opcode, INSN_DREF },
  3382. +{"vse1.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VSE1V, MASK_VSE1V, match_opcode, INSN_DREF },
  3383. +
  3384. +{"vle8.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE8V, MASK_VLE8V, match_vd_neq_vm, INSN_DREF },
  3385. +{"vle16.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE16V, MASK_VLE16V, match_vd_neq_vm, INSN_DREF },
  3386. +{"vle32.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE32V, MASK_VLE32V, match_vd_neq_vm, INSN_DREF },
  3387. +{"vle64.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE64V, MASK_VLE64V, match_vd_neq_vm, INSN_DREF },
  3388. +
  3389. +{"vse8.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VSE8V, MASK_VSE8V, match_vd_neq_vm, INSN_DREF },
  3390. +{"vse16.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VSE16V, MASK_VSE16V, match_vd_neq_vm, INSN_DREF },
  3391. +{"vse32.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VSE32V, MASK_VSE32V, match_vd_neq_vm, INSN_DREF },
  3392. +{"vse64.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VSE64V, MASK_VSE64V, match_vd_neq_vm, INSN_DREF },
  3393. +
  3394. +{"vlse8.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VLSE8V, MASK_VLSE8V, match_vd_neq_vm, INSN_DREF },
  3395. +{"vlse16.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VLSE16V, MASK_VLSE16V, match_vd_neq_vm, INSN_DREF },
  3396. +{"vlse32.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VLSE32V, MASK_VLSE32V, match_vd_neq_vm, INSN_DREF },
  3397. +{"vlse64.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VLSE64V, MASK_VLSE64V, match_vd_neq_vm, INSN_DREF },
  3398. +
  3399. +{"vsse8.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VSSE8V, MASK_VSSE8V, match_vd_neq_vm, INSN_DREF },
  3400. +{"vsse16.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VSSE16V, MASK_VSSE16V, match_vd_neq_vm, INSN_DREF },
  3401. +{"vsse32.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VSSE32V, MASK_VSSE32V, match_vd_neq_vm, INSN_DREF },
  3402. +{"vsse64.v", 0, INSN_CLASS_V, "Vd,0(s),tVm", MATCH_VSSE64V, MASK_VSSE64V, match_vd_neq_vm, INSN_DREF },
  3403. +
  3404. +{"vloxei8.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLOXEI8V, MASK_VLOXEI8V, match_vd_neq_vm, INSN_DREF },
  3405. +{"vloxei16.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLOXEI16V, MASK_VLOXEI16V, match_vd_neq_vm, INSN_DREF },
  3406. +{"vloxei32.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLOXEI32V, MASK_VLOXEI32V, match_vd_neq_vm, INSN_DREF },
  3407. +{"vloxei64.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLOXEI64V, MASK_VLOXEI64V, match_vd_neq_vm, INSN_DREF },
  3408. +
  3409. +{"vsoxei8.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSOXEI8V, MASK_VSOXEI8V, match_vd_neq_vm, INSN_DREF },
  3410. +{"vsoxei16.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSOXEI16V, MASK_VSOXEI16V, match_vd_neq_vm, INSN_DREF },
  3411. +{"vsoxei32.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSOXEI32V, MASK_VSOXEI32V, match_vd_neq_vm, INSN_DREF },
  3412. +{"vsoxei64.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSOXEI64V, MASK_VSOXEI64V, match_vd_neq_vm, INSN_DREF },
  3413. +
  3414. +{"vluxei8.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLUXEI8V, MASK_VLUXEI8V, match_vd_neq_vm, INSN_DREF },
  3415. +{"vluxei16.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLUXEI16V, MASK_VLUXEI16V, match_vd_neq_vm, INSN_DREF },
  3416. +{"vluxei32.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLUXEI32V, MASK_VLUXEI32V, match_vd_neq_vm, INSN_DREF },
  3417. +{"vluxei64.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VLUXEI64V, MASK_VLUXEI64V, match_vd_neq_vm, INSN_DREF },
  3418. +
  3419. +{"vsuxei8.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSUXEI8V, MASK_VSUXEI8V, match_vd_neq_vm, INSN_DREF },
  3420. +{"vsuxei16.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSUXEI16V, MASK_VSUXEI16V, match_vd_neq_vm, INSN_DREF },
  3421. +{"vsuxei32.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSUXEI32V, MASK_VSUXEI32V, match_vd_neq_vm, INSN_DREF },
  3422. +{"vsuxei64.v", 0, INSN_CLASS_V, "Vd,0(s),VtVm", MATCH_VSUXEI64V, MASK_VSUXEI64V, match_vd_neq_vm, INSN_DREF },
  3423. +
  3424. +{"vle8ff.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE8FFV, MASK_VLE8FFV, match_vd_neq_vm, INSN_DREF },
  3425. +{"vle16ff.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE16FFV, MASK_VLE16FFV, match_vd_neq_vm, INSN_DREF },
  3426. +{"vle32ff.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE32FFV, MASK_VLE32FFV, match_vd_neq_vm, INSN_DREF },
  3427. +{"vle64ff.v", 0, INSN_CLASS_V, "Vd,0(s)Vm", MATCH_VLE64FFV, MASK_VLE64FFV, match_vd_neq_vm, INSN_DREF },
  3428. +
  3429. +{"vlseg2e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E8V, MASK_VLSEG2E8V, match_vd_neq_vm, INSN_DREF },
  3430. +{"vsseg2e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG2E8V, MASK_VSSEG2E8V, match_vd_neq_vm, INSN_DREF },
  3431. +{"vlseg3e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E8V, MASK_VLSEG3E8V, match_vd_neq_vm, INSN_DREF },
  3432. +{"vsseg3e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG3E8V, MASK_VSSEG3E8V, match_vd_neq_vm, INSN_DREF },
  3433. +{"vlseg4e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E8V, MASK_VLSEG4E8V, match_vd_neq_vm, INSN_DREF },
  3434. +{"vsseg4e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG4E8V, MASK_VSSEG4E8V, match_vd_neq_vm, INSN_DREF },
  3435. +{"vlseg5e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E8V, MASK_VLSEG5E8V, match_vd_neq_vm, INSN_DREF },
  3436. +{"vsseg5e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG5E8V, MASK_VSSEG5E8V, match_vd_neq_vm, INSN_DREF },
  3437. +{"vlseg6e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E8V, MASK_VLSEG6E8V, match_vd_neq_vm, INSN_DREF },
  3438. +{"vsseg6e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG6E8V, MASK_VSSEG6E8V, match_vd_neq_vm, INSN_DREF },
  3439. +{"vlseg7e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E8V, MASK_VLSEG7E8V, match_vd_neq_vm, INSN_DREF },
  3440. +{"vsseg7e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG7E8V, MASK_VSSEG7E8V, match_vd_neq_vm, INSN_DREF },
  3441. +{"vlseg8e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E8V, MASK_VLSEG8E8V, match_vd_neq_vm, INSN_DREF },
  3442. +{"vsseg8e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG8E8V, MASK_VSSEG8E8V, match_vd_neq_vm, INSN_DREF },
  3443. +
  3444. +{"vlseg2e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E16V, MASK_VLSEG2E16V, match_vd_neq_vm, INSN_DREF },
  3445. +{"vsseg2e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG2E16V, MASK_VSSEG2E16V, match_vd_neq_vm, INSN_DREF },
  3446. +{"vlseg3e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E16V, MASK_VLSEG3E16V, match_vd_neq_vm, INSN_DREF },
  3447. +{"vsseg3e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG3E16V, MASK_VSSEG3E16V, match_vd_neq_vm, INSN_DREF },
  3448. +{"vlseg4e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E16V, MASK_VLSEG4E16V, match_vd_neq_vm, INSN_DREF },
  3449. +{"vsseg4e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG4E16V, MASK_VSSEG4E16V, match_vd_neq_vm, INSN_DREF },
  3450. +{"vlseg5e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E16V, MASK_VLSEG5E16V, match_vd_neq_vm, INSN_DREF },
  3451. +{"vsseg5e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG5E16V, MASK_VSSEG5E16V, match_vd_neq_vm, INSN_DREF },
  3452. +{"vlseg6e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E16V, MASK_VLSEG6E16V, match_vd_neq_vm, INSN_DREF },
  3453. +{"vsseg6e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG6E16V, MASK_VSSEG6E16V, match_vd_neq_vm, INSN_DREF },
  3454. +{"vlseg7e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E16V, MASK_VLSEG7E16V, match_vd_neq_vm, INSN_DREF },
  3455. +{"vsseg7e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG7E16V, MASK_VSSEG7E16V, match_vd_neq_vm, INSN_DREF },
  3456. +{"vlseg8e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E16V, MASK_VLSEG8E16V, match_vd_neq_vm, INSN_DREF },
  3457. +{"vsseg8e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG8E16V, MASK_VSSEG8E16V, match_vd_neq_vm, INSN_DREF },
  3458. +
  3459. +{"vlseg2e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E32V, MASK_VLSEG2E32V, match_vd_neq_vm, INSN_DREF },
  3460. +{"vsseg2e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG2E32V, MASK_VSSEG2E32V, match_vd_neq_vm, INSN_DREF },
  3461. +{"vlseg3e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E32V, MASK_VLSEG3E32V, match_vd_neq_vm, INSN_DREF },
  3462. +{"vsseg3e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG3E32V, MASK_VSSEG3E32V, match_vd_neq_vm, INSN_DREF },
  3463. +{"vlseg4e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E32V, MASK_VLSEG4E32V, match_vd_neq_vm, INSN_DREF },
  3464. +{"vsseg4e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG4E32V, MASK_VSSEG4E32V, match_vd_neq_vm, INSN_DREF },
  3465. +{"vlseg5e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E32V, MASK_VLSEG5E32V, match_vd_neq_vm, INSN_DREF },
  3466. +{"vsseg5e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG5E32V, MASK_VSSEG5E32V, match_vd_neq_vm, INSN_DREF },
  3467. +{"vlseg6e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E32V, MASK_VLSEG6E32V, match_vd_neq_vm, INSN_DREF },
  3468. +{"vsseg6e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG6E32V, MASK_VSSEG6E32V, match_vd_neq_vm, INSN_DREF },
  3469. +{"vlseg7e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E32V, MASK_VLSEG7E32V, match_vd_neq_vm, INSN_DREF },
  3470. +{"vsseg7e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG7E32V, MASK_VSSEG7E32V, match_vd_neq_vm, INSN_DREF },
  3471. +{"vlseg8e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E32V, MASK_VLSEG8E32V, match_vd_neq_vm, INSN_DREF },
  3472. +{"vsseg8e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG8E32V, MASK_VSSEG8E32V, match_vd_neq_vm, INSN_DREF },
  3473. +
  3474. +{"vlseg2e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E64V, MASK_VLSEG2E64V, match_vd_neq_vm, INSN_DREF },
  3475. +{"vsseg2e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG2E64V, MASK_VSSEG2E64V, match_vd_neq_vm, INSN_DREF },
  3476. +{"vlseg3e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E64V, MASK_VLSEG3E64V, match_vd_neq_vm, INSN_DREF },
  3477. +{"vsseg3e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG3E64V, MASK_VSSEG3E64V, match_vd_neq_vm, INSN_DREF },
  3478. +{"vlseg4e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E64V, MASK_VLSEG4E64V, match_vd_neq_vm, INSN_DREF },
  3479. +{"vsseg4e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG4E64V, MASK_VSSEG4E64V, match_vd_neq_vm, INSN_DREF },
  3480. +{"vlseg5e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E64V, MASK_VLSEG5E64V, match_vd_neq_vm, INSN_DREF },
  3481. +{"vsseg5e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG5E64V, MASK_VSSEG5E64V, match_vd_neq_vm, INSN_DREF },
  3482. +{"vlseg6e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E64V, MASK_VLSEG6E64V, match_vd_neq_vm, INSN_DREF },
  3483. +{"vsseg6e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG6E64V, MASK_VSSEG6E64V, match_vd_neq_vm, INSN_DREF },
  3484. +{"vlseg7e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E64V, MASK_VLSEG7E64V, match_vd_neq_vm, INSN_DREF },
  3485. +{"vsseg7e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG7E64V, MASK_VSSEG7E64V, match_vd_neq_vm, INSN_DREF },
  3486. +{"vlseg8e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E64V, MASK_VLSEG8E64V, match_vd_neq_vm, INSN_DREF },
  3487. +{"vsseg8e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VSSEG8E64V, MASK_VSSEG8E64V, match_vd_neq_vm, INSN_DREF },
  3488. +
  3489. +{"vlsseg2e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG2E8V, MASK_VLSSEG2E8V, match_vd_neq_vm, INSN_DREF },
  3490. +{"vssseg2e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG2E8V, MASK_VSSSEG2E8V, match_vd_neq_vm, INSN_DREF },
  3491. +{"vlsseg3e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG3E8V, MASK_VLSSEG3E8V, match_vd_neq_vm, INSN_DREF },
  3492. +{"vssseg3e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG3E8V, MASK_VSSSEG3E8V, match_vd_neq_vm, INSN_DREF },
  3493. +{"vlsseg4e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG4E8V, MASK_VLSSEG4E8V, match_vd_neq_vm, INSN_DREF },
  3494. +{"vssseg4e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG4E8V, MASK_VSSSEG4E8V, match_vd_neq_vm, INSN_DREF },
  3495. +{"vlsseg5e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG5E8V, MASK_VLSSEG5E8V, match_vd_neq_vm, INSN_DREF },
  3496. +{"vssseg5e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG5E8V, MASK_VSSSEG5E8V, match_vd_neq_vm, INSN_DREF },
  3497. +{"vlsseg6e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG6E8V, MASK_VLSSEG6E8V, match_vd_neq_vm, INSN_DREF },
  3498. +{"vssseg6e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG6E8V, MASK_VSSSEG6E8V, match_vd_neq_vm, INSN_DREF },
  3499. +{"vlsseg7e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG7E8V, MASK_VLSSEG7E8V, match_vd_neq_vm, INSN_DREF },
  3500. +{"vssseg7e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG7E8V, MASK_VSSSEG7E8V, match_vd_neq_vm, INSN_DREF },
  3501. +{"vlsseg8e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG8E8V, MASK_VLSSEG8E8V, match_vd_neq_vm, INSN_DREF },
  3502. +{"vssseg8e8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG8E8V, MASK_VSSSEG8E8V, match_vd_neq_vm, INSN_DREF },
  3503. +
  3504. +{"vlsseg2e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG2E16V, MASK_VLSSEG2E16V, match_vd_neq_vm, INSN_DREF },
  3505. +{"vssseg2e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG2E16V, MASK_VSSSEG2E16V, match_vd_neq_vm, INSN_DREF },
  3506. +{"vlsseg3e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG3E16V, MASK_VLSSEG3E16V, match_vd_neq_vm, INSN_DREF },
  3507. +{"vssseg3e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG3E16V, MASK_VSSSEG3E16V, match_vd_neq_vm, INSN_DREF },
  3508. +{"vlsseg4e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG4E16V, MASK_VLSSEG4E16V, match_vd_neq_vm, INSN_DREF },
  3509. +{"vssseg4e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG4E16V, MASK_VSSSEG4E16V, match_vd_neq_vm, INSN_DREF },
  3510. +{"vlsseg5e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG5E16V, MASK_VLSSEG5E16V, match_vd_neq_vm, INSN_DREF },
  3511. +{"vssseg5e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG5E16V, MASK_VSSSEG5E16V, match_vd_neq_vm, INSN_DREF },
  3512. +{"vlsseg6e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG6E16V, MASK_VLSSEG6E16V, match_vd_neq_vm, INSN_DREF },
  3513. +{"vssseg6e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG6E16V, MASK_VSSSEG6E16V, match_vd_neq_vm, INSN_DREF },
  3514. +{"vlsseg7e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG7E16V, MASK_VLSSEG7E16V, match_vd_neq_vm, INSN_DREF },
  3515. +{"vssseg7e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG7E16V, MASK_VSSSEG7E16V, match_vd_neq_vm, INSN_DREF },
  3516. +{"vlsseg8e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG8E16V, MASK_VLSSEG8E16V, match_vd_neq_vm, INSN_DREF },
  3517. +{"vssseg8e16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG8E16V, MASK_VSSSEG8E16V, match_vd_neq_vm, INSN_DREF },
+
+{"vlsseg2e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG2E32V, MASK_VLSSEG2E32V, match_vd_neq_vm, INSN_DREF },
+{"vssseg2e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG2E32V, MASK_VSSSEG2E32V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg3e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG3E32V, MASK_VLSSEG3E32V, match_vd_neq_vm, INSN_DREF },
+{"vssseg3e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG3E32V, MASK_VSSSEG3E32V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg4e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG4E32V, MASK_VLSSEG4E32V, match_vd_neq_vm, INSN_DREF },
+{"vssseg4e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG4E32V, MASK_VSSSEG4E32V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg5e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG5E32V, MASK_VLSSEG5E32V, match_vd_neq_vm, INSN_DREF },
+{"vssseg5e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG5E32V, MASK_VSSSEG5E32V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg6e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG6E32V, MASK_VLSSEG6E32V, match_vd_neq_vm, INSN_DREF },
+{"vssseg6e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG6E32V, MASK_VSSSEG6E32V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg7e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG7E32V, MASK_VLSSEG7E32V, match_vd_neq_vm, INSN_DREF },
+{"vssseg7e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG7E32V, MASK_VSSSEG7E32V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg8e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG8E32V, MASK_VLSSEG8E32V, match_vd_neq_vm, INSN_DREF },
+{"vssseg8e32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG8E32V, MASK_VSSSEG8E32V, match_vd_neq_vm, INSN_DREF },
+
+{"vlsseg2e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG2E64V, MASK_VLSSEG2E64V, match_vd_neq_vm, INSN_DREF },
+{"vssseg2e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG2E64V, MASK_VSSSEG2E64V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg3e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG3E64V, MASK_VLSSEG3E64V, match_vd_neq_vm, INSN_DREF },
+{"vssseg3e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG3E64V, MASK_VSSSEG3E64V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg4e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG4E64V, MASK_VLSSEG4E64V, match_vd_neq_vm, INSN_DREF },
+{"vssseg4e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG4E64V, MASK_VSSSEG4E64V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg5e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG5E64V, MASK_VLSSEG5E64V, match_vd_neq_vm, INSN_DREF },
+{"vssseg5e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG5E64V, MASK_VSSSEG5E64V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg6e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG6E64V, MASK_VLSSEG6E64V, match_vd_neq_vm, INSN_DREF },
+{"vssseg6e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG6E64V, MASK_VSSSEG6E64V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg7e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG7E64V, MASK_VLSSEG7E64V, match_vd_neq_vm, INSN_DREF },
+{"vssseg7e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG7E64V, MASK_VSSSEG7E64V, match_vd_neq_vm, INSN_DREF },
+{"vlsseg8e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VLSSEG8E64V, MASK_VLSSEG8E64V, match_vd_neq_vm, INSN_DREF },
+{"vssseg8e64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),tVm", MATCH_VSSSEG8E64V, MASK_VSSSEG8E64V, match_vd_neq_vm, INSN_DREF },
+
+{"vloxseg2ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG2EI8V, MASK_VLOXSEG2EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg2ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG2EI8V, MASK_VSOXSEG2EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg3ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG3EI8V, MASK_VLOXSEG3EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg3ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG3EI8V, MASK_VSOXSEG3EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg4ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG4EI8V, MASK_VLOXSEG4EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg4ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG4EI8V, MASK_VSOXSEG4EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg5ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG5EI8V, MASK_VLOXSEG5EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg5ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG5EI8V, MASK_VSOXSEG5EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg6ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG6EI8V, MASK_VLOXSEG6EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg6ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG6EI8V, MASK_VSOXSEG6EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg7ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG7EI8V, MASK_VLOXSEG7EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg7ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG7EI8V, MASK_VSOXSEG7EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg8ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG8EI8V, MASK_VLOXSEG8EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg8ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG8EI8V, MASK_VSOXSEG8EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+
+{"vloxseg2ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG2EI16V, MASK_VLOXSEG2EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg2ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG2EI16V, MASK_VSOXSEG2EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg3ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG3EI16V, MASK_VLOXSEG3EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg3ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG3EI16V, MASK_VSOXSEG3EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg4ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG4EI16V, MASK_VLOXSEG4EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg4ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG4EI16V, MASK_VSOXSEG4EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg5ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG5EI16V, MASK_VLOXSEG5EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg5ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG5EI16V, MASK_VSOXSEG5EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg6ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG6EI16V, MASK_VLOXSEG6EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg6ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG6EI16V, MASK_VSOXSEG6EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg7ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG7EI16V, MASK_VLOXSEG7EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg7ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG7EI16V, MASK_VSOXSEG7EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg8ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG8EI16V, MASK_VLOXSEG8EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg8ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG8EI16V, MASK_VSOXSEG8EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+
+{"vloxseg2ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG2EI32V, MASK_VLOXSEG2EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg2ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG2EI32V, MASK_VSOXSEG2EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg3ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG3EI32V, MASK_VLOXSEG3EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg3ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG3EI32V, MASK_VSOXSEG3EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg4ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG4EI32V, MASK_VLOXSEG4EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg4ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG4EI32V, MASK_VSOXSEG4EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg5ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG5EI32V, MASK_VLOXSEG5EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg5ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG5EI32V, MASK_VSOXSEG5EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg6ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG6EI32V, MASK_VLOXSEG6EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg6ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG6EI32V, MASK_VSOXSEG6EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg7ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG7EI32V, MASK_VLOXSEG7EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg7ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG7EI32V, MASK_VSOXSEG7EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg8ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG8EI32V, MASK_VLOXSEG8EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg8ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG8EI32V, MASK_VSOXSEG8EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+
+{"vloxseg2ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG2EI64V, MASK_VLOXSEG2EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg2ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG2EI64V, MASK_VSOXSEG2EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg3ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG3EI64V, MASK_VLOXSEG3EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg3ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG3EI64V, MASK_VSOXSEG3EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg4ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG4EI64V, MASK_VLOXSEG4EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg4ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG4EI64V, MASK_VSOXSEG4EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg5ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG5EI64V, MASK_VLOXSEG5EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg5ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG5EI64V, MASK_VSOXSEG5EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg6ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG6EI64V, MASK_VLOXSEG6EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg6ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG6EI64V, MASK_VSOXSEG6EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg7ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG7EI64V, MASK_VLOXSEG7EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg7ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG7EI64V, MASK_VSOXSEG7EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vloxseg8ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLOXSEG8EI64V, MASK_VLOXSEG8EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsoxseg8ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSOXSEG8EI64V, MASK_VSOXSEG8EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+
+{"vluxseg2ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG2EI8V, MASK_VLUXSEG2EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg2ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG2EI8V, MASK_VSUXSEG2EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg3ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG3EI8V, MASK_VLUXSEG3EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg3ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG3EI8V, MASK_VSUXSEG3EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg4ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG4EI8V, MASK_VLUXSEG4EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg4ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG4EI8V, MASK_VSUXSEG4EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg5ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG5EI8V, MASK_VLUXSEG5EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg5ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG5EI8V, MASK_VSUXSEG5EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg6ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG6EI8V, MASK_VLUXSEG6EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg6ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG6EI8V, MASK_VSUXSEG6EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg7ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG7EI8V, MASK_VLUXSEG7EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg7ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG7EI8V, MASK_VSUXSEG7EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg8ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG8EI8V, MASK_VLUXSEG8EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg8ei8.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG8EI8V, MASK_VSUXSEG8EI8V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+
+{"vluxseg2ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG2EI16V, MASK_VLUXSEG2EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg2ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG2EI16V, MASK_VSUXSEG2EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg3ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG3EI16V, MASK_VLUXSEG3EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg3ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG3EI16V, MASK_VSUXSEG3EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg4ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG4EI16V, MASK_VLUXSEG4EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg4ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG4EI16V, MASK_VSUXSEG4EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg5ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG5EI16V, MASK_VLUXSEG5EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg5ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG5EI16V, MASK_VSUXSEG5EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg6ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG6EI16V, MASK_VLUXSEG6EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg6ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG6EI16V, MASK_VSUXSEG6EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg7ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG7EI16V, MASK_VLUXSEG7EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg7ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG7EI16V, MASK_VSUXSEG7EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg8ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG8EI16V, MASK_VLUXSEG8EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg8ei16.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG8EI16V, MASK_VSUXSEG8EI16V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+
+{"vluxseg2ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG2EI32V, MASK_VLUXSEG2EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg2ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG2EI32V, MASK_VSUXSEG2EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg3ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG3EI32V, MASK_VLUXSEG3EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg3ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG3EI32V, MASK_VSUXSEG3EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg4ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG4EI32V, MASK_VLUXSEG4EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg4ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG4EI32V, MASK_VSUXSEG4EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg5ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG5EI32V, MASK_VLUXSEG5EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg5ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG5EI32V, MASK_VSUXSEG5EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg6ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG6EI32V, MASK_VLUXSEG6EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg6ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG6EI32V, MASK_VSUXSEG6EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg7ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG7EI32V, MASK_VLUXSEG7EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg7ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG7EI32V, MASK_VSUXSEG7EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg8ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG8EI32V, MASK_VLUXSEG8EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg8ei32.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG8EI32V, MASK_VSUXSEG8EI32V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+
+{"vluxseg2ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG2EI64V, MASK_VLUXSEG2EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg2ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG2EI64V, MASK_VSUXSEG2EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg3ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG3EI64V, MASK_VLUXSEG3EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg3ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG3EI64V, MASK_VSUXSEG3EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg4ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG4EI64V, MASK_VLUXSEG4EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg4ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG4EI64V, MASK_VSUXSEG4EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg5ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG5EI64V, MASK_VLUXSEG5EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg5ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG5EI64V, MASK_VSUXSEG5EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg6ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG6EI64V, MASK_VLUXSEG6EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg6ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG6EI64V, MASK_VSUXSEG6EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg7ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG7EI64V, MASK_VLUXSEG7EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg7ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG7EI64V, MASK_VSUXSEG7EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vluxseg8ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VLUXSEG8EI64V, MASK_VLUXSEG8EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+{"vsuxseg8ei64.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s),VtVm", MATCH_VSUXSEG8EI64V, MASK_VSUXSEG8EI64V, match_vd_neq_vs2_neq_vm, INSN_DREF },
+
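+/* Fault-only-first segment loads.  Per the RVV spec, the "ff" suffix means
+   only element 0 may raise an exception; a fault on a later element instead
+   truncates vl.  */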
+{"vlseg2e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E8FFV, MASK_VLSEG2E8FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg3e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E8FFV, MASK_VLSEG3E8FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg4e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E8FFV, MASK_VLSEG4E8FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg5e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E8FFV, MASK_VLSEG5E8FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg6e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E8FFV, MASK_VLSEG6E8FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg7e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E8FFV, MASK_VLSEG7E8FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg8e8ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E8FFV, MASK_VLSEG8E8FFV, match_vd_neq_vm, INSN_DREF },
+
+{"vlseg2e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E16FFV, MASK_VLSEG2E16FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg3e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E16FFV, MASK_VLSEG3E16FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg4e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E16FFV, MASK_VLSEG4E16FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg5e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E16FFV, MASK_VLSEG5E16FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg6e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E16FFV, MASK_VLSEG6E16FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg7e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E16FFV, MASK_VLSEG7E16FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg8e16ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E16FFV, MASK_VLSEG8E16FFV, match_vd_neq_vm, INSN_DREF },
+
+{"vlseg2e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E32FFV, MASK_VLSEG2E32FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg3e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E32FFV, MASK_VLSEG3E32FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg4e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E32FFV, MASK_VLSEG4E32FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg5e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E32FFV, MASK_VLSEG5E32FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg6e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E32FFV, MASK_VLSEG6E32FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg7e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E32FFV, MASK_VLSEG7E32FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg8e32ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E32FFV, MASK_VLSEG8E32FFV, match_vd_neq_vm, INSN_DREF },
+
+{"vlseg2e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG2E64FFV, MASK_VLSEG2E64FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg3e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG3E64FFV, MASK_VLSEG3E64FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg4e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG4E64FFV, MASK_VLSEG4E64FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg5e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG5E64FFV, MASK_VLSEG5E64FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg6e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG6E64FFV, MASK_VLSEG6E64FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg7e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG7E64FFV, MASK_VLSEG7E64FFV, match_vd_neq_vm, INSN_DREF },
+{"vlseg8e64ff.v", 0, INSN_CLASS_V_OR_ZVLSSEG, "Vd,0(s)Vm", MATCH_VLSEG8E64FFV, MASK_VLSEG8E64FFV, match_vd_neq_vm, INSN_DREF },
+
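+/* Whole-register loads and stores.  Note that vl<N>r.v shares MATCH/MASK
+   with vl<N>re8.v, so the EEW-less spelling is kept only as an INSN_ALIAS
+   for the EEW=8 encoding.  */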
+{"vl1r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL1RE8V, MASK_VL1RE8V, match_vls_nf_rv, INSN_DREF|INSN_ALIAS },
+{"vl1re8.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL1RE8V, MASK_VL1RE8V, match_vls_nf_rv, INSN_DREF },
+{"vl1re16.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL1RE16V, MASK_VL1RE16V, match_vls_nf_rv, INSN_DREF },
+{"vl1re32.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL1RE32V, MASK_VL1RE32V, match_vls_nf_rv, INSN_DREF },
+{"vl1re64.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL1RE64V, MASK_VL1RE64V, match_vls_nf_rv, INSN_DREF },
+
+{"vl2r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL2RE8V, MASK_VL2RE8V, match_vls_nf_rv, INSN_DREF|INSN_ALIAS },
+{"vl2re8.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL2RE8V, MASK_VL2RE8V, match_vls_nf_rv, INSN_DREF },
+{"vl2re16.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL2RE16V, MASK_VL2RE16V, match_vls_nf_rv, INSN_DREF },
+{"vl2re32.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL2RE32V, MASK_VL2RE32V, match_vls_nf_rv, INSN_DREF },
+{"vl2re64.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL2RE64V, MASK_VL2RE64V, match_vls_nf_rv, INSN_DREF },
+
+{"vl4r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL4RE8V, MASK_VL4RE8V, match_vls_nf_rv, INSN_DREF|INSN_ALIAS },
+{"vl4re8.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL4RE8V, MASK_VL4RE8V, match_vls_nf_rv, INSN_DREF },
+{"vl4re16.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL4RE16V, MASK_VL4RE16V, match_vls_nf_rv, INSN_DREF },
+{"vl4re32.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL4RE32V, MASK_VL4RE32V, match_vls_nf_rv, INSN_DREF },
+{"vl4re64.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL4RE64V, MASK_VL4RE64V, match_vls_nf_rv, INSN_DREF },
+
+{"vl8r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL8RE8V, MASK_VL8RE8V, match_vls_nf_rv, INSN_DREF|INSN_ALIAS },
+{"vl8re8.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL8RE8V, MASK_VL8RE8V, match_vls_nf_rv, INSN_DREF },
+{"vl8re16.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL8RE16V, MASK_VL8RE16V, match_vls_nf_rv, INSN_DREF },
+{"vl8re32.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL8RE32V, MASK_VL8RE32V, match_vls_nf_rv, INSN_DREF },
+{"vl8re64.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VL8RE64V, MASK_VL8RE64V, match_vls_nf_rv, INSN_DREF },
+
+{"vs1r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VS1RV, MASK_VS1RV, match_vls_nf_rv, INSN_DREF },
+{"vs2r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VS2RV, MASK_VS2RV, match_vls_nf_rv, INSN_DREF },
+{"vs4r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VS4RV, MASK_VS4RV, match_vls_nf_rv, INSN_DREF },
+{"vs8r.v", 0, INSN_CLASS_V, "Vd,0(s)", MATCH_VS8RV, MASK_VS8RV, match_vls_nf_rv, INSN_DREF },
+
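+/* Indexed vector AMOs, gated by INSN_CLASS_V_OR_ZVAMO.  These come from the
+   draft Zvamo extension, which was not carried into the ratified V 1.0
+   spec.  */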
+{"vamoaddei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOADDEI8V, MASK_VAMOADDEI8V, match_vd_neq_vm, INSN_DREF},
+{"vamoswapei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOSWAPEI8V, MASK_VAMOSWAPEI8V, match_vd_neq_vm, INSN_DREF},
+{"vamoxorei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOXOREI8V, MASK_VAMOXOREI8V, match_vd_neq_vm, INSN_DREF},
+{"vamoandei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOANDEI8V, MASK_VAMOANDEI8V, match_vd_neq_vm, INSN_DREF},
+{"vamoorei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOOREI8V, MASK_VAMOOREI8V, match_vd_neq_vm, INSN_DREF},
+{"vamominei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINEI8V, MASK_VAMOMINEI8V, match_vd_neq_vm, INSN_DREF},
+{"vamomaxei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXEI8V, MASK_VAMOMAXEI8V, match_vd_neq_vm, INSN_DREF},
+{"vamominuei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINUEI8V, MASK_VAMOMINUEI8V, match_vd_neq_vm, INSN_DREF},
+{"vamomaxuei8.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXUEI8V, MASK_VAMOMAXUEI8V, match_vd_neq_vm, INSN_DREF},
+
+{"vamoaddei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOADDEI16V, MASK_VAMOADDEI16V, match_vd_neq_vm, INSN_DREF},
+{"vamoswapei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOSWAPEI16V, MASK_VAMOSWAPEI16V, match_vd_neq_vm, INSN_DREF},
+{"vamoxorei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOXOREI16V, MASK_VAMOXOREI16V, match_vd_neq_vm, INSN_DREF},
+{"vamoandei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOANDEI16V, MASK_VAMOANDEI16V, match_vd_neq_vm, INSN_DREF},
+{"vamoorei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOOREI16V, MASK_VAMOOREI16V, match_vd_neq_vm, INSN_DREF},
+{"vamominei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINEI16V, MASK_VAMOMINEI16V, match_vd_neq_vm, INSN_DREF},
+{"vamomaxei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXEI16V, MASK_VAMOMAXEI16V, match_vd_neq_vm, INSN_DREF},
+{"vamominuei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINUEI16V, MASK_VAMOMINUEI16V, match_vd_neq_vm, INSN_DREF},
+{"vamomaxuei16.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXUEI16V, MASK_VAMOMAXUEI16V, match_vd_neq_vm, INSN_DREF},
+
+{"vamoaddei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOADDEI32V, MASK_VAMOADDEI32V, match_vd_neq_vm, INSN_DREF},
+{"vamoswapei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOSWAPEI32V, MASK_VAMOSWAPEI32V, match_vd_neq_vm, INSN_DREF},
+{"vamoxorei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOXOREI32V, MASK_VAMOXOREI32V, match_vd_neq_vm, INSN_DREF},
+{"vamoandei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOANDEI32V, MASK_VAMOANDEI32V, match_vd_neq_vm, INSN_DREF},
+{"vamoorei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOOREI32V, MASK_VAMOOREI32V, match_vd_neq_vm, INSN_DREF},
+{"vamominei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINEI32V, MASK_VAMOMINEI32V, match_vd_neq_vm, INSN_DREF},
+{"vamomaxei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXEI32V, MASK_VAMOMAXEI32V, match_vd_neq_vm, INSN_DREF},
+{"vamominuei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINUEI32V, MASK_VAMOMINUEI32V, match_vd_neq_vm, INSN_DREF},
+{"vamomaxuei32.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXUEI32V, MASK_VAMOMAXUEI32V, match_vd_neq_vm, INSN_DREF},
+
+{"vamoaddei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOADDEI64V, MASK_VAMOADDEI64V, match_vd_neq_vm, INSN_DREF},
+{"vamoswapei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOSWAPEI64V, MASK_VAMOSWAPEI64V, match_vd_neq_vm, INSN_DREF},
+{"vamoxorei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOXOREI64V, MASK_VAMOXOREI64V, match_vd_neq_vm, INSN_DREF},
+{"vamoandei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOANDEI64V, MASK_VAMOANDEI64V, match_vd_neq_vm, INSN_DREF},
+{"vamoorei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOOREI64V, MASK_VAMOOREI64V, match_vd_neq_vm, INSN_DREF},
+{"vamominei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINEI64V, MASK_VAMOMINEI64V, match_vd_neq_vm, INSN_DREF},
+{"vamomaxei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXEI64V, MASK_VAMOMAXEI64V, match_vd_neq_vm, INSN_DREF},
+{"vamominuei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMINUEI64V, MASK_VAMOMINUEI64V, match_vd_neq_vm, INSN_DREF},
+{"vamomaxuei64.v", 0, INSN_CLASS_V_OR_ZVAMO, "Ve,0(s),Vt,VfVm", MATCH_VAMOMAXUEI64V, MASK_VAMOMAXUEI64V, match_vd_neq_vm, INSN_DREF},
+
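+/* vneg.v vd,vs2 is an alias for vrsub.vx vd,vs2,x0: OR-ing MASK_RS1 into the
+   mask pins the rs1 field to zero, so only the x0 form matches.  */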
+{"vneg.v", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VRSUBVX, MASK_VRSUBVX | MASK_RS1, match_vd_neq_vm, INSN_ALIAS },
+
+{"vadd.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VADDVV, MASK_VADDVV, match_vd_neq_vm, 0 },
+{"vadd.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VADDVX, MASK_VADDVX, match_vd_neq_vm, 0 },
+{"vadd.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VADDVI, MASK_VADDVI, match_vd_neq_vm, 0 },
+{"vsub.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSUBVV, MASK_VSUBVV, match_vd_neq_vm, 0 },
+{"vsub.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSUBVX, MASK_VSUBVX, match_vd_neq_vm, 0 },
+{"vrsub.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VRSUBVX, MASK_VRSUBVX, match_vd_neq_vm, 0 },
+{"vrsub.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VRSUBVI, MASK_VRSUBVI, match_vd_neq_vm, 0 },
+
+{"vwcvt.x.x.v", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VWCVTXXV, MASK_VWCVTXXV, match_widen_vd_neq_vs2_neq_vm, INSN_ALIAS },
+{"vwcvtu.x.x.v", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VWCVTUXXV, MASK_VWCVTUXXV, match_widen_vd_neq_vs2_neq_vm, INSN_ALIAS },
+
+{"vwaddu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWADDUVV, MASK_VWADDUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
+{"vwaddu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWADDUVX, MASK_VWADDUVX, match_widen_vd_neq_vs2_neq_vm, 0 },
+{"vwsubu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWSUBUVV, MASK_VWSUBUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
+{"vwsubu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWSUBUVX, MASK_VWSUBUVX, match_widen_vd_neq_vs2_neq_vm, 0 },
+{"vwadd.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWADDVV, MASK_VWADDVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
+{"vwadd.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWADDVX, MASK_VWADDVX, match_widen_vd_neq_vs2_neq_vm, 0 },
+{"vwsub.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWSUBVV, MASK_VWSUBVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
+{"vwsub.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWSUBVX, MASK_VWSUBVX, match_widen_vd_neq_vs2_neq_vm, 0 },
+{"vwaddu.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWADDUWV, MASK_VWADDUWV, match_widen_vd_neq_vs1_neq_vm, 0 },
+{"vwaddu.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWADDUWX, MASK_VWADDUWX, match_widen_vd_neq_vm, 0 },
+{"vwsubu.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWSUBUWV, MASK_VWSUBUWV, match_widen_vd_neq_vs1_neq_vm, 0 },
+{"vwsubu.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWSUBUWX, MASK_VWSUBUWX, match_widen_vd_neq_vm, 0 },
+{"vwadd.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWADDWV, MASK_VWADDWV, match_widen_vd_neq_vs1_neq_vm, 0 },
+{"vwadd.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWADDWX, MASK_VWADDWX, match_widen_vd_neq_vm, 0 },
+{"vwsub.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWSUBWV, MASK_VWSUBWV, match_widen_vd_neq_vs1_neq_vm, 0 },
+{"vwsub.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWSUBWX, MASK_VWSUBWX, match_widen_vd_neq_vm, 0 },
+
+{"vzext.vf2", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VZEXT_VF2, MASK_VZEXT_VF2, match_vd_neq_vm, 0 },
+{"vsext.vf2", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VSEXT_VF2, MASK_VSEXT_VF2, match_vd_neq_vm, 0 },
+{"vzext.vf4", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VZEXT_VF4, MASK_VZEXT_VF4, match_vd_neq_vm, 0 },
+{"vsext.vf4", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VSEXT_VF4, MASK_VSEXT_VF4, match_vd_neq_vm, 0 },
+{"vzext.vf8", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VZEXT_VF8, MASK_VZEXT_VF8, match_vd_neq_vm, 0 },
+{"vsext.vf8", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VSEXT_VF8, MASK_VSEXT_VF8, match_vd_neq_vm, 0 },
+
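+/* Add/subtract-with-carry.  vadc/vsbc always consume the v0 carry operand
+   and use match_vd_neq_vm so vd cannot be v0; the carry-out forms
+   vmadc/vmsbc write a mask result and are unrestricted (match_opcode).  */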
+{"vadc.vvm", 0, INSN_CLASS_V, "Vd,Vt,Vs,V0", MATCH_VADCVVM, MASK_VADCVVM, match_vd_neq_vm, 0 },
+{"vadc.vxm", 0, INSN_CLASS_V, "Vd,Vt,s,V0", MATCH_VADCVXM, MASK_VADCVXM, match_vd_neq_vm, 0 },
+{"vadc.vim", 0, INSN_CLASS_V, "Vd,Vt,Vi,V0", MATCH_VADCVIM, MASK_VADCVIM, match_vd_neq_vm, 0 },
+{"vmadc.vvm", 0, INSN_CLASS_V, "Vd,Vt,Vs,V0", MATCH_VMADCVVM, MASK_VMADCVVM, match_opcode, 0 },
+{"vmadc.vxm", 0, INSN_CLASS_V, "Vd,Vt,s,V0", MATCH_VMADCVXM, MASK_VMADCVXM, match_opcode, 0 },
+{"vmadc.vim", 0, INSN_CLASS_V, "Vd,Vt,Vi,V0", MATCH_VMADCVIM, MASK_VMADCVIM, match_opcode, 0 },
+{"vmadc.vv", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMADCVV, MASK_VMADCVV, match_opcode, 0 },
+{"vmadc.vx", 0, INSN_CLASS_V, "Vd,Vt,s", MATCH_VMADCVX, MASK_VMADCVX, match_opcode, 0 },
+{"vmadc.vi", 0, INSN_CLASS_V, "Vd,Vt,Vi", MATCH_VMADCVI, MASK_VMADCVI, match_opcode, 0 },
+{"vsbc.vvm", 0, INSN_CLASS_V, "Vd,Vt,Vs,V0", MATCH_VSBCVVM, MASK_VSBCVVM, match_vd_neq_vm, 0 },
+{"vsbc.vxm", 0, INSN_CLASS_V, "Vd,Vt,s,V0", MATCH_VSBCVXM, MASK_VSBCVXM, match_vd_neq_vm, 0 },
+{"vmsbc.vvm", 0, INSN_CLASS_V, "Vd,Vt,Vs,V0", MATCH_VMSBCVVM, MASK_VMSBCVVM, match_opcode, 0 },
+{"vmsbc.vxm", 0, INSN_CLASS_V, "Vd,Vt,s,V0", MATCH_VMSBCVXM, MASK_VMSBCVXM, match_opcode, 0 },
+{"vmsbc.vv", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMSBCVV, MASK_VMSBCVV, match_opcode, 0 },
+{"vmsbc.vx", 0, INSN_CLASS_V, "Vd,Vt,s", MATCH_VMSBCVX, MASK_VMSBCVX, match_opcode, 0 },
+
+{"vnot.v", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VNOTV, MASK_VNOTV, match_vd_neq_vm, INSN_ALIAS },
+
+{"vand.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VANDVV, MASK_VANDVV, match_vd_neq_vm, 0 },
+{"vand.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VANDVX, MASK_VANDVX, match_vd_neq_vm, 0 },
+{"vand.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VANDVI, MASK_VANDVI, match_vd_neq_vm, 0 },
+{"vor.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VORVV, MASK_VORVV, match_vd_neq_vm, 0 },
+{"vor.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VORVX, MASK_VORVX, match_vd_neq_vm, 0 },
+{"vor.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VORVI, MASK_VORVI, match_vd_neq_vm, 0 },
+{"vxor.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VXORVV, MASK_VXORVV, match_vd_neq_vm, 0 },
+{"vxor.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VXORVX, MASK_VXORVX, match_vd_neq_vm, 0 },
+{"vxor.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VXORVI, MASK_VXORVI, match_vd_neq_vm, 0 },
+
+{"vsll.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSLLVV, MASK_VSLLVV, match_vd_neq_vm, 0 },
+{"vsll.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSLLVX, MASK_VSLLVX, match_vd_neq_vm, 0 },
+{"vsll.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSLLVI, MASK_VSLLVI, match_vd_neq_vm, 0 },
+{"vsrl.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSRLVV, MASK_VSRLVV, match_vd_neq_vm, 0 },
+{"vsrl.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSRLVX, MASK_VSRLVX, match_vd_neq_vm, 0 },
+{"vsrl.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSRLVI, MASK_VSRLVI, match_vd_neq_vm, 0 },
+{"vsra.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSRAVV, MASK_VSRAVV, match_vd_neq_vm, 0 },
+{"vsra.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSRAVX, MASK_VSRAVX, match_vd_neq_vm, 0 },
+{"vsra.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSRAVI, MASK_VSRAVI, match_vd_neq_vm, 0 },
+
+{"vncvt.x.x.w",0, INSN_CLASS_V, "Vd,VtVm", MATCH_VNCVTXXW, MASK_VNCVTXXW, match_narrow_vd_neq_vs2_neq_vm, INSN_ALIAS },
+
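+/* Narrowing shifts.  The ".w" source operand is 2*SEW wide, which is why
+   these use match_narrow_vd_neq_vs2_neq_vm to enforce the narrowing
+   register-overlap rules.  */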
+{"vnsrl.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VNSRLWV, MASK_VNSRLWV, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnsrl.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VNSRLWX, MASK_VNSRLWX, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnsrl.wi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VNSRLWI, MASK_VNSRLWI, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnsra.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VNSRAWV, MASK_VNSRAWV, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnsra.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VNSRAWX, MASK_VNSRAWX, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnsra.wi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VNSRAWI, MASK_VNSRAWI, match_narrow_vd_neq_vs2_neq_vm, 0 },
+
+{"vmseq.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSEQVV, MASK_VMSEQVV, match_opcode, 0 },
+{"vmseq.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSEQVX, MASK_VMSEQVX, match_opcode, 0 },
+{"vmseq.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSEQVI, MASK_VMSEQVI, match_opcode, 0 },
+{"vmsne.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSNEVV, MASK_VMSNEVV, match_opcode, 0 },
+{"vmsne.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSNEVX, MASK_VMSNEVX, match_opcode, 0 },
+{"vmsne.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSNEVI, MASK_VMSNEVI, match_opcode, 0 },
+{"vmsltu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSLTUVV, MASK_VMSLTUVV, match_opcode, 0 },
+{"vmsltu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSLTUVX, MASK_VMSLTUVX, match_opcode, 0 },
+{"vmslt.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSLTVV, MASK_VMSLTVV, match_opcode, 0 },
+{"vmslt.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSLTVX, MASK_VMSLTVX, match_opcode, 0 },
+{"vmsleu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSLEUVV, MASK_VMSLEUVV, match_opcode, 0 },
+{"vmsleu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSLEUVX, MASK_VMSLEUVX, match_opcode, 0 },
+{"vmsleu.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSLEUVI, MASK_VMSLEUVI, match_opcode, 0 },
+{"vmsle.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMSLEVV, MASK_VMSLEVV, match_opcode, 0 },
+{"vmsle.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSLEVX, MASK_VMSLEVX, match_opcode, 0 },
+{"vmsle.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSLEVI, MASK_VMSLEVI, match_opcode, 0 },
+{"vmsgtu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSGTUVX, MASK_VMSGTUVX, match_opcode, 0 },
+{"vmsgtu.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSGTUVI, MASK_VMSGTUVI, match_opcode, 0 },
+{"vmsgt.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMSGTVX, MASK_VMSGTVX, match_opcode, 0 },
+{"vmsgt.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VMSGTVI, MASK_VMSGTVI, match_opcode, 0 },
+
+/* These aliases are for assembly but not disassembly. */
+{"vmsgt.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMSLTVV, MASK_VMSLTVV, match_opcode, INSN_ALIAS },
+{"vmsgtu.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMSLTUVV, MASK_VMSLTUVV, match_opcode, INSN_ALIAS },
+{"vmsge.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMSLEVV, MASK_VMSLEVV, match_opcode, INSN_ALIAS },
+{"vmsgeu.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMSLEUVV, MASK_VMSLEUVV, match_opcode, INSN_ALIAS },
+{"vmslt.vi", 0, INSN_CLASS_V, "Vd,Vt,VkVm", MATCH_VMSLEVI, MASK_VMSLEVI, match_opcode, INSN_ALIAS },
+{"vmsltu.vi", 0, INSN_CLASS_V, "Vd,Vu,0Vm", MATCH_VMSNEVV, MASK_VMSNEVV, match_opcode, INSN_ALIAS },
+{"vmsltu.vi", 0, INSN_CLASS_V, "Vd,Vt,VkVm", MATCH_VMSLEUVI, MASK_VMSLEUVI, match_opcode, INSN_ALIAS },
+{"vmsge.vi", 0, INSN_CLASS_V, "Vd,Vt,VkVm", MATCH_VMSGTVI, MASK_VMSGTVI, match_opcode, INSN_ALIAS },
+{"vmsgeu.vi", 0, INSN_CLASS_V, "Vd,Vu,0Vm", MATCH_VMSEQVV, MASK_VMSEQVV, match_opcode, INSN_ALIAS },
+{"vmsgeu.vi", 0, INSN_CLASS_V, "Vd,Vt,VkVm", MATCH_VMSGTUVI, MASK_VMSGTUVI, match_opcode, INSN_ALIAS },
+
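+/* There is no vmsge{u}.vx encoding, so these entries are assembler macros
+   (M_VMSGE/M_VMSGEU), typically expanding to vmslt{u}.vx followed by a mask
+   complement; the five-operand form also takes a mask and a scratch
+   register.  */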
+{"vmsge.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", 0, (int) M_VMSGE, match_never, INSN_MACRO },
+{"vmsge.vx", 0, INSN_CLASS_V, "Vd,Vt,s,VM,VT", 0, (int) M_VMSGE, match_never, INSN_MACRO },
+{"vmsgeu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", 0, (int) M_VMSGEU, match_never, INSN_MACRO },
+{"vmsgeu.vx", 0, INSN_CLASS_V, "Vd,Vt,s,VM,VT", 0, (int) M_VMSGEU, match_never, INSN_MACRO },
+
+{"vminu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMINUVV, MASK_VMINUVV, match_vd_neq_vm, 0},
+{"vminu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMINUVX, MASK_VMINUVX, match_vd_neq_vm, 0},
+{"vmin.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMINVV, MASK_VMINVV, match_vd_neq_vm, 0},
+{"vmin.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMINVX, MASK_VMINVX, match_vd_neq_vm, 0},
+{"vmaxu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMAXUVV, MASK_VMAXUVV, match_vd_neq_vm, 0},
+{"vmaxu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMAXUVX, MASK_VMAXUVX, match_vd_neq_vm, 0},
+{"vmax.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMAXVV, MASK_VMAXVV, match_vd_neq_vm, 0},
+{"vmax.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMAXVX, MASK_VMAXVX, match_vd_neq_vm, 0},
+
+{"vmul.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMULVV, MASK_VMULVV, match_vd_neq_vm, 0 },
+{"vmul.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMULVX, MASK_VMULVX, match_vd_neq_vm, 0 },
+{"vmulh.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMULHVV, MASK_VMULHVV, match_vd_neq_vm, 0 },
+{"vmulh.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMULHVX, MASK_VMULHVX, match_vd_neq_vm, 0 },
+{"vmulhu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMULHUVV, MASK_VMULHUVV, match_vd_neq_vm, 0 },
+{"vmulhu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMULHUVX, MASK_VMULHUVX, match_vd_neq_vm, 0 },
+{"vmulhsu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VMULHSUVV, MASK_VMULHSUVV, match_vd_neq_vm, 0 },
+{"vmulhsu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VMULHSUVX, MASK_VMULHSUVX, match_vd_neq_vm, 0 },
+
+{"vwmul.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWMULVV, MASK_VWMULVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
+{"vwmul.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWMULVX, MASK_VWMULVX, match_widen_vd_neq_vs2_neq_vm, 0 },
+{"vwmulu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWMULUVV, MASK_VWMULUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
+{"vwmulu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWMULUVX, MASK_VWMULUVX, match_widen_vd_neq_vs2_neq_vm, 0 },
+{"vwmulsu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWMULSUVV, MASK_VWMULSUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0 },
+{"vwmulsu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VWMULSUVX, MASK_VWMULSUVX, match_widen_vd_neq_vs2_neq_vm, 0 },
+
+{"vmacc.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMACCVV, MASK_VMACCVV, match_vd_neq_vm, 0},
+{"vmacc.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VMACCVX, MASK_VMACCVX, match_vd_neq_vm, 0},
+{"vnmsac.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VNMSACVV, MASK_VNMSACVV, match_vd_neq_vm, 0},
+{"vnmsac.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VNMSACVX, MASK_VNMSACVX, match_vd_neq_vm, 0},
+{"vmadd.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VMADDVV, MASK_VMADDVV, match_vd_neq_vm, 0},
+{"vmadd.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VMADDVX, MASK_VMADDVX, match_vd_neq_vm, 0},
+{"vnmsub.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VNMSUBVV, MASK_VNMSUBVV, match_vd_neq_vm, 0},
+{"vnmsub.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VNMSUBVX, MASK_VNMSUBVX, match_vd_neq_vm, 0},
+
+{"vwmaccu.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VWMACCUVV, MASK_VWMACCUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vwmaccu.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VWMACCUVX, MASK_VWMACCUVX, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vwmacc.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VWMACCVV, MASK_VWMACCVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vwmacc.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VWMACCVX, MASK_VWMACCVX, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vwmaccsu.vv", 0, INSN_CLASS_V, "Vd,Vs,VtVm", MATCH_VWMACCSUVV, MASK_VWMACCSUVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vwmaccsu.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VWMACCSUVX, MASK_VWMACCSUVX, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vwmaccus.vx", 0, INSN_CLASS_V, "Vd,s,VtVm", MATCH_VWMACCUSVX, MASK_VWMACCUSVX, match_widen_vd_neq_vs2_neq_vm, 0},
+
+{"vdivu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VDIVUVV, MASK_VDIVUVV, match_vd_neq_vm, 0 },
+{"vdivu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VDIVUVX, MASK_VDIVUVX, match_vd_neq_vm, 0 },
+{"vdiv.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VDIVVV, MASK_VDIVVV, match_vd_neq_vm, 0 },
+{"vdiv.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VDIVVX, MASK_VDIVVX, match_vd_neq_vm, 0 },
+{"vremu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREMUVV, MASK_VREMUVV, match_vd_neq_vm, 0 },
+{"vremu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VREMUVX, MASK_VREMUVX, match_vd_neq_vm, 0 },
+{"vrem.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREMVV, MASK_VREMVV, match_vd_neq_vm, 0 },
+{"vrem.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VREMVX, MASK_VREMVX, match_vd_neq_vm, 0 },
+
+{"vmerge.vvm", 0, INSN_CLASS_V, "Vd,Vt,Vs,V0", MATCH_VMERGEVVM, MASK_VMERGEVVM, match_opcode, 0 },
+{"vmerge.vxm", 0, INSN_CLASS_V, "Vd,Vt,s,V0", MATCH_VMERGEVXM, MASK_VMERGEVXM, match_opcode, 0 },
+{"vmerge.vim", 0, INSN_CLASS_V, "Vd,Vt,Vi,V0", MATCH_VMERGEVIM, MASK_VMERGEVIM, match_opcode, 0 },
+
+{"vmv.v.v", 0, INSN_CLASS_V, "Vd,Vs", MATCH_VMVVV, MASK_VMVVV, match_opcode, 0 },
+{"vmv.v.x", 0, INSN_CLASS_V, "Vd,s", MATCH_VMVVX, MASK_VMVVX, match_opcode, 0 },
+{"vmv.v.i", 0, INSN_CLASS_V, "Vd,Vi", MATCH_VMVVI, MASK_VMVVI, match_opcode, 0 },
+
+{"vsaddu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSADDUVV, MASK_VSADDUVV, match_vd_neq_vm, 0 },
+{"vsaddu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSADDUVX, MASK_VSADDUVX, match_vd_neq_vm, 0 },
+{"vsaddu.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VSADDUVI, MASK_VSADDUVI, match_vd_neq_vm, 0 },
+{"vsadd.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSADDVV, MASK_VSADDVV, match_vd_neq_vm, 0 },
+{"vsadd.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSADDVX, MASK_VSADDVX, match_vd_neq_vm, 0 },
+{"vsadd.vi", 0, INSN_CLASS_V, "Vd,Vt,ViVm", MATCH_VSADDVI, MASK_VSADDVI, match_vd_neq_vm, 0 },
+{"vssubu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSSUBUVV, MASK_VSSUBUVV, match_vd_neq_vm, 0 },
+{"vssubu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSSUBUVX, MASK_VSSUBUVX, match_vd_neq_vm, 0 },
+{"vssub.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSSUBVV, MASK_VSSUBVV, match_vd_neq_vm, 0 },
+{"vssub.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSSUBVX, MASK_VSSUBVX, match_vd_neq_vm, 0 },
+
+{"vaaddu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VAADDUVV, MASK_VAADDUVV, match_vd_neq_vm, 0 },
+{"vaaddu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VAADDUVX, MASK_VAADDUVX, match_vd_neq_vm, 0 },
+{"vaadd.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VAADDVV, MASK_VAADDVV, match_vd_neq_vm, 0 },
+{"vaadd.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VAADDVX, MASK_VAADDVX, match_vd_neq_vm, 0 },
+{"vasubu.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VASUBUVV, MASK_VASUBUVV, match_vd_neq_vm, 0 },
+{"vasubu.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VASUBUVX, MASK_VASUBUVX, match_vd_neq_vm, 0 },
+{"vasub.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VASUBVV, MASK_VASUBVV, match_vd_neq_vm, 0 },
+{"vasub.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VASUBVX, MASK_VASUBVX, match_vd_neq_vm, 0 },
+
+{"vsmul.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSMULVV, MASK_VSMULVV, match_vd_neq_vm, 0 },
+{"vsmul.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSMULVX, MASK_VSMULVX, match_vd_neq_vm, 0 },
+
+{"vssrl.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSSRLVV, MASK_VSSRLVV, match_vd_neq_vm, 0 },
+{"vssrl.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSSRLVX, MASK_VSSRLVX, match_vd_neq_vm, 0 },
+{"vssrl.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSSRLVI, MASK_VSSRLVI, match_vd_neq_vm, 0 },
+{"vssra.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VSSRAVV, MASK_VSSRAVV, match_vd_neq_vm, 0 },
+{"vssra.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSSRAVX, MASK_VSSRAVX, match_vd_neq_vm, 0 },
+{"vssra.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSSRAVI, MASK_VSSRAVI, match_vd_neq_vm, 0 },
+
+{"vnclipu.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VNCLIPUWV, MASK_VNCLIPUWV, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnclipu.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VNCLIPUWX, MASK_VNCLIPUWX, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnclipu.wi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VNCLIPUWI, MASK_VNCLIPUWI, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnclip.wv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VNCLIPWV, MASK_VNCLIPWV, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnclip.wx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VNCLIPWX, MASK_VNCLIPWX, match_narrow_vd_neq_vs2_neq_vm, 0 },
+{"vnclip.wi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VNCLIPWI, MASK_VNCLIPWI, match_narrow_vd_neq_vs2_neq_vm, 0 },
+
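+/* Floating-point vector ops.  INSN_CLASS_V_AND_F requires both the V and F
+   extensions; the S operand in the .vf forms is a scalar FP source
+   register.  */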
+{"vfadd.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFADDVV, MASK_VFADDVV, match_vd_neq_vm, 0},
+{"vfadd.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFADDVF, MASK_VFADDVF, match_vd_neq_vm, 0},
+{"vfsub.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFSUBVV, MASK_VFSUBVV, match_vd_neq_vm, 0},
+{"vfsub.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSUBVF, MASK_VFSUBVF, match_vd_neq_vm, 0},
+{"vfrsub.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFRSUBVF, MASK_VFRSUBVF, match_vd_neq_vm, 0},
+
+{"vfwadd.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWADDVV, MASK_VFWADDVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwadd.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFWADDVF, MASK_VFWADDVF, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwsub.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWSUBVV, MASK_VFWSUBVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwsub.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFWSUBVF, MASK_VFWSUBVF, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwadd.wv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWADDWV, MASK_VFWADDWV, match_widen_vd_neq_vs1_neq_vm, 0},
+{"vfwadd.wf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFWADDWF, MASK_VFWADDWF, match_widen_vd_neq_vm, 0},
+{"vfwsub.wv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWSUBWV, MASK_VFWSUBWV, match_widen_vd_neq_vs1_neq_vm, 0},
+{"vfwsub.wf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFWSUBWF, MASK_VFWSUBWF, match_widen_vd_neq_vm, 0},
+
+{"vfmul.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFMULVV, MASK_VFMULVV, match_vd_neq_vm, 0},
+{"vfmul.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFMULVF, MASK_VFMULVF, match_vd_neq_vm, 0},
+{"vfdiv.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFDIVVV, MASK_VFDIVVV, match_vd_neq_vm, 0},
+{"vfdiv.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFDIVVF, MASK_VFDIVVF, match_vd_neq_vm, 0},
+{"vfrdiv.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFRDIVVF, MASK_VFRDIVVF, match_vd_neq_vm, 0},
+
+{"vfwmul.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWMULVV, MASK_VFWMULVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwmul.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFWMULVF, MASK_VFWMULVF, match_widen_vd_neq_vs2_neq_vm, 0},
+
+{"vfmadd.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFMADDVV, MASK_VFMADDVV, match_vd_neq_vm, 0},
+{"vfmadd.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFMADDVF, MASK_VFMADDVF, match_vd_neq_vm, 0},
+{"vfnmadd.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFNMADDVV, MASK_VFNMADDVV, match_vd_neq_vm, 0},
+{"vfnmadd.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFNMADDVF, MASK_VFNMADDVF, match_vd_neq_vm, 0},
+{"vfmsub.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFMSUBVV, MASK_VFMSUBVV, match_vd_neq_vm, 0},
+{"vfmsub.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFMSUBVF, MASK_VFMSUBVF, match_vd_neq_vm, 0},
+{"vfnmsub.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFNMSUBVV, MASK_VFNMSUBVV, match_vd_neq_vm, 0},
+{"vfnmsub.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFNMSUBVF, MASK_VFNMSUBVF, match_vd_neq_vm, 0},
+{"vfmacc.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFMACCVV, MASK_VFMACCVV, match_vd_neq_vm, 0},
+{"vfmacc.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFMACCVF, MASK_VFMACCVF, match_vd_neq_vm, 0},
+{"vfnmacc.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFNMACCVV, MASK_VFNMACCVV, match_vd_neq_vm, 0},
+{"vfnmacc.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFNMACCVF, MASK_VFNMACCVF, match_vd_neq_vm, 0},
+{"vfmsac.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFMSACVV, MASK_VFMSACVV, match_vd_neq_vm, 0},
+{"vfmsac.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFMSACVF, MASK_VFMSACVF, match_vd_neq_vm, 0},
+{"vfnmsac.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFNMSACVV, MASK_VFNMSACVV, match_vd_neq_vm, 0},
+{"vfnmsac.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFNMSACVF, MASK_VFNMSACVF, match_vd_neq_vm, 0},
+
+{"vfwmacc.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFWMACCVV, MASK_VFWMACCVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwmacc.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFWMACCVF, MASK_VFWMACCVF, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwnmacc.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFWNMACCVV, MASK_VFWNMACCVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwnmacc.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFWNMACCVF, MASK_VFWNMACCVF, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwmsac.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFWMSACVV, MASK_VFWMSACVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwmsac.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFWMSACVF, MASK_VFWMSACVF, match_widen_vd_neq_vs2_neq_vm, 0},
+{"vfwnmsac.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VFWNMSACVV, MASK_VFWNMSACVV, match_widen_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vfwnmsac.vf", 0, INSN_CLASS_V_AND_F, "Vd,S,VtVm", MATCH_VFWNMSACVF, MASK_VFWNMSACVF, match_widen_vd_neq_vs2_neq_vm, 0},
+
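+/* vfrsqrte7.v and vfrece7.v are kept as older spellings of the estimate
+   instructions; they share MATCH/MASK with vfrsqrt7.v and vfrec7.v.  */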
  4038. +{"vfsqrt.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFSQRTV, MASK_VFSQRTV, match_vd_neq_vm, 0},
  4039. +{"vfrsqrt7.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFRSQRT7V, MASK_VFRSQRT7V, match_vd_neq_vm, 0},
  4040. +{"vfrsqrte7.v",0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFRSQRT7V, MASK_VFRSQRT7V, match_vd_neq_vm, 0},
  4041. +{"vfrec7.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFREC7V, MASK_VFREC7V, match_vd_neq_vm, 0},
  4042. +{"vfrece7.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFREC7V, MASK_VFREC7V, match_vd_neq_vm, 0},
  4043. +{"vfclass.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCLASSV, MASK_VFCLASSV, match_vd_neq_vm, 0},
  4044. +
  4045. +{"vfmin.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFMINVV, MASK_VFMINVV, match_vd_neq_vm, 0},
  4046. +{"vfmin.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFMINVF, MASK_VFMINVF, match_vd_neq_vm, 0},
  4047. +{"vfmax.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFMAXVV, MASK_VFMAXVV, match_vd_neq_vm, 0},
  4048. +{"vfmax.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFMAXVF, MASK_VFMAXVF, match_vd_neq_vm, 0},
  4049. +
  4050. +{"vfneg.v", 0, INSN_CLASS_V_AND_F, "Vd,VuVm", MATCH_VFSGNJNVV, MASK_VFSGNJNVV, match_vs1_eq_vs2_neq_vm, INSN_ALIAS },
  4051. +{"vfabs.v", 0, INSN_CLASS_V_AND_F, "Vd,VuVm", MATCH_VFSGNJXVV, MASK_VFSGNJXVV, match_vs1_eq_vs2_neq_vm, INSN_ALIAS },
+
+{"vfsgnj.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFSGNJVV, MASK_VFSGNJVV, match_vd_neq_vm, 0},
+{"vfsgnj.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSGNJVF, MASK_VFSGNJVF, match_vd_neq_vm, 0},
+{"vfsgnjn.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFSGNJNVV, MASK_VFSGNJNVV, match_vd_neq_vm, 0},
+{"vfsgnjn.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSGNJNVF, MASK_VFSGNJNVF, match_vd_neq_vm, 0},
+{"vfsgnjx.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFSGNJXVV, MASK_VFSGNJXVV, match_vd_neq_vm, 0},
+{"vfsgnjx.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSGNJXVF, MASK_VFSGNJXVF, match_vd_neq_vm, 0},
+
+{"vmfeq.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VMFEQVV, MASK_VMFEQVV, match_opcode, 0},
+{"vmfeq.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFEQVF, MASK_VMFEQVF, match_opcode, 0},
+{"vmfne.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VMFNEVV, MASK_VMFNEVV, match_opcode, 0},
+{"vmfne.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFNEVF, MASK_VMFNEVF, match_opcode, 0},
+{"vmflt.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VMFLTVV, MASK_VMFLTVV, match_opcode, 0},
+{"vmflt.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFLTVF, MASK_VMFLTVF, match_opcode, 0},
+{"vmfle.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VMFLEVV, MASK_VMFLEVV, match_opcode, 0},
+{"vmfle.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFLEVF, MASK_VMFLEVF, match_opcode, 0},
+{"vmfgt.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFGTVF, MASK_VMFGTVF, match_opcode, 0},
+{"vmfge.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VMFGEVF, MASK_VMFGEVF, match_opcode, 0},
+
+/* These aliases are for assembly but not disassembly. */
+{"vmfgt.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VMFLTVV, MASK_VMFLTVV, match_opcode, INSN_ALIAS},
+{"vmfge.vv", 0, INSN_CLASS_V_AND_F, "Vd,Vs,VtVm", MATCH_VMFLEVV, MASK_VMFLEVV, match_opcode, INSN_ALIAS},
+
+{"vfmerge.vfm",0, INSN_CLASS_V_AND_F, "Vd,Vt,S,V0", MATCH_VFMERGEVFM, MASK_VFMERGEVFM, match_opcode, 0},
+{"vfmv.v.f", 0, INSN_CLASS_V_AND_F, "Vd,S", MATCH_VFMVVF, MASK_VFMVVF, match_opcode, 0 },
+
+{"vfcvt.xu.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTXUFV, MASK_VFCVTXUFV, match_vd_neq_vm, 0},
+{"vfcvt.x.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTXFV, MASK_VFCVTXFV, match_vd_neq_vm, 0},
+{"vfcvt.rtz.xu.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTRTZXUFV, MASK_VFCVTRTZXUFV, match_vd_neq_vm, 0},
+{"vfcvt.rtz.x.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTRTZXFV, MASK_VFCVTRTZXFV, match_vd_neq_vm, 0},
+{"vfcvt.f.xu.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTFXUV, MASK_VFCVTFXUV, match_vd_neq_vm, 0},
+{"vfcvt.f.x.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFCVTFXV, MASK_VFCVTFXV, match_vd_neq_vm, 0},
+
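+/* Widening converts write 2*SEW results from SEW sources, so vd must
+   not overlap the source group or the mask register; the narrowing
+   converts below have the mirror-image constraint.  */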
  4085. +{"vfwcvt.xu.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTXUFV, MASK_VFWCVTXUFV, match_widen_vd_neq_vs2_neq_vm, 0},
  4086. +{"vfwcvt.x.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTXFV, MASK_VFWCVTXFV, match_widen_vd_neq_vs2_neq_vm, 0},
  4087. +{"vfwcvt.rtz.xu.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTRTZXUFV, MASK_VFWCVTRTZXUFV, match_widen_vd_neq_vs2_neq_vm, 0},
  4088. +{"vfwcvt.rtz.x.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTRTZXFV, MASK_VFWCVTRTZXFV, match_widen_vd_neq_vs2_neq_vm, 0},
  4089. +{"vfwcvt.f.xu.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTFXUV, MASK_VFWCVTFXUV, match_widen_vd_neq_vs2_neq_vm, 0},
  4090. +{"vfwcvt.f.x.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTFXV, MASK_VFWCVTFXV, match_widen_vd_neq_vs2_neq_vm, 0},
  4091. +{"vfwcvt.f.f.v", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFWCVTFFV, MASK_VFWCVTFFV, match_widen_vd_neq_vs2_neq_vm, 0},
  4092. +
  4093. +{"vfncvt.xu.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTXUFW, MASK_VFNCVTXUFW, match_narrow_vd_neq_vs2_neq_vm, 0},
  4094. +{"vfncvt.x.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTXFW, MASK_VFNCVTXFW, match_narrow_vd_neq_vs2_neq_vm, 0},
  4095. +{"vfncvt.rtz.xu.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTRTZXUFW, MASK_VFNCVTRTZXUFW, match_narrow_vd_neq_vs2_neq_vm, 0},
  4096. +{"vfncvt.rtz.x.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTRTZXFW, MASK_VFNCVTRTZXFW, match_narrow_vd_neq_vs2_neq_vm, 0},
  4097. +{"vfncvt.f.xu.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTFXUW, MASK_VFNCVTFXUW, match_narrow_vd_neq_vs2_neq_vm, 0},
  4098. +{"vfncvt.f.x.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTFXW, MASK_VFNCVTFXW, match_narrow_vd_neq_vs2_neq_vm, 0},
  4099. +{"vfncvt.f.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTFFW, MASK_VFNCVTFFW, match_narrow_vd_neq_vs2_neq_vm, 0},
  4100. +{"vfncvt.rod.f.f.w", 0, INSN_CLASS_V_AND_F, "Vd,VtVm", MATCH_VFNCVTRODFFW, MASK_VFNCVTRODFFW, match_narrow_vd_neq_vs2_neq_vm, 0},
  4101. +
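+/* Reductions deposit a single result in element 0 of vd and may
+   overlap their sources, so plain match_opcode suffices.  */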
  4102. +{"vredsum.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDSUMVS, MASK_VREDSUMVS, match_opcode, 0},
  4103. +{"vredmaxu.vs",0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDMAXUVS, MASK_VREDMAXUVS, match_opcode, 0},
  4104. +{"vredmax.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDMAXVS, MASK_VREDMAXVS, match_opcode, 0},
  4105. +{"vredminu.vs",0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDMINUVS, MASK_VREDMINUVS, match_opcode, 0},
  4106. +{"vredmin.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDMINVS, MASK_VREDMINVS, match_opcode, 0},
  4107. +{"vredand.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDANDVS, MASK_VREDANDVS, match_opcode, 0},
  4108. +{"vredor.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDORVS, MASK_VREDORVS, match_opcode, 0},
  4109. +{"vredxor.vs", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VREDXORVS, MASK_VREDXORVS, match_opcode, 0},
  4110. +
  4111. +{"vwredsumu.vs",0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWREDSUMUVS, MASK_VWREDSUMUVS, match_opcode, 0},
  4112. +{"vwredsum.vs",0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VWREDSUMVS, MASK_VWREDSUMVS, match_opcode, 0},
  4113. +
  4114. +{"vfredosum.vs",0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFREDOSUMVS, MASK_VFREDOSUMVS, match_opcode, 0},
  4115. +{"vfredsum.vs", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFREDSUMVS, MASK_VFREDSUMVS, match_opcode, 0},
  4116. +{"vfredmax.vs", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFREDMAXVS, MASK_VFREDMAXVS, match_opcode, 0},
  4117. +{"vfredmin.vs", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFREDMINVS, MASK_VFREDMINVS, match_opcode, 0},
  4118. +
  4119. +{"vfwredosum.vs",0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWREDOSUMVS, MASK_VFWREDOSUMVS, match_opcode, 0},
  4120. +{"vfwredsum.vs", 0, INSN_CLASS_V_AND_F, "Vd,Vt,VsVm", MATCH_VFWREDSUMVS, MASK_VFWREDSUMVS, match_opcode, 0},
  4121. +
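+/* Mask-register aliases: vmmv.m/vmcpy.m vd,vs expand to
+   vmand.mm vd,vs,vs, vmclr.m vd to vmxor.mm vd,vd,vd, vmset.m vd to
+   vmxnor.mm vd,vd,vd, and vmnot.m vd,vs to vmnand.mm vd,vs,vs.  */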
  4122. +{"vmmv.m", 0, INSN_CLASS_V, "Vd,Vu", MATCH_VMANDMM, MASK_VMANDMM, match_vs1_eq_vs2, INSN_ALIAS},
  4123. +{"vmcpy.m", 0, INSN_CLASS_V, "Vd,Vu", MATCH_VMANDMM, MASK_VMANDMM, match_vs1_eq_vs2, INSN_ALIAS},
  4124. +{"vmclr.m", 0, INSN_CLASS_V, "Vv", MATCH_VMXORMM, MASK_VMXORMM, match_vd_eq_vs1_eq_vs2, INSN_ALIAS},
  4125. +{"vmset.m", 0, INSN_CLASS_V, "Vv", MATCH_VMXNORMM, MASK_VMXNORMM, match_vd_eq_vs1_eq_vs2, INSN_ALIAS},
  4126. +{"vmnot.m", 0, INSN_CLASS_V, "Vd,Vu", MATCH_VMNANDMM, MASK_VMNANDMM, match_vs1_eq_vs2, INSN_ALIAS},
  4127. +
  4128. +{"vmand.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMANDMM, MASK_VMANDMM, match_opcode, 0},
  4129. +{"vmnand.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMNANDMM, MASK_VMNANDMM, match_opcode, 0},
  4130. +{"vmandnot.mm",0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMANDNOTMM, MASK_VMANDNOTMM, match_opcode, 0},
  4131. +{"vmxor.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMXORMM, MASK_VMXORMM, match_opcode, 0},
  4132. +{"vmor.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMORMM, MASK_VMORMM, match_opcode, 0},
  4133. +{"vmnor.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMNORMM, MASK_VMNORMM, match_opcode, 0},
  4134. +{"vmornot.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMORNOTMM, MASK_VMORNOTMM, match_opcode, 0},
  4135. +{"vmxnor.mm", 0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VMXNORMM, MASK_VMXNORMM, match_opcode, 0},
  4136. +
  4137. +{"vpopc.m", 0, INSN_CLASS_V, "d,VtVm", MATCH_VPOPCM, MASK_VPOPCM, match_opcode, 0},
  4138. +{"vfirst.m", 0, INSN_CLASS_V, "d,VtVm", MATCH_VFIRSTM, MASK_VFIRSTM, match_opcode, 0},
  4139. +{"vmsbf.m", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VMSBFM, MASK_VMSBFM, match_vd_neq_vs2_neq_vm, 0},
  4140. +{"vmsif.m", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VMSIFM, MASK_VMSIFM, match_vd_neq_vs2_neq_vm, 0},
  4141. +{"vmsof.m", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VMSOFM, MASK_VMSOFM, match_vd_neq_vs2_neq_vm, 0},
  4142. +{"viota.m", 0, INSN_CLASS_V, "Vd,VtVm", MATCH_VIOTAM, MASK_VIOTAM, match_vd_neq_vs2_neq_vm, 0},
  4143. +{"vid.v", 0, INSN_CLASS_V, "VdVm", MATCH_VIDV, MASK_VIDV, match_vd_neq_vm, 0},
+
+{"vmv.x.s", 0, INSN_CLASS_V, "d,Vt", MATCH_VMVXS, MASK_VMVXS, match_opcode, 0},
+{"vmv.s.x", 0, INSN_CLASS_V, "Vd,s", MATCH_VMVSX, MASK_VMVSX, match_opcode, 0},
+
+{"vfmv.f.s", 0, INSN_CLASS_V_AND_F, "D,Vt", MATCH_VFMVFS, MASK_VFMVFS, match_opcode, 0},
+{"vfmv.s.f", 0, INSN_CLASS_V_AND_F, "Vd,S", MATCH_VFMVSF, MASK_VFMVSF, match_opcode, 0},
+
+{"vslideup.vx",0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSLIDEUPVX, MASK_VSLIDEUPVX, match_vd_neq_vs2_neq_vm, 0},
+{"vslideup.vi",0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSLIDEUPVI, MASK_VSLIDEUPVI, match_vd_neq_vs2_neq_vm, 0},
+{"vslidedown.vx",0,INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSLIDEDOWNVX, MASK_VSLIDEDOWNVX, match_vd_neq_vm, 0},
+{"vslidedown.vi",0,INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VSLIDEDOWNVI, MASK_VSLIDEDOWNVI, match_vd_neq_vm, 0},
+
+{"vslide1up.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSLIDE1UPVX, MASK_VSLIDE1UPVX, match_vd_neq_vs2_neq_vm, 0},
+{"vslide1down.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VSLIDE1DOWNVX, MASK_VSLIDE1DOWNVX, match_vd_neq_vm, 0},
+{"vfslide1up.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSLIDE1UPVF, MASK_VFSLIDE1UPVF, match_vd_neq_vs2_neq_vm, 0},
+{"vfslide1down.vf", 0, INSN_CLASS_V_AND_F, "Vd,Vt,SVm", MATCH_VFSLIDE1DOWNVF, MASK_VFSLIDE1DOWNVF, match_vd_neq_vm, 0},
+
+{"vrgather.vv", 0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VRGATHERVV, MASK_VRGATHERVV, match_vd_neq_vs1_neq_vs2_neq_vm, 0},
+{"vrgather.vx", 0, INSN_CLASS_V, "Vd,Vt,sVm", MATCH_VRGATHERVX, MASK_VRGATHERVX, match_vd_neq_vs2_neq_vm, 0},
+{"vrgather.vi", 0, INSN_CLASS_V, "Vd,Vt,VjVm", MATCH_VRGATHERVI, MASK_VRGATHERVI, match_vd_neq_vs2_neq_vm, 0},
+{"vrgatherei16.vv",0, INSN_CLASS_V, "Vd,Vt,VsVm", MATCH_VRGATHEREI16VV, MASK_VRGATHEREI16VV, match_vd_neq_vs1_neq_vs2_neq_vm, 0},
+
+{"vcompress.vm",0, INSN_CLASS_V, "Vd,Vt,Vs", MATCH_VCOMPRESSVM, MASK_VCOMPRESSVM, match_vd_neq_vs1_neq_vs2, 0},
+
+{"vmv1r.v", 0, INSN_CLASS_V, "Vd,Vt", MATCH_VMV1RV, MASK_VMV1RV, match_vmv_nf_rv, 0},
+{"vmv2r.v", 0, INSN_CLASS_V, "Vd,Vt", MATCH_VMV2RV, MASK_VMV2RV, match_vmv_nf_rv, 0},
+{"vmv4r.v", 0, INSN_CLASS_V, "Vd,Vt", MATCH_VMV4RV, MASK_VMV4RV, match_vmv_nf_rv, 0},
+{"vmv8r.v", 0, INSN_CLASS_V, "Vd,Vt", MATCH_VMV8RV, MASK_VMV8RV, match_vmv_nf_rv, 0},
+/* END RVV */
 /* Terminate the list. */
 {0, 0, INSN_CLASS_NONE, 0, 0, 0, 0, 0}
 };
@@ -929,6 +2120,26 @@ const struct riscv_opcode riscv_insn_types[] =
 {"r", 0, INSN_CLASS_F, "O4,F3,F7,d,S,T", 0, 0, match_opcode, 0 },
 {"r", 0, INSN_CLASS_F, "O4,F3,F7,D,S,T", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,Vd,s,t", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,Vd,S,t", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,Vd,s,T", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,Vd,S,T", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,d,Vs,t", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,D,Vs,t", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,d,Vs,T", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,D,Vs,T", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,d,s,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,D,s,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,d,S,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,D,S,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,Vd,Vs,t", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,Vd,Vs,T", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,Vd,s,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,Vd,S,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,d,Vs,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V_AND_F, "O4,F3,F7,D,Vs,Vt", 0, 0, match_opcode, 0 },
+{"r", 0, INSN_CLASS_V, "O4,F3,F7,Vd,Vs,Vt", 0, 0, match_opcode, 0 },
+
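+/* These templates let .insn emit R-type OP-V encodings with vector
+   register operands, e.g. (field values are illustrative only):
+       .insn r 0x57, 0, 1, v4, v8, v16
+   which matches the "O4,F3,F7,Vd,Vs,Vt" template above.  */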
  4199. {"r", 0, INSN_CLASS_I, "O4,F3,F2,d,s,t,r", 0, 0, match_opcode, 0 },
  4200. {"r", 0, INSN_CLASS_F, "O4,F3,F2,D,s,t,r", 0, 0, match_opcode, 0 },
  4201. {"r", 0, INSN_CLASS_F, "O4,F3,F2,d,S,t,r", 0, 0, match_opcode, 0 },
  4202. @@ -1073,6 +2284,8 @@ const struct riscv_ext_version riscv_ext_version_table[] =
  4203. {"zbs", ISA_SPEC_CLASS_DRAFT, 0, 93},
  4204. {"zbt", ISA_SPEC_CLASS_DRAFT, 0, 93},
  4205. +{"zvamo", ISA_SPEC_CLASS_NONE, 1, 0},
  4206. +{"zvlsseg", ISA_SPEC_CLASS_NONE, 1, 0},
 /* Terminate the list. */
 {NULL, 0, 0, 0}
--
2.33.0