0008-merge-from-riscv-gcc-10.2.0-to-support-bitmanip.patch (71 KB)

  1. From 461f89263df4d8c27a2a99a9e0561e016e2752c6 Mon Sep 17 00:00:00 2001
  2. From: "yilun.xie" <yilun.xie@starfivetech.com>
  3. Date: Sat, 9 Oct 2021 10:43:19 +0800
  4. Subject: [PATCH 08/15] merge from riscv-gcc-10.2.0 to support bitmanip
  5. ---
  6. gcc/config/riscv/bitmanip.md | 482 ++++++++++++++++
  7. gcc/config/riscv/rvintrin.h | 1033 ++++++++++++++++++++++++++++++++++
  8. 2 files changed, 1515 insertions(+)
  9. create mode 100644 gcc/config/riscv/bitmanip.md
  10. create mode 100644 gcc/config/riscv/rvintrin.h
  11. diff --git a/gcc/config/riscv/bitmanip.md b/gcc/config/riscv/bitmanip.md
  12. new file mode 100644
  13. index 00000000000..6653219c3f0
  14. --- /dev/null
  15. +++ b/gcc/config/riscv/bitmanip.md
  16. @@ -0,0 +1,482 @@
  17. +;; Machine description for RISC-V Bit Manipulation operations.
  18. +;; Copyright (C) 2019 Free Software Foundation, Inc.
  19. +
  20. +;; This file is part of GCC.
  21. +
  22. +;; GCC is free software; you can redistribute it and/or modify
  23. +;; it under the terms of the GNU General Public License as published by
  24. +;; the Free Software Foundation; either version 3, or (at your option)
  25. +;; any later version.
  26. +
  27. +;; GCC is distributed in the hope that it will be useful,
  28. +;; but WITHOUT ANY WARRANTY; without even the implied warranty of
  29. +;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  30. +;; GNU General Public License for more details.
  31. +
  32. +;; You should have received a copy of the GNU General Public License
  33. +;; along with GCC; see the file COPYING3. If not see
  34. +;; <http://www.gnu.org/licenses/>.
  35. +
  36. +(define_code_iterator bitmanip_bitwise [and ior])
  37. +
  38. +(define_code_iterator any_minmax [smin smax umin umax])
  39. +
  40. +(define_code_iterator clz_ctz_pcnt [clz ctz popcount])
  41. +
  42. +(define_code_attr bitmanip_optab [(smin "smin")
  43. + (smax "smax")
  44. + (umin "umin")
  45. + (umax "umax")
  46. + (clz "clz")
  47. + (ctz "ctz")
  48. + (popcount "popcount")])
  49. +
  50. +(define_code_attr bitmanip_insn [(smin "min")
  51. + (smax "max")
  52. + (umin "minu")
  53. + (umax "maxu")
  54. + (clz "clz")
  55. + (ctz "ctz")
  56. + (popcount "cpop")])
  57. +
  58. +(define_mode_attr shiftm1 [(SI "const31_operand") (DI "const63_operand")])
  59. +
  60. +(define_insn "<bitmanip_optab>si2"
  61. + [(set (match_operand:SI 0 "register_operand" "=r")
  62. + (clz_ctz_pcnt:SI (match_operand:SI 1 "register_operand" "r")))]
  63. + "TARGET_ZBB"
  64. + { return TARGET_64BIT ? "<bitmanip_insn>w\t%0,%1" : "<bitmanip_insn>\t%0,%1"; }
  65. + [(set_attr "type" "bitmanip")])
  66. +
  67. +;; TODO: In theory zero_extend should be OK to combine too, since the output
  68. +;; range is 0 to 32, so zero_extend and sign_extend give the same result.
  69. +(define_insn "*<bitmanip_optab>disi2"
  70. + [(set (match_operand:DI 0 "register_operand" "=r")
  71. + (sign_extend:DI
  72. + (clz_ctz_pcnt:SI (match_operand:SI 1 "register_operand" "r"))))]
  73. + "TARGET_64BIT && TARGET_ZBB"
  74. + "<bitmanip_insn>w\t%0,%1"
  75. + [(set_attr "type" "bitmanip")])
  76. +
  77. +(define_insn "<bitmanip_optab>di2"
  78. + [(set (match_operand:DI 0 "register_operand" "=r")
  79. + (clz_ctz_pcnt:DI (match_operand:DI 1 "register_operand" "r")))]
  80. + "TARGET_64BIT && TARGET_ZBB"
  81. + "<bitmanip_insn>\t%0,%1"
  82. + [(set_attr "type" "bitmanip")])
  83. +
  84. +(define_insn "*<optab>_not<mode>"
  85. + [(set (match_operand:X 0 "register_operand" "=r")
  86. + (bitmanip_bitwise:X (not:X (match_operand:X 1 "register_operand" "r"))
  87. + (match_operand:X 2 "register_operand" "r")))]
  88. + "TARGET_ZBB || TARGET_ZBP"
  89. + "<insn>n\t%0,%2,%1"
  90. + [(set_attr "type" "bitmanip")])
  91. +
  92. +(define_insn "*xor_not<mode>"
  93. + [(set (match_operand:X 0 "register_operand" "=r")
  94. + (not:X (xor:X (match_operand:X 1 "register_operand" "r")
  95. + (match_operand:X 2 "register_operand" "r"))))]
  96. + "TARGET_ZBB || TARGET_ZBP"
  97. + "xnor\t%0,%1,%2"
  98. + [(set_attr "type" "bitmanip")])
  99. +
  100. +;;; ??? pack
  101. +
  102. +(define_insn "*zero_extendhi<GPR:mode>2_bitmanip"
  103. + [(set (match_operand:GPR 0 "register_operand" "=r,r")
  104. + (zero_extend:GPR (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
  105. + "TARGET_ZBB || TARGET_ZBP"
  106. + "@
  107. + zext.h\t%0,%1
  108. + lhu\t%0,%1"
  109. + [(set_attr "type" "bitmanip,load")])
  110. +
  111. +(define_insn "*zero_extendsidi2_bitmanip"
  112. + [(set (match_operand:DI 0 "register_operand" "=r,r")
  113. + (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "r,m")))]
  114. + "TARGET_64BIT && (TARGET_ZBB || TARGET_ZBA)"
  115. + "@
  116. + zext.w\t%0,%1
  117. + lwu\t%0,%1"
  118. + [(set_attr "type" "bitmanip,load")])
  119. +
  120. +(define_insn "<bitmanip_optab><mode>3"
  121. + [(set (match_operand:X 0 "register_operand" "=r")
  122. + (any_minmax:X (match_operand:X 1 "register_operand" "r")
  123. + (match_operand:X 2 "register_operand" "r")))]
  124. + "TARGET_ZBB"
  125. + "<bitmanip_insn>\t%0,%1,%2"
  126. + [(set_attr "type" "bitmanip")])
  127. +
  128. +(define_insn "*bset<mode>"
  129. + [(set (match_operand:X 0 "register_operand" "=r")
  130. + (ior:X (ashift:X (const_int 1)
  131. + (match_operand:QI 2 "register_operand" "r"))
  132. + (match_operand:X 1 "register_operand" "r")))]
  133. + "TARGET_ZBS"
  134. + "bset\t%0,%1,%2"
  135. + [(set_attr "type" "bitmanip")])
  136. +
  137. +(define_insn "*bset<mode>_mask"
  138. + [(set (match_operand:X 0 "register_operand" "=r")
  139. + (ior:X (ashift:X (const_int 1)
  140. + (subreg:QI
  141. + (and:X (match_operand:X 2 "register_operand" "r")
  142. + (match_operand 3 "<X:shiftm1>" "i")) 0))
  143. + (match_operand:X 1 "register_operand" "r")))]
  144. + "TARGET_ZBS"
  145. + "bset\t%0,%1,%2"
  146. + [(set_attr "type" "bitmanip")])
  147. +
  148. +(define_insn "*bset<mode>_1"
  149. + [(set (match_operand:X 0 "register_operand" "=r")
  150. + (ashift:X (const_int 1)
  151. + (match_operand:QI 1 "register_operand" "r")))]
  152. + "TARGET_ZBS"
  153. + "bset\t%0,x0,%1"
  154. + [(set_attr "type" "bitmanip")])
  155. +
  156. +(define_insn "*bset<mode>_1_mask"
  157. + [(set (match_operand:X 0 "register_operand" "=r")
  158. + (ashift:X (const_int 1)
  159. + (subreg:QI
  160. + (and:X (match_operand:X 1 "register_operand" "r")
  161. + (match_operand 2 "<X:shiftm1>" "i")) 0)))]
  162. + "TARGET_ZBS"
  163. + "bset\t%0,x0,%1"
  164. + [(set_attr "type" "bitmanip")])
  165. +
  166. +(define_insn "*bseti<mode>"
  167. + [(set (match_operand:X 0 "register_operand" "=r")
  168. + (ior:X (match_operand:X 1 "register_operand" "r")
  169. + (match_operand 2 "single_bit_mask_operand" "i")))]
  170. + "TARGET_ZBS"
  171. + "bseti\t%0,%1,%S2"
  172. + [(set_attr "type" "bitmanip")])
  173. +
  174. +(define_insn "*bsetw"
  175. + [(set (match_operand:DI 0 "register_operand" "=r")
  176. + (sign_extend:DI
  177. + (subreg:SI
  178. + (ior:DI (subreg:DI
  179. + (ashift:SI (const_int 1)
  180. + (match_operand:QI 2 "register_operand" "r")) 0)
  181. + (match_operand:DI 1 "register_operand" "r")) 0)))]
  182. + "TARGET_64BIT && TARGET_ZBS"
  183. + "bsetw\t%0,%1,%2"
  184. + [(set_attr "type" "bitmanip")])
  185. +
  186. +(define_insn "*bsetw_mask"
  187. + [(set (match_operand:DI 0 "register_operand" "=r")
  188. + (sign_extend:DI
  189. + (subreg:SI
  190. + (ior:DI (subreg:DI
  191. + (ashift:SI
  192. + (const_int 1)
  193. + (subreg:QI
  194. + (and:DI (match_operand:DI 2 "register_operand" "r")
  195. + (match_operand 3 "const31_operand" "i")) 0)) 0)
  196. + (match_operand:DI 1 "register_operand" "r")) 0)))]
  197. + "TARGET_64BIT && TARGET_ZBS"
  198. + "bsetw\t%0,%1,%2"
  199. + [(set_attr "type" "bitmanip")])
  200. +
  201. +(define_insn "*bsetiw"
  202. + [(set (match_operand:DI 0 "register_operand" "=r")
  203. + (ior:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
  204. + (match_operand 2 "single_bit_mask_operand" "i")))]
  205. + "TARGET_64BIT && TARGET_ZBS"
  206. + "bsetiw\t%0,%1,%S2"
  207. + [(set_attr "type" "bitmanip")])
  208. +
  209. +(define_insn "*bclr<mode>"
  210. + [(set (match_operand:X 0 "register_operand" "=r")
  211. + (and:X (rotate:X (const_int -2)
  212. + (match_operand:QI 2 "register_operand" "r"))
  213. + (match_operand:X 1 "register_operand" "r")))]
  214. + "TARGET_ZBS"
  215. + "bclr\t%0,%1,%2"
  216. + [(set_attr "type" "bitmanip")])
  217. +
  218. +(define_insn "*bclri<mode>"
  219. + [(set (match_operand:X 0 "register_operand" "=r")
  220. + (and:X (match_operand:X 1 "register_operand" "r")
  221. + (match_operand 2 "not_single_bit_mask_operand" "i")))]
  222. + "TARGET_ZBS"
  223. + "bclri\t%0,%1,%T2"
  224. + [(set_attr "type" "bitmanip")])
  225. +
  226. +(define_insn "*bclrw"
  227. + [(set (match_operand:DI 0 "register_operand" "=r")
  228. + (sign_extend:DI
  229. + (subreg:SI
  230. + (and:DI
  231. + (not:DI (subreg:DI
  232. + (ashift:SI (const_int 1)
  233. + (match_operand:QI 2 "register_operand" "r")) 0))
  234. + (match_operand:DI 1 "register_operand" "r")) 0)))]
  235. + "TARGET_64BIT && TARGET_ZBS"
  236. + "bclrw\t%0,%1,%2"
  237. + [(set_attr "type" "bitmanip")])
  238. +
  239. +(define_insn "*bclriw"
  240. + [(set (match_operand:DI 0 "register_operand" "=r")
  241. + (and:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
  242. + (match_operand 2 "not_single_bit_mask_operand" "i")))]
  243. + "TARGET_64BIT && TARGET_ZBS"
  244. + "bclriw\t%0,%1,%T2"
  245. + [(set_attr "type" "bitmanip")])
  246. +
  247. +(define_insn "*binv<mode>"
  248. + [(set (match_operand:X 0 "register_operand" "=r")
  249. + (xor:X (ashift:X (const_int 1)
  250. + (match_operand:QI 2 "register_operand" "r"))
  251. + (match_operand:X 1 "register_operand" "r")))]
  252. + "TARGET_ZBS"
  253. + "binv\t%0,%1,%2"
  254. + [(set_attr "type" "bitmanip")])
  255. +
  256. +(define_insn "*binvi<mode>"
  257. + [(set (match_operand:X 0 "register_operand" "=r")
  258. + (xor:X (match_operand:X 1 "register_operand" "r")
  259. + (match_operand 2 "single_bit_mask_operand" "i")))]
  260. + "TARGET_ZBS"
  261. + "binvi\t%0,%1,%S2"
  262. + [(set_attr "type" "bitmanip")])
  263. +
  264. +(define_insn "*binvw"
  265. + [(set (match_operand:DI 0 "register_operand" "=r")
  266. + (sign_extend:DI
  267. + (subreg:SI
  268. + (xor:DI (subreg:DI
  269. + (ashift:SI (const_int 1)
  270. + (match_operand:QI 2 "register_operand" "r")) 0)
  271. + (match_operand:DI 1 "register_operand" "r")) 0)))]
  272. + "TARGET_64BIT && TARGET_ZBS"
  273. + "binvw\t%0,%1,%2"
  274. + [(set_attr "type" "bitmanip")])
  275. +
  276. +(define_insn "*binviw"
  277. + [(set (match_operand:DI 0 "register_operand" "=r")
  278. + (xor:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
  279. + (match_operand 2 "single_bit_mask_operand" "i")))]
  280. + "TARGET_64BIT && TARGET_ZBS"
  281. + "binviw\t%0,%1,%S2"
  282. + [(set_attr "type" "bitmanip")])
  283. +
  284. +(define_insn "*bext<mode>"
  285. + [(set (match_operand:X 0 "register_operand" "=r")
  286. + (zero_extract:X (match_operand:X 1 "register_operand" "r")
  287. + (const_int 1)
  288. + (zero_extend:X
  289. + (match_operand:QI 2 "register_operand" "r"))))]
  290. + "TARGET_ZBS"
  291. + "bext\t%0,%1,%2"
  292. + [(set_attr "type" "bitmanip")])
  293. +
  294. +(define_insn "*bexti"
  295. + [(set (match_operand:X 0 "register_operand" "=r")
  296. + (zero_extract:X (match_operand:X 1 "register_operand" "r")
  297. + (const_int 1)
  298. + (match_operand 2 "immediate_operand" "i")))]
  299. + "TARGET_ZBS"
  300. + "bexti\t%0,%1,%2"
  301. + [(set_attr "type" "bitmanip")])
  302. +
  303. +(define_insn "*bextw"
  304. + [(set (match_operand:DI 0 "register_operand" "=r")
  305. + (and:DI
  306. + (subreg:DI
  307. + (lshiftrt:SI (match_operand:SI 1 "register_operand" "r")
  308. + (match_operand:QI 2 "register_operand" "r")) 0)
  309. + (const_int 1)))]
  310. + "TARGET_64BIT && TARGET_ZBS"
  311. + "bextw\t%0,%1,%2"
  312. + [(set_attr "type" "bitmanip")])
  313. +
  314. +;;; ??? s[lr]o*
  315. +
  316. +(define_insn "rotrsi3"
  317. + [(set (match_operand:SI 0 "register_operand" "=r")
  318. + (rotatert:SI (match_operand:SI 1 "register_operand" "r")
  319. + (match_operand:QI 2 "arith_operand" "rI")))]
  320. + "TARGET_ZBB || TARGET_ZBP"
  321. + { return TARGET_64BIT ? "ror%i2w\t%0,%1,%2" : "ror%i2\t%0,%1,%2"; }
  322. + [(set_attr "type" "bitmanip")])
  323. +
  324. +(define_insn "rotrdi3"
  325. + [(set (match_operand:DI 0 "register_operand" "=r")
  326. + (rotatert:DI (match_operand:DI 1 "register_operand" "r")
  327. + (match_operand:QI 2 "arith_operand" "rI")))]
  328. + "TARGET_64BIT && (TARGET_ZBB || TARGET_ZBP)"
  329. + "ror%i2\t%0,%1,%2"
  330. + [(set_attr "type" "bitmanip")])
  331. +
  332. +(define_expand "riscv_rolw"
  333. + [(match_operand:SI 0 "register_operand" "=r")
  334. + (match_operand:SI 1 "register_operand" "r")
  335. + (match_operand:SI 2 "register_operand" "r")]
  336. + "TARGET_64BIT && (TARGET_ZBB || TARGET_ZBP)"
  337. +{
  338. + emit_insn (gen_rotlsi3 (operands[0], operands[1], operands[2]));
  339. + DONE;
  340. +})
  341. +
  342. +(define_insn "rotlsi3"
  343. + [(set (match_operand:SI 0 "register_operand" "=r")
  344. + (rotate:SI (match_operand:SI 1 "register_operand" "r")
  345. + (match_operand:QI 2 "register_operand" "r")))]
  346. + "TARGET_ZBB || TARGET_ZBP"
  347. + { return TARGET_64BIT ? "rolw\t%0,%1,%2" : "rol\t%0,%1,%2"; }
  348. + [(set_attr "type" "bitmanip")])
  349. +
  350. +(define_insn "rotldi3"
  351. + [(set (match_operand:DI 0 "register_operand" "=r")
  352. + (rotate:DI (match_operand:DI 1 "register_operand" "r")
  353. + (match_operand:QI 2 "register_operand" "r")))]
  354. + "TARGET_64BIT && (TARGET_ZBB || TARGET_ZBP)"
  355. + "rol\t%0,%1,%2"
  356. + [(set_attr "type" "bitmanip")])
  357. +
  358. +(define_insn "rotlsi3_sext"
  359. + [(set (match_operand:DI 0 "register_operand" "=r")
  360. + (sign_extend:DI (rotate:SI (match_operand:SI 1 "register_operand" "r")
  361. + (match_operand:QI 2 "register_operand" "r"))))]
  362. + "TARGET_64BIT && (TARGET_ZBB || TARGET_ZBP)"
  363. + "rolw\t%0,%1,%2"
  364. + [(set_attr "type" "bitmanip")])
  365. +
  366. +;;; ??? grev
  367. +
  368. +(define_insn "bswapsi2"
  369. + [(set (match_operand:SI 0 "register_operand" "=r")
  370. + (bswap:SI (match_operand:SI 1 "register_operand" "r")))]
  371. + "TARGET_ZBB || TARGET_ZBP"
  372. +{
  373. + if (TARGET_64BIT)
  374. + return TARGET_ZBB ? "rev8\t%0,%1\n\tsrai\t%0,%0,32" : "rev8.w\t%0,%1";
  375. + else
  376. + return "rev8\t%0,%1";
  377. +}
  378. + [(set_attr "type" "bitmanip")])
  379. +
  380. +(define_insn "bswapdi2"
  381. + [(set (match_operand:DI 0 "register_operand" "=r")
  382. + (bswap:DI (match_operand:DI 1 "register_operand" "r")))]
  383. + "TARGET_64BIT && (TARGET_ZBB || TARGET_ZBP)"
  384. + "rev8\t%0,%1"
  385. + [(set_attr "type" "bitmanip")])
  386. +
  387. +;;; ??? shfl/unshfl
  388. +
  389. +;;; ??? bext/bdep
  390. +
  391. +;;; ??? clmul
  392. +
  393. +;;; ??? crc
  394. +
  395. +;;; ??? bmat
  396. +
  397. +(define_insn "*cmix"
  398. + [(set (match_operand:X 0 "register_operand" "=r")
  399. + (xor:X (and:X (xor:X (match_operand:X 1 "register_operand" "r")
  400. + (match_operand:X 3 "register_operand" "r"))
  401. + (match_operand:X 2 "register_operand" "r"))
  402. + (match_dup 3)))]
  403. + "TARGET_ZBT"
  404. + "cmix\t%0,%2,%1,%3"
  405. + [(set_attr "type" "bitmanip")])
  406. +
  407. +;; ??? Can we do this by using the % commutative constraint?
  408. +
  409. +(define_insn "*cmix2"
  410. + [(set (match_operand:X 0 "register_operand" "=r")
  411. + (xor:X (and:X (xor:X (match_operand:X 1 "register_operand" "r")
  412. + (match_operand:X 3 "register_operand" "r"))
  413. + (match_operand:X 2 "register_operand" "r"))
  414. + (match_dup 1)))]
  415. + "TARGET_ZBT"
  416. + "cmix\t%0,%2,%3,%1"
  417. + [(set_attr "type" "bitmanip")])
  418. +
  419. +;;; ??? cmov
  420. +
  421. +(define_insn "*mov<X:mode>cc_ne_bitmanip"
  422. + [(set (match_operand:X 0 "register_operand" "=r")
  423. + (if_then_else:X
  424. + (ne (match_operand:X 1 "register_operand" "r") (const_int 0))
  425. + (match_operand:X 2 "register_operand" "r")
  426. + (match_operand:X 3 "register_operand" "r")))]
  427. + "TARGET_ZBT"
  428. + "cmov\t%0,%1,%2,%3"
  429. + [(set_attr "type" "bitmanip")])
  430. +
  431. +(define_insn "*mov<X:mode>cc_eq_bitmanip"
  432. + [(set (match_operand:X 0 "register_operand" "=r")
  433. + (if_then_else:X
  434. + (eq (match_operand:X 1 "register_operand" "r") (const_int 0))
  435. + (match_operand:X 2 "register_operand" "r")
  436. + (match_operand:X 3 "register_operand" "r")))]
  437. + "TARGET_ZBT"
  438. + "cmov\t%0,%1,%3,%2"
  439. + [(set_attr "type" "bitmanip")])
  440. +
  441. +;;; ??? fs[lr]
  442. +
  443. +(define_insn "*shNadd"
  444. + [(set (match_operand:X 0 "register_operand" "=r")
  445. + (plus:X (ashift:X (match_operand:X 1 "register_operand" "r")
  446. + (match_operand:QI 2 "immediate_operand" "I"))
  447. + (match_operand:X 3 "register_operand" "r")))]
  448. + "TARGET_ZBA
  449. + && (INTVAL (operands[2]) >= 1) && (INTVAL (operands[2]) <= 3)"
  450. + "sh%2add\t%0,%1,%3"
  451. + [(set_attr "type" "bitmanip")])
  452. +
  453. +(define_insn "*shNadduw"
  454. + [(set (match_operand:DI 0 "register_operand" "=r")
  455. + (plus:DI
  456. + (and:DI (ashift:DI (match_operand:DI 1 "register_operand" "r")
  457. + (match_operand:QI 2 "immediate_operand" "I"))
  458. + (match_operand 3 "immediate_operand" ""))
  459. + (match_operand:DI 4 "register_operand" "r")))]
  460. + "TARGET_64BIT && TARGET_ZBA
  461. + && (INTVAL (operands[2]) >= 1) && (INTVAL (operands[2]) <= 3)
  462. + && (INTVAL (operands[3]) >> INTVAL (operands[2])) == 0xffffffff"
  463. + "sh%2add.uw\t%0,%1,%4"
  464. + [(set_attr "type" "bitmanip")])
  465. +
  466. +(define_insn "*add.uw"
  467. + [(set (match_operand:DI 0 "register_operand" "=r")
  468. + (plus:DI (zero_extend:DI
  469. + (match_operand:SI 1 "register_operand" "r"))
  470. + (match_operand:DI 2 "register_operand" "r")))]
  471. + "TARGET_64BIT && TARGET_ZBA"
  472. + "add.uw\t%0,%1,%2"
  473. + [(set_attr "type" "bitmanip")])
  474. +
  475. +(define_insn "*slliuw"
  476. + [(set (match_operand:DI 0 "register_operand" "=r")
  477. + (and:DI (ashift:DI (match_operand:DI 1 "register_operand" "r")
  478. + (match_operand:QI 2 "immediate_operand" "I"))
  479. + (match_operand 3 "immediate_operand" "")))]
  480. + "TARGET_64BIT && TARGET_ZBA
  481. + && (INTVAL (operands[3]) >> INTVAL (operands[2])) == 0xffffffff"
  482. + "slli.uw\t%0,%1,%2"
  483. + [(set_attr "type" "bitmanip")])
  484. +
  485. +;; ??? bfxp
  486. +
  487. +;; sext
  488. +
  489. +(define_insn "*extend<SHORT:mode><SUPERQI:mode>2_bitmanip"
  490. + [(set (match_operand:SUPERQI 0 "register_operand" "=r,r")
  491. + (sign_extend:SUPERQI
  492. + (match_operand:SHORT 1 "nonimmediate_operand" " r,m")))]
  493. + "TARGET_ZBB"
  494. + "@
  495. + sext.<SHORT:size>\t%0,%1
  496. + l<SHORT:size>\t%0,%1"
  497. + [(set_attr "type" "bitmanip")
  498. + (set_attr "length" "4")])
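
To make the intent of the patterns above concrete, here is a small illustrative C sketch of source idioms they are designed to match when the corresponding extensions (TARGET_ZBA/TARGET_ZBB/TARGET_ZBS) are enabled; the exact -march string and the final instruction selection depend on how this riscv-gcc-10.2.0 fork is configured, so the expected mnemonics in the comments are assumptions, not guaranteed output.

#include <stdint.h>

/* Sketch only: plausible lowerings with Zba/Zbb/Zbs enabled (assumed). */
unsigned popcount32(uint32_t x)           { return __builtin_popcount(x); }    /* cpop / cpopw */
int32_t  smin32(int32_t a, int32_t b)     { return a < b ? a : b; }            /* min          */
uint32_t andnot32(uint32_t a, uint32_t b) { return a & ~b; }                   /* andn         */
uint64_t index4(uint64_t base, uint64_t i){ return base + (i << 2); }          /* sh2add       */
uint32_t setbit32(uint32_t x, unsigned n) { return x | (1u << (n & 31)); }     /* bset         */
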
  499. diff --git a/gcc/config/riscv/rvintrin.h b/gcc/config/riscv/rvintrin.h
  500. new file mode 100644
  501. index 00000000000..0f6bfef3ff4
  502. --- /dev/null
  503. +++ b/gcc/config/riscv/rvintrin.h
  504. @@ -0,0 +1,1033 @@
  505. +/*
  506. + * RISC-V "B" extension proposal intrinsics and emulation
  507. + *
  508. + * Copyright (C) 2019 Clifford Wolf <clifford@clifford.at>
  509. + *
  510. + * Permission to use, copy, modify, and/or distribute this software for any
  511. + * purpose with or without fee is hereby granted, provided that the above
  512. + * copyright notice and this permission notice appear in all copies.
  513. + *
  514. + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
  515. + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  516. + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
  517. + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  518. + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  519. + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
  520. + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  521. + *
  522. + * ----------------------------------------------------------------------
  523. + *
  524. + * Define RVINTRIN_EMULATE to enable emulation mode.
  525. + *
  526. + * This header defines C inline functions with "mockup intrinsics" for
  527. + * RISC-V "B" extension proposal instructions.
  528. + *
  529. + * _rv_*(...)
  530. + * RV32/64 intrinsics that operate on the "long" data type
  531. + *
  532. + * _rv32_*(...)
  533. + * RV32/64 intrinsics that operate on the "int32_t" data type
  534. + *
  535. + * _rv64_*(...)
  536. + * RV64-only intrinsics that operate on the "int64_t" data type
  537. + *
  538. + */
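
A minimal usage sketch of the naming scheme described above (illustrative only; it uses just the intrinsics declared later in this header and assumes the header is on the include path — defining RVINTRIN_EMULATE before the include forces the portable C fallbacks, e.g. when building on a non-RISC-V host):

#define RVINTRIN_EMULATE 1   /* optional: use the C emulation instead of inline asm */
#include <rvintrin.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    int32_t x = 0x00f000f0;
    /* _rv32_* operate on int32_t, _rv_* on long, _rv64_* on int64_t (RV64 only). */
    printf("pcnt=%d clz=%d ror4=%08x min=%d\n",
           (int)_rv32_pcnt(x), (int)_rv32_clz(x),
           (unsigned)_rv32_ror(x, 4), (int)_rv32_min(x, 7));
    return 0;
}
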
  539. +
  540. +#ifndef RVINTRIN_H
  541. +#define RVINTRIN_H
  542. +
  543. +#include <limits.h>
  544. +#include <stdint.h>
  545. +
  546. +#if !defined(__riscv_xlen) && !defined(RVINTRIN_EMULATE)
  547. +# warning "Target is not RISC-V. Enabling <rvintrin.h> emulation mode."
  548. +# define RVINTRIN_EMULATE 1
  549. +#endif
  550. +
  551. +#ifndef RVINTRIN_EMULATE
  552. +
  553. +#if __riscv_xlen == 32
  554. +# define RVINTRIN_RV32
  555. +#endif
  556. +
  557. +#if __riscv_xlen == 64
  558. +# define RVINTRIN_RV64
  559. +#endif
  560. +
  561. +#ifdef RVINTRIN_RV32
  562. +static inline int32_t _rv32_clz (int32_t rs1) { int32_t rd; __asm__ ("clz %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  563. +static inline int32_t _rv32_ctz (int32_t rs1) { int32_t rd; __asm__ ("ctz %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  564. +static inline int32_t _rv32_pcnt (int32_t rs1) { int32_t rd; __asm__ ("pcnt %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  565. +static inline int32_t _rv32_sext_b(int32_t rs1) { int32_t rd; __asm__ ("sext.b %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  566. +static inline int32_t _rv32_sext_h(int32_t rs1) { int32_t rd; __asm__ ("sext.h %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  567. +#endif
  568. +
  569. +#ifdef RVINTRIN_RV64
  570. +static inline int32_t _rv32_clz (int32_t rs1) { int32_t rd; __asm__ ("clzw %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  571. +static inline int32_t _rv32_ctz (int32_t rs1) { int32_t rd; __asm__ ("ctzw %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  572. +static inline int32_t _rv32_pcnt (int32_t rs1) { int32_t rd; __asm__ ("pcntw %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  573. +static inline int32_t _rv32_sext_b(int32_t rs1) { int32_t rd; __asm__ ("sext.b %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  574. +static inline int32_t _rv32_sext_h(int32_t rs1) { int32_t rd; __asm__ ("sext.h %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  575. +
  576. +static inline int64_t _rv64_clz (int64_t rs1) { int64_t rd; __asm__ ("clz %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  577. +static inline int64_t _rv64_ctz (int64_t rs1) { int64_t rd; __asm__ ("ctz %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  578. +static inline int64_t _rv64_pcnt (int64_t rs1) { int64_t rd; __asm__ ("pcnt %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  579. +static inline int32_t _rv64_sext_b(int32_t rs1) { int32_t rd; __asm__ ("sext.b %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  580. +static inline int32_t _rv64_sext_h(int32_t rs1) { int32_t rd; __asm__ ("sext.h %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  581. +#endif
  582. +
  583. +#ifdef RVINTRIN_RV32
  584. +static inline int32_t _rv32_pack (int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("pack %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  585. +static inline int32_t _rv32_packu(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("packu %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  586. +static inline int32_t _rv32_packh(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("packh %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  587. +static inline int32_t _rv32_bfp (int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("bfp %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  588. +#endif
  589. +
  590. +#ifdef RVINTRIN_RV64
  591. +static inline int32_t _rv32_pack (int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("packw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  592. +static inline int32_t _rv32_packu(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("packuw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  593. +static inline int32_t _rv32_packh(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("packh %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  594. +static inline int32_t _rv32_bfp (int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("bfpw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  595. +
  596. +static inline int64_t _rv64_pack (int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("pack %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  597. +static inline int64_t _rv64_packu(int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("packu %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  598. +static inline int64_t _rv64_packh(int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("packh %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  599. +static inline int64_t _rv64_bfp (int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("bfp %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  600. +#endif
  601. +
  602. +static inline int32_t _rv32_min (int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("min %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  603. +static inline int32_t _rv32_minu(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("minu %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  604. +static inline int32_t _rv32_max (int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("max %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  605. +static inline int32_t _rv32_maxu(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("maxu %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  606. +
  607. +#ifdef RVINTRIN_RV64
  608. +static inline int64_t _rv64_min (int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("min %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  609. +static inline int64_t _rv64_minu(int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("minu %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  610. +static inline int64_t _rv64_max (int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("max %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  611. +static inline int64_t _rv64_maxu(int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("maxu %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  612. +#endif
  613. +
  614. +#ifdef RVINTRIN_RV32
  615. +static inline int32_t _rv32_bset (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("bseti %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("bset %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  616. +static inline int32_t _rv32_bclr (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("bclri %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("bclr %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  617. +static inline int32_t _rv32_binv (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("binvi %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("binv %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  618. +static inline int32_t _rv32_bext (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("bexti %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("bext %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  619. +#endif
  620. +
  621. +#ifdef RVINTRIN_RV64
  622. +static inline int32_t _rv32_bset (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("bsetiw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("bsetw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  623. +static inline int32_t _rv32_bclr (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("bclriw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("bclrw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  624. +static inline int32_t _rv32_binv (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("binviw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("binvw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  625. +static inline int32_t _rv32_bext (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("bexti %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("bextw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  626. +
  627. +static inline int64_t _rv64_bset (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("bseti %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("bset %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  628. +static inline int64_t _rv64_bclr (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("bclri %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("bclr %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  629. +static inline int64_t _rv64_binv (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("binvi %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("binv %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  630. +static inline int64_t _rv64_bext (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("bexti %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("bext %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  631. +#endif
  632. +
  633. +#ifdef RVINTRIN_RV32
  634. +static inline int32_t _rv32_sll (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("slli %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("sll %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  635. +static inline int32_t _rv32_srl (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("srli %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("srl %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  636. +static inline int32_t _rv32_sra (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("srai %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("sra %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  637. +static inline int32_t _rv32_slo (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("sloi %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("slo %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  638. +static inline int32_t _rv32_sro (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("sroi %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("sro %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  639. +static inline int32_t _rv32_rol (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("rori %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & -rs2)); else __asm__ ("rol %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  640. +static inline int32_t _rv32_ror (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("rori %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("ror %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  641. +static inline int32_t _rv32_grev (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("grevi %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("grev %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  642. +static inline int32_t _rv32_gorc (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("gorci %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("gorc %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  643. +static inline int32_t _rv32_shfl (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("shfli %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(15 & rs2)); else __asm__ ("shfl %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  644. +static inline int32_t _rv32_unshfl (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("unshfli %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(15 & rs2)); else __asm__ ("unshfl %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  645. +#endif
  646. +
  647. +#ifdef RVINTRIN_RV64
  648. +static inline int32_t _rv32_sll (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("slliw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("sllw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  649. +static inline int32_t _rv32_srl (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("srliw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("srlw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  650. +static inline int32_t _rv32_sra (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("sraiw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("sraw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  651. +static inline int32_t _rv32_slo (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("sloiw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("slow %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  652. +static inline int32_t _rv32_sro (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("sroiw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("srow %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  653. +static inline int32_t _rv32_rol (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("roriw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & -rs2)); else __asm__ ("rolw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  654. +static inline int32_t _rv32_ror (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("roriw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("rorw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  655. +static inline int32_t _rv32_grev (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("greviw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("grevw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  656. +static inline int32_t _rv32_gorc (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("gorciw %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("gorcw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  657. +static inline int32_t _rv32_shfl (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("shfli %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(15 & rs2)); else __asm__ ("shflw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  658. +static inline int32_t _rv32_unshfl (int32_t rs1, int32_t rs2) { int32_t rd; if (__builtin_constant_p(rs2)) __asm__ ("unshfli %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(15 & rs2)); else __asm__ ("unshflw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  659. +
  660. +static inline int64_t _rv64_sll (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("slli %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("sll %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  661. +static inline int64_t _rv64_srl (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("srli %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("srl %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  662. +static inline int64_t _rv64_sra (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("srai %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("sra %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  663. +static inline int64_t _rv64_slo (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("sloi %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("slo %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  664. +static inline int64_t _rv64_sro (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("sroi %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("sro %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  665. +static inline int64_t _rv64_rol (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("rori %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & -rs2)); else __asm__ ("rol %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  666. +static inline int64_t _rv64_ror (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("rori %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("ror %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  667. +static inline int64_t _rv64_grev (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("grevi %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("grev %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  668. +static inline int64_t _rv64_gorc (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("gorci %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(63 & rs2)); else __asm__ ("gorc %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  669. +static inline int64_t _rv64_shfl (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("shfli %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("shfl %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  670. +static inline int64_t _rv64_unshfl (int64_t rs1, int64_t rs2) { int64_t rd; if (__builtin_constant_p(rs2)) __asm__ ("unshfli %0, %1, %2" : "=r"(rd) : "r"(rs1), "i"(31 & rs2)); else __asm__ ("unshfl %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  671. +#endif
  672. +
  673. +#ifdef RVINTRIN_RV32
  674. +static inline int32_t _rv32_bext(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("bext %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  675. +static inline int32_t _rv32_bdep(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("bdep %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  676. +#endif
  677. +
  678. +#ifdef RVINTRIN_RV64
  679. +static inline int32_t _rv32_bext(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("bextw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  680. +static inline int32_t _rv32_bdep(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("bdepw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  681. +
  682. +static inline int64_t _rv64_bext(int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("bext %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  683. +static inline int64_t _rv64_bdep(int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("bdep %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  684. +#endif
  685. +
  686. +#ifdef RVINTRIN_RV32
  687. +static inline int32_t _rv32_clmul (int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("clmul %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  688. +static inline int32_t _rv32_clmulh(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("clmulh %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  689. +static inline int32_t _rv32_clmulr(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("clmulr %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  690. +#endif
  691. +
  692. +#ifdef RVINTRIN_RV64
  693. +static inline int32_t _rv32_clmul (int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("clmulw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  694. +static inline int32_t _rv32_clmulh(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("clmulhw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  695. +static inline int32_t _rv32_clmulr(int32_t rs1, int32_t rs2) { int32_t rd; __asm__ ("clmulrw %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  696. +
  697. +static inline int64_t _rv64_clmul (int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("clmul %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  698. +static inline int64_t _rv64_clmulh(int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("clmulh %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  699. +static inline int64_t _rv64_clmulr(int64_t rs1, int64_t rs2) { int64_t rd; __asm__ ("clmulr %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  700. +#endif
  701. +
  702. +static inline long _rv_crc32_b (long rs1) { long rd; __asm__ ("crc32.b %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  703. +static inline long _rv_crc32_h (long rs1) { long rd; __asm__ ("crc32.h %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  704. +static inline long _rv_crc32_w (long rs1) { long rd; __asm__ ("crc32.w %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  705. +
  706. +static inline long _rv_crc32c_b(long rs1) { long rd; __asm__ ("crc32c.b %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  707. +static inline long _rv_crc32c_h(long rs1) { long rd; __asm__ ("crc32c.h %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  708. +static inline long _rv_crc32c_w(long rs1) { long rd; __asm__ ("crc32c.w %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  709. +
  710. +#ifdef RVINTRIN_RV64
  711. +static inline long _rv_crc32_d (long rs1) { long rd; __asm__ ("crc32.d %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  712. +static inline long _rv_crc32c_d(long rs1) { long rd; __asm__ ("crc32c.d %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  713. +#endif
  714. +
  715. +#ifdef RVINTRIN_RV64
  716. +static inline int64_t _rv64_bmatflip(int64_t rs1) { long rd; __asm__ ("bmatflip %0, %1" : "=r"(rd) : "r"(rs1)); return rd; }
  717. +static inline int64_t _rv64_bmator (int64_t rs1, int64_t rs2) { long rd; __asm__ ("bmator %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  718. +static inline int64_t _rv64_bmatxor (int64_t rs1, int64_t rs2) { long rd; __asm__ ("bmatxor %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  719. +#endif
  720. +
  721. +static inline long _rv_cmix(long rs2, long rs1, long rs3) { long rd; __asm__ ("cmix %0, %1, %2, %3" : "=r"(rd) : "r"(rs2), "r"(rs1), "r"(rs3)); return rd; }
  722. +static inline long _rv_cmov(long rs2, long rs1, long rs3) { long rd; __asm__ ("cmov %0, %1, %2, %3" : "=r"(rd) : "r"(rs2), "r"(rs1), "r"(rs3)); return rd; }
  723. +
  724. +#ifdef RVINTRIN_RV32
  725. +static inline int32_t _rv32_fsl(int32_t rs1, int32_t rs3, int32_t rs2)
  726. +{
  727. + int32_t rd;
  728. + if (__builtin_constant_p(rs2)) {
  729. + rs2 &= 63;
  730. + if (rs2 < 32)
  731. + __asm__ ("fsli %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "i"(rs2));
  732. + else
  733. + __asm__ ("fsli %0, %1, %2, %3" : "=r"(rd) : "r"(rs3), "r"(rs1), "i"(rs2 & 31));
  734. + } else {
  735. + __asm__ ("fsl %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "r"(rs2));
  736. + }
  737. + return rd;
  738. +}
  739. +
  740. +static inline int32_t _rv32_fsr(int32_t rs1, int32_t rs3, int32_t rs2)
  741. +{
  742. + int32_t rd;
  743. + if (__builtin_constant_p(rs2)) {
  744. + rs2 &= 63;
  745. + if (rs2 < 32)
  746. + __asm__ ("fsri %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "i"(rs2));
  747. + else
  748. + __asm__ ("fsri %0, %1, %2, %3" : "=r"(rd) : "r"(rs3), "r"(rs1), "i"(rs2 & 31));
  749. + } else {
  750. + __asm__ ("fsr %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "r"(rs2));
  751. + }
  752. + return rd;
  753. +}
  754. +#endif
  755. +
  756. +#ifdef RVINTRIN_RV64
  757. +static inline int32_t _rv32_fsl(int32_t rs1, int32_t rs3, int32_t rs2)
  758. +{
  759. + int32_t rd;
  760. + if (__builtin_constant_p(rs2)) {
  761. + rs2 &= 63;
  762. + if (rs2 < 32)
  763. + __asm__ ("fsliw %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "i"(rs2));
  764. + else
  765. + __asm__ ("fsliw %0, %1, %2, %3" : "=r"(rd) : "r"(rs3), "r"(rs1), "i"(rs2 & 31));
  766. + } else {
  767. + __asm__ ("fslw %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "r"(rs2));
  768. + }
  769. + return rd;
  770. +}
  771. +
  772. +static inline int32_t _rv32_fsr(int32_t rs1, int32_t rs3, int32_t rs2)
  773. +{
  774. + int32_t rd;
  775. + if (__builtin_constant_p(rs2)) {
  776. + rs2 &= 63;
  777. + if (rs2 < 32)
  778. + __asm__ ("fsriw %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "i"(rs2));
  779. + else
  780. + __asm__ ("fsriw %0, %1, %2, %3" : "=r"(rd) : "r"(rs3), "r"(rs1), "i"(rs2 & 31));
  781. + } else {
  782. + __asm__ ("fsrw %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "r"(rs2));
  783. + }
  784. + return rd;
  785. +}
  786. +
  787. +static inline int64_t _rv64_fsl(int64_t rs1, int64_t rs3, int64_t rs2)
  788. +{
  789. + int64_t rd;
  790. + if (__builtin_constant_p(rs2)) {
  791. + rs2 &= 127;
  792. + if (rs2 < 64)
  793. + __asm__ ("fsli %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "i"(rs2));
  794. + else
  795. + __asm__ ("fsli %0, %1, %2, %3" : "=r"(rd) : "r"(rs3), "r"(rs1), "i"(rs2 & 63));
  796. + } else {
  797. + __asm__ ("fsl %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "r"(rs2));
  798. + }
  799. + return rd;
  800. +}
  801. +
  802. +static inline int64_t _rv64_fsr(int64_t rs1, int64_t rs3, int64_t rs2)
  803. +{
  804. + int64_t rd;
  805. + if (__builtin_constant_p(rs2)) {
  806. + rs2 &= 127;
  807. + if (rs2 < 64)
  808. + __asm__ ("fsri %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "i"(rs2));
  809. + else
  810. + __asm__ ("fsri %0, %1, %2, %3" : "=r"(rd) : "r"(rs3), "r"(rs1), "i"(rs2 & 63));
  811. + } else {
  812. + __asm__ ("fsr %0, %1, %2, %3" : "=r"(rd) : "r"(rs1), "r"(rs3), "r"(rs2));
  813. + }
  814. + return rd;
  815. +}
  816. +#endif
  817. +
  818. +static inline long _rv_andn(long rs1, long rs2) { long rd; __asm__ ("andn %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  819. +static inline long _rv_orn (long rs1, long rs2) { long rd; __asm__ ("orn %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  820. +static inline long _rv_xnor(long rs1, long rs2) { long rd; __asm__ ("xnor %0, %1, %2" : "=r"(rd) : "r"(rs1), "r"(rs2)); return rd; }
  821. +
  822. +#else // RVINTRIN_EMULATE
  823. +
  824. +#if UINT_MAX != 0xffffffffU
  825. +# error "<rvintrin.h> emulation mode only supports systems with sizeof(int) = 4."
  826. +#endif
  827. +
  828. +#if (ULLONG_MAX == 0xffffffffLLU) || (ULLONG_MAX != 0xffffffffffffffffLLU)
  829. +# error "<rvintrin.h> emulation mode only supports systems with sizeof(long long) = 8."
  830. +#endif
  831. +
  832. +#if UINT_MAX == ULONG_MAX
  833. +# define RVINTRIN_RV32
  834. +#else
  835. +# define RVINTRIN_RV64
  836. +#endif
  837. +
  838. +#ifdef RVINTRIN_NOBUILTINS
  839. +static inline int32_t _rv32_clz(int32_t rs1) { for (int i=0; i < 32; i++) { if (1 & (rs1 >> (31-i))) return i; } return 32; }
  840. +static inline int64_t _rv64_clz(int64_t rs1) { for (int i=0; i < 64; i++) { if (1 & (rs1 >> (63-i))) return i; } return 64; }
  841. +
  842. +static inline int32_t _rv32_ctz(int32_t rs1) { for (int i=0; i < 32; i++) { if (1 & (rs1 >> i)) return i; } return 32; }
  843. +static inline int64_t _rv64_ctz(int64_t rs1) { for (int i=0; i < 64; i++) { if (1 & (rs1 >> i)) return i; } return 64; }
  844. +
  845. +static inline int32_t _rv32_pcnt(int32_t rs1) { int k=0; for (int i=0; i < 32; i++) { if (1 & (rs1 >> i)) k++; } return k; }
  846. +static inline int64_t _rv64_pcnt(int64_t rs1) { int k=0; for (int i=0; i < 64; i++) { if (1 & (rs1 >> i)) k++; } return k; }
  847. +#else
  848. +static inline int32_t _rv32_clz(int32_t rs1) { return rs1 ? __builtin_clz(rs1) : 32; }
  849. +static inline int64_t _rv64_clz(int64_t rs1) { return rs1 ? __builtin_clzll(rs1) : 64; }
  850. +
  851. +static inline int32_t _rv32_ctz(int32_t rs1) { return rs1 ? __builtin_ctz(rs1) : 32; }
  852. +static inline int64_t _rv64_ctz(int64_t rs1) { return rs1 ? __builtin_ctzll(rs1) : 64; }
  853. +
  854. +static inline int32_t _rv32_pcnt(int32_t rs1) { return __builtin_popcount(rs1); }
  855. +static inline int64_t _rv64_pcnt(int64_t rs1) { return __builtin_popcountll(rs1); }
  856. +#endif
  857. +
  858. +static inline int32_t _rv32_sext_b(int32_t rs1) { return rs1 << (32-8) >> (32-8); }
  859. +static inline int64_t _rv64_sext_b(int64_t rs1) { return rs1 << (64-8) >> (64-8); }
  860. +
  861. +static inline int32_t _rv32_sext_h(int32_t rs1) { return rs1 << (32-16) >> (32-16); }
  862. +static inline int64_t _rv64_sext_h(int64_t rs1) { return rs1 << (64-16) >> (64-16); }
  863. +
  864. +static inline int32_t _rv32_pack(int32_t rs1, int32_t rs2) { return (rs1 & 0x0000ffff) | (rs2 << 16); }
  865. +static inline int64_t _rv64_pack(int64_t rs1, int64_t rs2) { return (rs1 & 0xffffffffLL) | (rs2 << 32); }
  866. +
  867. +static inline int32_t _rv32_packu(int32_t rs1, int32_t rs2) { return ((rs1 >> 16) & 0x0000ffff) | (rs2 >> 16 << 16); }
  868. +static inline int64_t _rv64_packu(int64_t rs1, int64_t rs2) { return ((rs1 >> 32) & 0xffffffffLL) | (rs2 >> 32 << 32); }
  869. +
  870. +static inline int32_t _rv32_packh(int32_t rs1, int32_t rs2) { return (rs1 & 0xff) | ((rs2 & 0xff) << 8); }
  871. +static inline int64_t _rv64_packh(int64_t rs1, int64_t rs2) { return (rs1 & 0xff) | ((rs2 & 0xff) << 8); }
  872. +
  873. +static inline int32_t _rv32_min (int32_t rs1, int32_t rs2) { return rs1 < rs2 ? rs1 : rs2; }
  874. +static inline int32_t _rv32_minu(int32_t rs1, int32_t rs2) { return (uint32_t)rs1 < (uint32_t)rs2 ? rs1 : rs2; }
  875. +static inline int32_t _rv32_max (int32_t rs1, int32_t rs2) { return rs1 > rs2 ? rs1 : rs2; }
  876. +static inline int32_t _rv32_maxu(int32_t rs1, int32_t rs2) { return (uint32_t)rs1 > (uint32_t)rs2 ? rs1 : rs2; }
  877. +
  878. +static inline int64_t _rv64_min (int64_t rs1, int64_t rs2) { return rs1 < rs2 ? rs1 : rs2; }
  879. +static inline int64_t _rv64_minu(int64_t rs1, int64_t rs2) { return (uint64_t)rs1 < (uint64_t)rs2 ? rs1 : rs2; }
  880. +static inline int64_t _rv64_max (int64_t rs1, int64_t rs2) { return rs1 > rs2 ? rs1 : rs2; }
  881. +static inline int64_t _rv64_maxu(int64_t rs1, int64_t rs2) { return (uint64_t)rs1 > (uint64_t)rs2 ? rs1 : rs2; }
  882. +
  883. +static inline int32_t _rv32_bset (int32_t rs1, int32_t rs2) { return rs1 | (1 << (rs2 & 31)); }
  884. +static inline int32_t _rv32_bclr (int32_t rs1, int32_t rs2) { return rs1 & ~(1 << (rs2 & 31)); }
  885. +static inline int32_t _rv32_binv (int32_t rs1, int32_t rs2) { return rs1 ^ (1 << (rs2 & 31)); }
  886. +static inline int32_t _rv32_bext (int32_t rs1, int32_t rs2) { return 1 & (rs1 >> (rs2 & 31)); }
  887. +
  888. +static inline int64_t _rv64_bset (int64_t rs1, int64_t rs2) { return rs1 | (1LL << (rs2 & 63)); }
  889. +static inline int64_t _rv64_bclr (int64_t rs1, int64_t rs2) { return rs1 & ~(1LL << (rs2 & 63)); }
  890. +static inline int64_t _rv64_binv (int64_t rs1, int64_t rs2) { return rs1 ^ (1LL << (rs2 & 63)); }
  891. +static inline int64_t _rv64_bext (int64_t rs1, int64_t rs2) { return 1LL & (rs1 >> (rs2 & 63)); }
  892. +
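+// Shift helpers: slo/sro are "shift ones in" (vacated bits are filled with
+// 1s); rol/ror are rotates built from a pair of opposite shifts.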
  893. +static inline int32_t _rv32_sll (int32_t rs1, int32_t rs2) { return rs1 << (rs2 & 31); }
  894. +static inline int32_t _rv32_srl (int32_t rs1, int32_t rs2) { return (uint32_t)rs1 >> (rs2 & 31); }
  895. +static inline int32_t _rv32_sra (int32_t rs1, int32_t rs2) { return rs1 >> (rs2 & 31); }
  896. +static inline int32_t _rv32_slo (int32_t rs1, int32_t rs2) { return ~(~rs1 << (rs2 & 31)); }
  897. +static inline int32_t _rv32_sro (int32_t rs1, int32_t rs2) { return ~(~(uint32_t)rs1 >> (rs2 & 31)); }
  898. +static inline int32_t _rv32_rol (int32_t rs1, int32_t rs2) { return _rv32_sll(rs1, rs2) | _rv32_srl(rs1, -rs2); }
  899. +static inline int32_t _rv32_ror (int32_t rs1, int32_t rs2) { return _rv32_srl(rs1, rs2) | _rv32_sll(rs1, -rs2); }
  900. +
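+// bfp: bit-field place. The config word in rs2[31:16] gives len (rs2 bits
+// 27:24, with 0 meaning 16) and off (rs2 bits 20:16); the low len bits of
+// rs2 are inserted into rs1 at bit position off.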
  901. +static inline int32_t _rv32_bfp(int32_t rs1, int32_t rs2)
  902. +{
  903. + uint32_t cfg = rs2 >> 16;
  904. + int len = (cfg >> 8) & 15;
  905. + int off = cfg & 31;
  906. + len = len ? len : 16;
  907. + uint32_t mask = _rv32_slo(0, len) << off;
  908. + uint32_t data = rs2 << off;
  909. + return (data & mask) | (rs1 & ~mask);
  910. +}
  911. +
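+// grev: generalized reverse. Each set bit of the shift amount swaps
+// adjacent blocks of that size, so shamt 31 reverses all bits, 24 swaps
+// bytes (rev8) and 7 reverses the bits within each byte (rev_b).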
  912. +static inline int32_t _rv32_grev(int32_t rs1, int32_t rs2)
  913. +{
  914. + uint32_t x = rs1;
  915. + int shamt = rs2 & 31;
  916. + if (shamt & 1) x = ((x & 0x55555555) << 1) | ((x & 0xAAAAAAAA) >> 1);
  917. + if (shamt & 2) x = ((x & 0x33333333) << 2) | ((x & 0xCCCCCCCC) >> 2);
  918. + if (shamt & 4) x = ((x & 0x0F0F0F0F) << 4) | ((x & 0xF0F0F0F0) >> 4);
  919. + if (shamt & 8) x = ((x & 0x00FF00FF) << 8) | ((x & 0xFF00FF00) >> 8);
  920. + if (shamt & 16) x = ((x & 0x0000FFFF) << 16) | ((x & 0xFFFF0000) >> 16);
  921. + return x;
  922. +}
  923. +
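+// gorc: generalized OR-combine. Same stages as grev, but ORed into place
+// instead of swapped; e.g. shamt 7 (orc_b) turns every nonzero byte into 0xff.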
  924. +static inline int32_t _rv32_gorc(int32_t rs1, int32_t rs2)
  925. +{
  926. + uint32_t x = rs1;
  927. + int shamt = rs2 & 31;
  928. + if (shamt & 1) x |= ((x & 0x55555555) << 1) | ((x & 0xAAAAAAAA) >> 1);
  929. + if (shamt & 2) x |= ((x & 0x33333333) << 2) | ((x & 0xCCCCCCCC) >> 2);
  930. + if (shamt & 4) x |= ((x & 0x0F0F0F0F) << 4) | ((x & 0xF0F0F0F0) >> 4);
  931. + if (shamt & 8) x |= ((x & 0x00FF00FF) << 8) | ((x & 0xFF00FF00) >> 8);
  932. + if (shamt & 16) x |= ((x & 0x0000FFFF) << 16) | ((x & 0xFFFF0000) >> 16);
  933. + return x;
  934. +}
  935. +
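+// One butterfly stage shared by shfl/unshfl: bits selected by maskL move
+// left by N, bits selected by maskR move right by N, all others stay put.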
  936. +static inline uint32_t _rvintrin_shuffle32_stage(uint32_t src, uint32_t maskL, uint32_t maskR, int N)
  937. +{
  938. + uint32_t x = src & ~(maskL | maskR);
  939. + x |= ((src << N) & maskL) | ((src >> N) & maskR);
  940. + return x;
  941. +}
  942. +
  943. +static inline int32_t _rv32_shfl(int32_t rs1, int32_t rs2)
  944. +{
  945. + uint32_t x = rs1;
  946. + int shamt = rs2 & 15;
  947. +
  948. + if (shamt & 8) x = _rvintrin_shuffle32_stage(x, 0x00ff0000, 0x0000ff00, 8);
  949. + if (shamt & 4) x = _rvintrin_shuffle32_stage(x, 0x0f000f00, 0x00f000f0, 4);
  950. + if (shamt & 2) x = _rvintrin_shuffle32_stage(x, 0x30303030, 0x0c0c0c0c, 2);
  951. + if (shamt & 1) x = _rvintrin_shuffle32_stage(x, 0x44444444, 0x22222222, 1);
  952. +
  953. + return x;
  954. +}
  955. +
  956. +static inline int32_t _rv32_unshfl(int32_t rs1, int32_t rs2)
  957. +{
  958. + uint32_t x = rs1;
  959. + int shamt = rs2 & 15;
  960. +
  961. + if (shamt & 1) x = _rvintrin_shuffle32_stage(x, 0x44444444, 0x22222222, 1);
  962. + if (shamt & 2) x = _rvintrin_shuffle32_stage(x, 0x30303030, 0x0c0c0c0c, 2);
  963. + if (shamt & 4) x = _rvintrin_shuffle32_stage(x, 0x0f000f00, 0x00f000f0, 4);
  964. + if (shamt & 8) x = _rvintrin_shuffle32_stage(x, 0x00ff0000, 0x0000ff00, 8);
  965. +
  966. + return x;
  967. +}
  968. +
  969. +static inline int64_t _rv64_sll (int64_t rs1, int64_t rs2) { return rs1 << (rs2 & 63); }
  970. +static inline int64_t _rv64_srl (int64_t rs1, int64_t rs2) { return (uint64_t)rs1 >> (rs2 & 63); }
  971. +static inline int64_t _rv64_sra (int64_t rs1, int64_t rs2) { return rs1 >> (rs2 & 63); }
  972. +static inline int64_t _rv64_slo (int64_t rs1, int64_t rs2) { return ~(~rs1 << (rs2 & 63)); }
  973. +static inline int64_t _rv64_sro (int64_t rs1, int64_t rs2) { return ~(~(uint64_t)rs1 >> (rs2 & 63)); }
  974. +static inline int64_t _rv64_rol (int64_t rs1, int64_t rs2) { return _rv64_sll(rs1, rs2) | _rv64_srl(rs1, -rs2); }
  975. +static inline int64_t _rv64_ror (int64_t rs1, int64_t rs2) { return _rv64_srl(rs1, rs2) | _rv64_sll(rs1, -rs2); }
  976. +
  977. +static inline int64_t _rv64_bfp(int64_t rs1, int64_t rs2)
  978. +{
  979. + uint64_t cfg = (uint64_t)rs2 >> 32;
  980. + if ((cfg >> 30) == 2)
  981. + cfg = cfg >> 16;
  982. + int len = (cfg >> 8) & 31;
  983. + int off = cfg & 63;
  984. + len = len ? len : 32;
  985. + uint64_t mask = _rv64_slo(0, len) << off;
  986. + uint64_t data = rs2 << off;
  987. + return (data & mask) | (rs1 & ~mask);
  988. +}
  989. +
  990. +static inline int64_t _rv64_grev(int64_t rs1, int64_t rs2)
  991. +{
  992. + uint64_t x = rs1;
  993. + int shamt = rs2 & 63;
  994. + if (shamt & 1) x = ((x & 0x5555555555555555LL) << 1) | ((x & 0xAAAAAAAAAAAAAAAALL) >> 1);
  995. + if (shamt & 2) x = ((x & 0x3333333333333333LL) << 2) | ((x & 0xCCCCCCCCCCCCCCCCLL) >> 2);
  996. + if (shamt & 4) x = ((x & 0x0F0F0F0F0F0F0F0FLL) << 4) | ((x & 0xF0F0F0F0F0F0F0F0LL) >> 4);
  997. + if (shamt & 8) x = ((x & 0x00FF00FF00FF00FFLL) << 8) | ((x & 0xFF00FF00FF00FF00LL) >> 8);
  998. + if (shamt & 16) x = ((x & 0x0000FFFF0000FFFFLL) << 16) | ((x & 0xFFFF0000FFFF0000LL) >> 16);
  999. + if (shamt & 32) x = ((x & 0x00000000FFFFFFFFLL) << 32) | ((x & 0xFFFFFFFF00000000LL) >> 32);
  1000. + return x;
  1001. +}
  1002. +
  1003. +static inline int64_t _rv64_gorc(int64_t rs1, int64_t rs2)
  1004. +{
  1005. + uint64_t x = rs1;
  1006. + int shamt = rs2 & 63;
  1007. + if (shamt & 1) x |= ((x & 0x5555555555555555LL) << 1) | ((x & 0xAAAAAAAAAAAAAAAALL) >> 1);
  1008. + if (shamt & 2) x |= ((x & 0x3333333333333333LL) << 2) | ((x & 0xCCCCCCCCCCCCCCCCLL) >> 2);
  1009. + if (shamt & 4) x |= ((x & 0x0F0F0F0F0F0F0F0FLL) << 4) | ((x & 0xF0F0F0F0F0F0F0F0LL) >> 4);
  1010. + if (shamt & 8) x |= ((x & 0x00FF00FF00FF00FFLL) << 8) | ((x & 0xFF00FF00FF00FF00LL) >> 8);
  1011. + if (shamt & 16) x |= ((x & 0x0000FFFF0000FFFFLL) << 16) | ((x & 0xFFFF0000FFFF0000LL) >> 16);
  1012. + if (shamt & 32) x |= ((x & 0x00000000FFFFFFFFLL) << 32) | ((x & 0xFFFFFFFF00000000LL) >> 32);
  1013. + return x;
  1014. +}
  1015. +
  1016. +static inline uint64_t _rvintrin_shuffle64_stage(uint64_t src, uint64_t maskL, uint64_t maskR, int N)
  1017. +{
  1018. + uint64_t x = src & ~(maskL | maskR);
  1019. + x |= ((src << N) & maskL) | ((src >> N) & maskR);
  1020. + return x;
  1021. +}
  1022. +
  1023. +static inline int64_t _rv64_shfl(int64_t rs1, int64_t rs2)
  1024. +{
  1025. + uint64_t x = rs1;
  1026. + int shamt = rs2 & 31;
  1027. + if (shamt & 16) x = _rvintrin_shuffle64_stage(x, 0x0000ffff00000000LL, 0x00000000ffff0000LL, 16);
  1028. + if (shamt & 8) x = _rvintrin_shuffle64_stage(x, 0x00ff000000ff0000LL, 0x0000ff000000ff00LL, 8);
  1029. + if (shamt & 4) x = _rvintrin_shuffle64_stage(x, 0x0f000f000f000f00LL, 0x00f000f000f000f0LL, 4);
  1030. + if (shamt & 2) x = _rvintrin_shuffle64_stage(x, 0x3030303030303030LL, 0x0c0c0c0c0c0c0c0cLL, 2);
  1031. + if (shamt & 1) x = _rvintrin_shuffle64_stage(x, 0x4444444444444444LL, 0x2222222222222222LL, 1);
  1032. + return x;
  1033. +}
  1034. +
  1035. +static inline int64_t _rv64_unshfl(int64_t rs1, int64_t rs2)
  1036. +{
  1037. + uint64_t x = rs1;
  1038. + int shamt = rs2 & 31;
  1039. + if (shamt & 1) x = _rvintrin_shuffle64_stage(x, 0x4444444444444444LL, 0x2222222222222222LL, 1);
  1040. + if (shamt & 2) x = _rvintrin_shuffle64_stage(x, 0x3030303030303030LL, 0x0c0c0c0c0c0c0c0cLL, 2);
  1041. + if (shamt & 4) x = _rvintrin_shuffle64_stage(x, 0x0f000f000f000f00LL, 0x00f000f000f000f0LL, 4);
  1042. + if (shamt & 8) x = _rvintrin_shuffle64_stage(x, 0x00ff000000ff0000LL, 0x0000ff000000ff00LL, 8);
  1043. + if (shamt & 16) x = _rvintrin_shuffle64_stage(x, 0x0000ffff00000000LL, 0x00000000ffff0000LL, 16);
  1044. + return x;
  1045. +}
  1046. +
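+// Sequential emulation of parallel bit extract/deposit: each loop
+// iteration handles one contiguous run of set bits in the mask.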
  1047. +static inline int32_t _rv32_bext(int32_t rs1, int32_t rs2)
  1048. +{
  1049. + uint32_t c = 0, i = 0, data = rs1, mask = rs2;
  1050. + while (mask) {
  1051. + uint32_t b = mask & ~((mask | (mask-1)) + 1);
  1052. + c |= (data & b) >> (_rv32_ctz(b) - i);
  1053. + i += _rv32_pcnt(b);
  1054. + mask -= b;
  1055. + }
  1056. + return c;
  1057. +}
  1058. +
  1059. +static inline int32_t _rv32_bdep(int32_t rs1, int32_t rs2)
  1060. +{
  1061. + uint32_t c = 0, i = 0, data = rs1, mask = rs2;
  1062. + while (mask) {
  1063. + uint32_t b = mask & ~((mask | (mask-1)) + 1);
  1064. + c |= (data << (_rv32_ctz(b) - i)) & b;
  1065. + i += _rv32_pcnt(b);
  1066. + mask -= b;
  1067. + }
  1068. + return c;
  1069. +}
  1070. +
  1071. +static inline int64_t _rv64_bext(int64_t rs1, int64_t rs2)
  1072. +{
  1073. + uint64_t c = 0, i = 0, data = rs1, mask = rs2;
  1074. + while (mask) {
  1075. + uint64_t b = mask & ~((mask | (mask-1)) + 1);
  1076. + c |= (data & b) >> (_rv64_ctz(b) - i);
  1077. + i += _rv64_pcnt(b);
  1078. + mask -= b;
  1079. + }
  1080. + return c;
  1081. +}
  1082. +
  1083. +static inline int64_t _rv64_bdep(int64_t rs1, int64_t rs2)
  1084. +{
  1085. + uint64_t c = 0, i = 0, data = rs1, mask = rs2;
  1086. + while (mask) {
  1087. + uint64_t b = mask & ~((mask | (mask-1)) + 1);
  1088. + c |= (data << (_rv64_ctz(b) - i)) & b;
  1089. + i += _rv64_pcnt(b);
  1090. + mask -= b;
  1091. + }
  1092. + return c;
  1093. +}
  1094. +
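+// Carry-less multiply: XOR of copies of rs1 shifted by the set bit
+// positions of rs2. clmul is the low XLEN bits of the product, clmulh the
+// high bits, clmulr the product shifted right by XLEN-1.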
  1095. +static inline int32_t _rv32_clmul(int32_t rs1, int32_t rs2)
  1096. +{
  1097. + uint32_t a = rs1, b = rs2, x = 0;
  1098. + for (int i = 0; i < 32; i++)
  1099. + if ((b >> i) & 1)
  1100. + x ^= a << i;
  1101. + return x;
  1102. +}
  1103. +
  1104. +static inline int32_t _rv32_clmulh(int32_t rs1, int32_t rs2)
  1105. +{
  1106. + uint32_t a = rs1, b = rs2, x = 0;
  1107. + for (int i = 1; i < 32; i++)
  1108. + if ((b >> i) & 1)
  1109. + x ^= a >> (32-i);
  1110. + return x;
  1111. +}
  1112. +
  1113. +static inline int32_t _rv32_clmulr(int32_t rs1, int32_t rs2)
  1114. +{
  1115. + uint32_t a = rs1, b = rs2, x = 0;
  1116. + for (int i = 0; i < 32; i++)
  1117. + if ((b >> i) & 1)
  1118. + x ^= a >> (31-i);
  1119. + return x;
  1120. +}
  1121. +
  1122. +static inline int64_t _rv64_clmul(int64_t rs1, int64_t rs2)
  1123. +{
  1124. + uint64_t a = rs1, b = rs2, x = 0;
  1125. + for (int i = 0; i < 64; i++)
  1126. + if ((b >> i) & 1)
  1127. + x ^= a << i;
  1128. + return x;
  1129. +}
  1130. +
  1131. +static inline int64_t _rv64_clmulh(int64_t rs1, int64_t rs2)
  1132. +{
  1133. + uint64_t a = rs1, b = rs2, x = 0;
  1134. + for (int i = 1; i < 64; i++)
  1135. + if ((b >> i) & 1)
  1136. + x ^= a >> (64-i);
  1137. + return x;
  1138. +}
  1139. +
  1140. +static inline int64_t _rv64_clmulr(int64_t rs1, int64_t rs2)
  1141. +{
  1142. + uint64_t a = rs1, b = rs2, x = 0;
  1143. + for (int i = 0; i < 64; i++)
  1144. + if ((b >> i) & 1)
  1145. + x ^= a >> (63-i);
  1146. + return x;
  1147. +}
  1148. +
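+// Bit-serial CRC helpers using the reflected CRC-32 (0xEDB88320) and
+// CRC-32C (0x82F63B78) polynomials, consuming the nbits low-order bits of x.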
  1149. +static inline long _rvintrin_crc32(unsigned long x, int nbits)
  1150. +{
  1151. + for (int i = 0; i < nbits; i++)
  1152. + x = (x >> 1) ^ (0xEDB88320 & ~((x&1)-1));
  1153. + return x;
  1154. +}
  1155. +
  1156. +static inline long _rvintrin_crc32c(unsigned long x, int nbits)
  1157. +{
  1158. + for (int i = 0; i < nbits; i++)
  1159. + x = (x >> 1) ^ (0x82F63B78 & ~((x&1)-1));
  1160. + return x;
  1161. +}
  1162. +
  1163. +static inline long _rv_crc32_b(long rs1) { return _rvintrin_crc32(rs1, 8); }
  1164. +static inline long _rv_crc32_h(long rs1) { return _rvintrin_crc32(rs1, 16); }
  1165. +static inline long _rv_crc32_w(long rs1) { return _rvintrin_crc32(rs1, 32); }
  1166. +
  1167. +static inline long _rv_crc32c_b(long rs1) { return _rvintrin_crc32c(rs1, 8); }
  1168. +static inline long _rv_crc32c_h(long rs1) { return _rvintrin_crc32c(rs1, 16); }
  1169. +static inline long _rv_crc32c_w(long rs1) { return _rvintrin_crc32c(rs1, 32); }
  1170. +
  1171. +#ifdef RVINTRIN_RV64
  1172. +static inline long _rv_crc32_d (long rs1) { return _rvintrin_crc32 (rs1, 64); }
  1173. +static inline long _rv_crc32c_d(long rs1) { return _rvintrin_crc32c(rs1, 64); }
  1174. +#endif
  1175. +
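+// bmatflip: transpose of the 8x8 bit matrix held in a 64-bit register,
+// obtained by applying zip (shfl with shamt 31) three times.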
  1176. +static inline int64_t _rv64_bmatflip(int64_t rs1)
  1177. +{
  1178. + uint64_t x = rs1;
  1179. + x = _rv64_shfl(x, 31);
  1180. + x = _rv64_shfl(x, 31);
  1181. + x = _rv64_shfl(x, 31);
  1182. + return x;
  1183. +}
  1184. +
  1185. +static inline int64_t _rv64_bmatxor(int64_t rs1, int64_t rs2)
  1186. +{
  1187. + // transpose of rs2
  1188. + int64_t rs2t = _rv64_bmatflip(rs2);
  1189. +
  1190. + uint8_t u[8]; // rows of rs1
  1191. + uint8_t v[8]; // cols of rs2
  1192. +
  1193. + for (int i = 0; i < 8; i++) {
  1194. + u[i] = rs1 >> (i*8);
  1195. + v[i] = rs2t >> (i*8);
  1196. + }
  1197. +
  1198. + uint64_t x = 0;
  1199. + for (int i = 0; i < 64; i++) {
  1200. + if (_rv64_pcnt(u[i / 8] & v[i % 8]) & 1)
  1201. + x |= 1LL << i;
  1202. + }
  1203. +
  1204. + return x;
  1205. +}
  1206. +
  1207. +static inline int64_t _rv64_bmator(int64_t rs1, int64_t rs2)
  1208. +{
  1209. + // transpose of rs2
  1210. + int64_t rs2t = _rv64_bmatflip(rs2);
  1211. +
  1212. + uint8_t u[8]; // rows of rs1
  1213. + uint8_t v[8]; // cols of rs2
  1214. +
  1215. + for (int i = 0; i < 8; i++) {
  1216. + u[i] = rs1 >> (i*8);
  1217. + v[i] = rs2t >> (i*8);
  1218. + }
  1219. +
  1220. + uint64_t x = 0;
  1221. + for (int i = 0; i < 64; i++) {
  1222. + if ((u[i / 8] & v[i % 8]) != 0)
  1223. + x |= 1LL << i;
  1224. + }
  1225. +
  1226. + return x;
  1227. +}
  1228. +
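+// cmix selects between rs1 and rs3 bit-by-bit under the mask rs2;
+// cmov selects the whole word rs1 or rs3 depending on rs2 != 0.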
  1229. +static inline long _rv_cmix(long rs2, long rs1, long rs3)
  1230. +{
  1231. + return (rs1 & rs2) | (rs3 & ~rs2);
  1232. +}
  1233. +
  1234. +static inline long _rv_cmov(long rs2, long rs1, long rs3)
  1235. +{
  1236. + return rs2 ? rs1 : rs3;
  1237. +}
  1238. +
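+// Funnel shifts: rotate the 2*XLEN-bit concatenation of rs1 and rs3 by
+// rs2 (mod 2*XLEN) and return the XLEN-bit half that originally held rs1.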
  1239. +static inline int32_t _rv32_fsl(int32_t rs1, int32_t rs3, int32_t rs2)
  1240. +{
  1241. + int shamt = rs2 & 63;
  1242. + uint32_t A = rs1, B = rs3;
  1243. + if (shamt >= 32) {
  1244. + shamt -= 32;
  1245. + A = rs3;
  1246. + B = rs1;
  1247. + }
  1248. + return shamt ? (A << shamt) | (B >> (32-shamt)) : A;
  1249. +}
  1250. +
  1251. +static inline int32_t _rv32_fsr(int32_t rs1, int32_t rs3, int32_t rs2)
  1252. +{
  1253. + int shamt = rs2 & 63;
  1254. + uint32_t A = rs1, B = rs3;
  1255. + if (shamt >= 32) {
  1256. + shamt -= 32;
  1257. + A = rs3;
  1258. + B = rs1;
  1259. + }
  1260. + return shamt ? (A >> shamt) | (B << (32-shamt)) : A;
  1261. +}
  1262. +
  1263. +static inline int64_t _rv64_fsl(int64_t rs1, int64_t rs3, int64_t rs2)
  1264. +{
  1265. + int shamt = rs2 & 127;
  1266. + uint64_t A = rs1, B = rs3;
  1267. + if (shamt >= 64) {
  1268. + shamt -= 64;
  1269. + A = rs3;
  1270. + B = rs1;
  1271. + }
  1272. + return shamt ? (A << shamt) | (B >> (64-shamt)) : A;
  1273. +}
  1274. +
  1275. +static inline int64_t _rv64_fsr(int64_t rs1, int64_t rs3, int64_t rs2)
  1276. +{
  1277. + int shamt = rs2 & 127;
  1278. + uint64_t A = rs1, B = rs3;
  1279. + if (shamt >= 64) {
  1280. + shamt -= 64;
  1281. + A = rs3;
  1282. + B = rs1;
  1283. + }
  1284. + return shamt ? (A >> shamt) | (B << (64-shamt)) : A;
  1285. +}
  1286. +
  1287. +static inline long _rv_andn(long rs1, long rs2) { return rs1 & ~rs2; }
  1288. +static inline long _rv_orn (long rs1, long rs2) { return rs1 | ~rs2; }
  1289. +static inline long _rv_xnor(long rs1, long rs2) { return rs1 ^ ~rs2; }
  1290. +
  1291. +#endif // RVINTRIN_EMULATE
  1292. +
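+// Map the XLEN-independent _rv_* names onto the implementations matching
+// the native register width.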
  1293. +#ifdef RVINTRIN_RV32
  1294. +static inline long _rv_clz (long rs1) { return _rv32_clz (rs1); }
  1295. +static inline long _rv_ctz (long rs1) { return _rv32_ctz (rs1); }
  1296. +static inline long _rv_pcnt (long rs1) { return _rv32_pcnt (rs1); }
  1297. +static inline long _rv_sext_b (long rs1) { return _rv32_sext_b(rs1); }
  1298. +static inline long _rv_sext_h (long rs1) { return _rv32_sext_h(rs1); }
  1299. +
  1300. +static inline long _rv_pack (long rs1, long rs2) { return _rv32_pack (rs1, rs2); }
  1301. +static inline long _rv_packu (long rs1, long rs2) { return _rv32_packu (rs1, rs2); }
  1302. +static inline long _rv_packh (long rs1, long rs2) { return _rv32_packh (rs1, rs2); }
  1303. +static inline long _rv_bfp (long rs1, long rs2) { return _rv32_bfp (rs1, rs2); }
  1304. +static inline long _rv_min (long rs1, long rs2) { return _rv32_min (rs1, rs2); }
  1305. +static inline long _rv_minu (long rs1, long rs2) { return _rv32_minu (rs1, rs2); }
  1306. +static inline long _rv_max (long rs1, long rs2) { return _rv32_max (rs1, rs2); }
  1307. +static inline long _rv_maxu (long rs1, long rs2) { return _rv32_maxu (rs1, rs2); }
  1308. +static inline long _rv_bset (long rs1, long rs2) { return _rv32_bset (rs1, rs2); }
  1309. +static inline long _rv_bclr (long rs1, long rs2) { return _rv32_bclr (rs1, rs2); }
  1310. +static inline long _rv_binv (long rs1, long rs2) { return _rv32_binv (rs1, rs2); }
  1311. +static inline long _rv_bext (long rs1, long rs2) { return _rv32_bext (rs1, rs2); }
  1312. +static inline long _rv_sll (long rs1, long rs2) { return _rv32_sll (rs1, rs2); }
  1313. +static inline long _rv_srl (long rs1, long rs2) { return _rv32_srl (rs1, rs2); }
  1314. +static inline long _rv_sra (long rs1, long rs2) { return _rv32_sra (rs1, rs2); }
  1315. +static inline long _rv_slo (long rs1, long rs2) { return _rv32_slo (rs1, rs2); }
  1316. +static inline long _rv_sro (long rs1, long rs2) { return _rv32_sro (rs1, rs2); }
  1317. +static inline long _rv_rol (long rs1, long rs2) { return _rv32_rol (rs1, rs2); }
  1318. +static inline long _rv_ror (long rs1, long rs2) { return _rv32_ror (rs1, rs2); }
  1319. +static inline long _rv_grev (long rs1, long rs2) { return _rv32_grev (rs1, rs2); }
  1320. +static inline long _rv_gorc (long rs1, long rs2) { return _rv32_gorc (rs1, rs2); }
  1321. +static inline long _rv_shfl (long rs1, long rs2) { return _rv32_shfl (rs1, rs2); }
  1322. +static inline long _rv_unshfl (long rs1, long rs2) { return _rv32_unshfl (rs1, rs2); }
  1324. +static inline long _rv_bdep (long rs1, long rs2) { return _rv32_bdep (rs1, rs2); }
  1325. +static inline long _rv_clmul (long rs1, long rs2) { return _rv32_clmul (rs1, rs2); }
  1326. +static inline long _rv_clmulh (long rs1, long rs2) { return _rv32_clmulh (rs1, rs2); }
  1327. +static inline long _rv_clmulr (long rs1, long rs2) { return _rv32_clmulr (rs1, rs2); }
  1328. +
  1329. +static inline long _rv_fsl(long rs1, long rs3, long rs2) { return _rv32_fsl(rs1, rs3, rs2); }
  1330. +static inline long _rv_fsr(long rs1, long rs3, long rs2) { return _rv32_fsr(rs1, rs3, rs2); }
  1331. +#endif
  1332. +
  1333. +#ifdef RVINTRIN_RV64
  1334. +static inline long _rv_clz (long rs1) { return _rv64_clz (rs1); }
  1335. +static inline long _rv_ctz (long rs1) { return _rv64_ctz (rs1); }
  1336. +static inline long _rv_pcnt (long rs1) { return _rv64_pcnt (rs1); }
  1337. +static inline long _rv_sext_b (long rs1) { return _rv64_sext_b (rs1); }
  1338. +static inline long _rv_sext_h (long rs1) { return _rv64_sext_h (rs1); }
  1339. +static inline long _rv_bmatflip(long rs1) { return _rv64_bmatflip(rs1); }
  1340. +
  1341. +static inline long _rv_pack (long rs1, long rs2) { return _rv64_pack (rs1, rs2); }
  1342. +static inline long _rv_packu (long rs1, long rs2) { return _rv64_packu (rs1, rs2); }
  1343. +static inline long _rv_packh (long rs1, long rs2) { return _rv64_packh (rs1, rs2); }
  1344. +static inline long _rv_bfp (long rs1, long rs2) { return _rv64_bfp (rs1, rs2); }
  1345. +static inline long _rv_min (long rs1, long rs2) { return _rv64_min (rs1, rs2); }
  1346. +static inline long _rv_minu (long rs1, long rs2) { return _rv64_minu (rs1, rs2); }
  1347. +static inline long _rv_max (long rs1, long rs2) { return _rv64_max (rs1, rs2); }
  1348. +static inline long _rv_maxu (long rs1, long rs2) { return _rv64_maxu (rs1, rs2); }
  1349. +static inline long _rv_bset (long rs1, long rs2) { return _rv64_bset (rs1, rs2); }
  1350. +static inline long _rv_bclr (long rs1, long rs2) { return _rv64_bclr (rs1, rs2); }
  1351. +static inline long _rv_binv (long rs1, long rs2) { return _rv64_binv (rs1, rs2); }
  1352. +static inline long _rv_bext (long rs1, long rs2) { return _rv64_bext (rs1, rs2); }
  1353. +static inline long _rv_sll (long rs1, long rs2) { return _rv64_sll (rs1, rs2); }
  1354. +static inline long _rv_srl (long rs1, long rs2) { return _rv64_srl (rs1, rs2); }
  1355. +static inline long _rv_sra (long rs1, long rs2) { return _rv64_sra (rs1, rs2); }
  1356. +static inline long _rv_slo (long rs1, long rs2) { return _rv64_slo (rs1, rs2); }
  1357. +static inline long _rv_sro (long rs1, long rs2) { return _rv64_sro (rs1, rs2); }
  1358. +static inline long _rv_rol (long rs1, long rs2) { return _rv64_rol (rs1, rs2); }
  1359. +static inline long _rv_ror (long rs1, long rs2) { return _rv64_ror (rs1, rs2); }
  1360. +static inline long _rv_grev (long rs1, long rs2) { return _rv64_grev (rs1, rs2); }
  1361. +static inline long _rv_gorc (long rs1, long rs2) { return _rv64_gorc (rs1, rs2); }
  1362. +static inline long _rv_shfl (long rs1, long rs2) { return _rv64_shfl (rs1, rs2); }
  1363. +static inline long _rv_unshfl (long rs1, long rs2) { return _rv64_unshfl (rs1, rs2); }
  1365. +static inline long _rv_bdep (long rs1, long rs2) { return _rv64_bdep (rs1, rs2); }
  1366. +static inline long _rv_clmul (long rs1, long rs2) { return _rv64_clmul (rs1, rs2); }
  1367. +static inline long _rv_clmulh (long rs1, long rs2) { return _rv64_clmulh (rs1, rs2); }
  1368. +static inline long _rv_clmulr (long rs1, long rs2) { return _rv64_clmulr (rs1, rs2); }
  1369. +static inline long _rv_bmator (long rs1, long rs2) { return _rv64_bmator (rs1, rs2); }
  1370. +static inline long _rv_bmatxor(long rs1, long rs2) { return _rv64_bmatxor(rs1, rs2); }
  1371. +
  1372. +static inline long _rv_fsl(long rs1, long rs3, long rs2) { return _rv64_fsl(rs1, rs3, rs2); }
  1373. +static inline long _rv_fsr(long rs1, long rs3, long rs2) { return _rv64_fsr(rs1, rs3, rs2); }
  1374. +#endif
  1375. +
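+// Pseudo-instruction wrappers: the macros below generate fixed-amount
+// grev/gorc/shfl aliases such as rev8 (byte swap), rev (bit reverse),
+// orc_b and zip/unzip for the native XLEN.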
  1376. +#ifdef RVINTRIN_RV32
  1377. +
  1378. +#define RVINTRIN_GREV_PSEUDO_OP32(_arg, _name) \
  1379. + static inline long _rv_ ## _name(long rs1) { return _rv_grev (rs1, _arg); } \
  1380. + static inline int32_t _rv32_ ## _name(int32_t rs1) { return _rv32_grev(rs1, _arg); }
  1381. +
  1382. +#define RVINTRIN_GREV_PSEUDO_OP64(_arg, _name)
  1383. +
  1384. +#else
  1385. +
  1386. +#define RVINTRIN_GREV_PSEUDO_OP32(_arg, _name) \
  1387. + static inline int32_t _rv32_ ## _name(int32_t rs1) { return _rv32_grev(rs1, _arg); }
  1388. +
  1389. +#define RVINTRIN_GREV_PSEUDO_OP64(_arg, _name) \
  1390. + static inline long _rv_ ## _name(long rs1) { return _rv_grev (rs1, _arg); } \
  1391. + static inline int64_t _rv64_ ## _name(int64_t rs1) { return _rv64_grev(rs1, _arg); }
  1392. +#endif
  1393. +
  1394. +RVINTRIN_GREV_PSEUDO_OP32( 1, rev_p)
  1395. +RVINTRIN_GREV_PSEUDO_OP32( 2, rev2_n)
  1396. +RVINTRIN_GREV_PSEUDO_OP32( 3, rev_n)
  1397. +RVINTRIN_GREV_PSEUDO_OP32( 4, rev4_b)
  1398. +RVINTRIN_GREV_PSEUDO_OP32( 6, rev2_b)
  1399. +RVINTRIN_GREV_PSEUDO_OP32( 7, rev_b)
  1400. +RVINTRIN_GREV_PSEUDO_OP32( 8, rev8_h)
  1401. +RVINTRIN_GREV_PSEUDO_OP32(12, rev4_h)
  1402. +RVINTRIN_GREV_PSEUDO_OP32(14, rev2_h)
  1403. +RVINTRIN_GREV_PSEUDO_OP32(15, rev_h)
  1404. +RVINTRIN_GREV_PSEUDO_OP32(16, rev16)
  1405. +RVINTRIN_GREV_PSEUDO_OP32(24, rev8)
  1406. +RVINTRIN_GREV_PSEUDO_OP32(28, rev4)
  1407. +RVINTRIN_GREV_PSEUDO_OP32(30, rev2)
  1408. +RVINTRIN_GREV_PSEUDO_OP32(31, rev)
  1409. +
  1410. +RVINTRIN_GREV_PSEUDO_OP64( 1, rev_p)
  1411. +RVINTRIN_GREV_PSEUDO_OP64( 2, rev2_n)
  1412. +RVINTRIN_GREV_PSEUDO_OP64( 3, rev_n)
  1413. +RVINTRIN_GREV_PSEUDO_OP64( 4, rev4_b)
  1414. +RVINTRIN_GREV_PSEUDO_OP64( 6, rev2_b)
  1415. +RVINTRIN_GREV_PSEUDO_OP64( 7, rev_b)
  1416. +RVINTRIN_GREV_PSEUDO_OP64( 8, rev8_h)
  1417. +RVINTRIN_GREV_PSEUDO_OP64(12, rev4_h)
  1418. +RVINTRIN_GREV_PSEUDO_OP64(14, rev2_h)
  1419. +RVINTRIN_GREV_PSEUDO_OP64(15, rev_h)
  1420. +RVINTRIN_GREV_PSEUDO_OP64(16, rev16_w)
  1421. +RVINTRIN_GREV_PSEUDO_OP64(24, rev8_w)
  1422. +RVINTRIN_GREV_PSEUDO_OP64(28, rev4_w)
  1423. +RVINTRIN_GREV_PSEUDO_OP64(30, rev2_w)
  1424. +RVINTRIN_GREV_PSEUDO_OP64(31, rev_w)
  1425. +RVINTRIN_GREV_PSEUDO_OP64(32, rev32)
  1426. +RVINTRIN_GREV_PSEUDO_OP64(48, rev16)
  1427. +RVINTRIN_GREV_PSEUDO_OP64(56, rev8)
  1428. +RVINTRIN_GREV_PSEUDO_OP64(60, rev4)
  1429. +RVINTRIN_GREV_PSEUDO_OP64(62, rev2)
  1430. +RVINTRIN_GREV_PSEUDO_OP64(63, rev)
  1431. +
  1432. +#ifdef RVINTRIN_RV32
  1433. +
  1434. +#define RVINTRIN_GORC_PSEUDO_OP32(_arg, _name) \
  1435. + static inline long _rv_ ## _name(long rs1) { return _rv_gorc (rs1, _arg); } \
  1436. + static inline int32_t _rv32_ ## _name(int32_t rs1) { return _rv32_gorc(rs1, _arg); }
  1437. +
  1438. +#define RVINTRIN_GORC_PSEUDO_OP64(_arg, _name)
  1439. +
  1440. +#else
  1441. +
  1442. +#define RVINTRIN_GORC_PSEUDO_OP32(_arg, _name) \
  1443. + static inline int32_t _rv32_ ## _name(int32_t rs1) { return _rv32_gorc(rs1, _arg); }
  1444. +
  1445. +#define RVINTRIN_GORC_PSEUDO_OP64(_arg, _name) \
  1446. + static inline long _rv_ ## _name(long rs1) { return _rv_gorc (rs1, _arg); } \
  1447. + static inline int64_t _rv64_ ## _name(int64_t rs1) { return _rv64_gorc(rs1, _arg); }
  1448. +#endif
  1449. +
  1450. +RVINTRIN_GORC_PSEUDO_OP32( 1, orc_p)
  1451. +RVINTRIN_GORC_PSEUDO_OP32( 2, orc2_n)
  1452. +RVINTRIN_GORC_PSEUDO_OP32( 3, orc_n)
  1453. +RVINTRIN_GORC_PSEUDO_OP32( 4, orc4_b)
  1454. +RVINTRIN_GORC_PSEUDO_OP32( 6, orc2_b)
  1455. +RVINTRIN_GORC_PSEUDO_OP32( 7, orc_b)
  1456. +RVINTRIN_GORC_PSEUDO_OP32( 8, orc8_h)
  1457. +RVINTRIN_GORC_PSEUDO_OP32(12, orc4_h)
  1458. +RVINTRIN_GORC_PSEUDO_OP32(14, orc2_h)
  1459. +RVINTRIN_GORC_PSEUDO_OP32(15, orc_h)
  1460. +RVINTRIN_GORC_PSEUDO_OP32(16, orc16)
  1461. +RVINTRIN_GORC_PSEUDO_OP32(24, orc8)
  1462. +RVINTRIN_GORC_PSEUDO_OP32(28, orc4)
  1463. +RVINTRIN_GORC_PSEUDO_OP32(30, orc2)
  1464. +RVINTRIN_GORC_PSEUDO_OP32(31, orc)
  1465. +
  1466. +RVINTRIN_GORC_PSEUDO_OP64( 1, orc_p)
  1467. +RVINTRIN_GORC_PSEUDO_OP64( 2, orc2_n)
  1468. +RVINTRIN_GORC_PSEUDO_OP64( 3, orc_n)
  1469. +RVINTRIN_GORC_PSEUDO_OP64( 4, orc4_b)
  1470. +RVINTRIN_GORC_PSEUDO_OP64( 6, orc2_b)
  1471. +RVINTRIN_GORC_PSEUDO_OP64( 7, orc_b)
  1472. +RVINTRIN_GORC_PSEUDO_OP64( 8, orc8_h)
  1473. +RVINTRIN_GORC_PSEUDO_OP64(12, orc4_h)
  1474. +RVINTRIN_GORC_PSEUDO_OP64(14, orc2_h)
  1475. +RVINTRIN_GORC_PSEUDO_OP64(15, orc_h)
  1476. +RVINTRIN_GORC_PSEUDO_OP64(16, orc16_w)
  1477. +RVINTRIN_GORC_PSEUDO_OP64(24, orc8_w)
  1478. +RVINTRIN_GORC_PSEUDO_OP64(28, orc4_w)
  1479. +RVINTRIN_GORC_PSEUDO_OP64(30, orc2_w)
  1480. +RVINTRIN_GORC_PSEUDO_OP64(31, orc_w)
  1481. +RVINTRIN_GORC_PSEUDO_OP64(32, orc32)
  1482. +RVINTRIN_GORC_PSEUDO_OP64(48, orc16)
  1483. +RVINTRIN_GORC_PSEUDO_OP64(56, orc8)
  1484. +RVINTRIN_GORC_PSEUDO_OP64(60, orc4)
  1485. +RVINTRIN_GORC_PSEUDO_OP64(62, orc2)
  1486. +RVINTRIN_GORC_PSEUDO_OP64(63, orc)
  1487. +
  1488. +#ifdef RVINTRIN_RV32
  1489. +
  1490. +#define RVINTRIN_SHFL_PSEUDO_OP32(_arg, _name) \
  1491. + static inline long _rv_ ## _name(long rs1) { return _rv_shfl (rs1, _arg); } \
  1492. + static inline long _rv_un ## _name(long rs1) { return _rv_unshfl (rs1, _arg); } \
  1493. + static inline int32_t _rv32_ ## _name(int32_t rs1) { return _rv32_shfl  (rs1, _arg); } \
  1494. + static inline int32_t _rv32_un ## _name(int32_t rs1) { return _rv32_unshfl(rs1, _arg); }
  1495. +
  1496. +#define RVINTRIN_SHFL_PSEUDO_OP64(_arg, _name)
  1497. +
  1498. +#else
  1499. +
  1500. +#define RVINTRIN_SHFL_PSEUDO_OP32(_arg, _name)
  1501. +
  1502. +#define RVINTRIN_SHFL_PSEUDO_OP64(_arg, _name) \
  1503. + static inline long _rv_ ## _name(long rs1) { return _rv_shfl (rs1, _arg); } \
  1504. + static inline long _rv_un ## _name(long rs1) { return _rv_unshfl (rs1, _arg); } \
  1505. + static inline int64_t _rv64_ ## _name(int64_t rs1) { return _rv64_shfl (rs1, _arg); } \
  1506. + static inline int64_t _rv64_un ## _name(int64_t rs1) { return _rv64_unshfl(rs1, _arg); }
  1507. +
  1508. +#endif
  1509. +
  1510. +RVINTRIN_SHFL_PSEUDO_OP32( 1, zip_n)
  1511. +RVINTRIN_SHFL_PSEUDO_OP32( 2, zip2_b)
  1512. +RVINTRIN_SHFL_PSEUDO_OP32( 3, zip_b)
  1513. +RVINTRIN_SHFL_PSEUDO_OP32( 4, zip4_h)
  1514. +RVINTRIN_SHFL_PSEUDO_OP32( 6, zip2_h)
  1515. +RVINTRIN_SHFL_PSEUDO_OP32( 7, zip_h)
  1516. +RVINTRIN_SHFL_PSEUDO_OP32( 8, zip8)
  1517. +RVINTRIN_SHFL_PSEUDO_OP32(12, zip4)
  1518. +RVINTRIN_SHFL_PSEUDO_OP32(14, zip2)
  1519. +RVINTRIN_SHFL_PSEUDO_OP32(15, zip)
  1520. +
  1521. +RVINTRIN_SHFL_PSEUDO_OP64( 1, zip_n)
  1522. +RVINTRIN_SHFL_PSEUDO_OP64( 2, zip2_b)
  1523. +RVINTRIN_SHFL_PSEUDO_OP64( 3, zip_b)
  1524. +RVINTRIN_SHFL_PSEUDO_OP64( 4, zip4_h)
  1525. +RVINTRIN_SHFL_PSEUDO_OP64( 6, zip2_h)
  1526. +RVINTRIN_SHFL_PSEUDO_OP64( 7, zip_h)
  1527. +RVINTRIN_SHFL_PSEUDO_OP64( 8, zip8_w)
  1528. +RVINTRIN_SHFL_PSEUDO_OP64(12, zip4_w)
  1529. +RVINTRIN_SHFL_PSEUDO_OP64(14, zip2_w)
  1530. +RVINTRIN_SHFL_PSEUDO_OP64(15, zip_w)
  1531. +RVINTRIN_SHFL_PSEUDO_OP64(16, zip16)
  1532. +RVINTRIN_SHFL_PSEUDO_OP64(24, zip8)
  1533. +RVINTRIN_SHFL_PSEUDO_OP64(28, zip4)
  1534. +RVINTRIN_SHFL_PSEUDO_OP64(30, zip2)
  1535. +RVINTRIN_SHFL_PSEUDO_OP64(31, zip)
  1536. +
  1537. +#endif // RVINTRIN_H
  1538. --
  1539. 2.33.1