stm32-cryp.c

  1. // SPDX-License-Identifier: GPL-2.0-only
  2. /*
  3. * Copyright (C) STMicroelectronics SA 2017
  4. * Author: Fabien Dessenne <fabien.dessenne@st.com>
  5. */
  6. #include <linux/clk.h>
  7. #include <linux/delay.h>
  8. #include <linux/interrupt.h>
  9. #include <linux/iopoll.h>
  10. #include <linux/module.h>
  11. #include <linux/of_device.h>
  12. #include <linux/platform_device.h>
  13. #include <linux/pm_runtime.h>
  14. #include <linux/reset.h>
  15. #include <crypto/aes.h>
  16. #include <crypto/internal/des.h>
  17. #include <crypto/engine.h>
  18. #include <crypto/scatterwalk.h>
  19. #include <crypto/internal/aead.h>
  20. #include <crypto/internal/skcipher.h>
  21. #define DRIVER_NAME "stm32-cryp"
  22. /* Bit [0] encrypt / decrypt */
  23. #define FLG_ENCRYPT BIT(0)
  24. /* Bit [8..1] algo & operation mode */
  25. #define FLG_AES BIT(1)
  26. #define FLG_DES BIT(2)
  27. #define FLG_TDES BIT(3)
  28. #define FLG_ECB BIT(4)
  29. #define FLG_CBC BIT(5)
  30. #define FLG_CTR BIT(6)
  31. #define FLG_GCM BIT(7)
  32. #define FLG_CCM BIT(8)
  33. /* Mode mask = bits [15..0] */
  34. #define FLG_MODE_MASK GENMASK(15, 0)
  35. /* Bit [31..16] status */
  36. /* Registers */
  37. #define CRYP_CR 0x00000000
  38. #define CRYP_SR 0x00000004
  39. #define CRYP_DIN 0x00000008
  40. #define CRYP_DOUT 0x0000000C
  41. #define CRYP_DMACR 0x00000010
  42. #define CRYP_IMSCR 0x00000014
  43. #define CRYP_RISR 0x00000018
  44. #define CRYP_MISR 0x0000001C
  45. #define CRYP_K0LR 0x00000020
  46. #define CRYP_K0RR 0x00000024
  47. #define CRYP_K1LR 0x00000028
  48. #define CRYP_K1RR 0x0000002C
  49. #define CRYP_K2LR 0x00000030
  50. #define CRYP_K2RR 0x00000034
  51. #define CRYP_K3LR 0x00000038
  52. #define CRYP_K3RR 0x0000003C
  53. #define CRYP_IV0LR 0x00000040
  54. #define CRYP_IV0RR 0x00000044
  55. #define CRYP_IV1LR 0x00000048
  56. #define CRYP_IV1RR 0x0000004C
  57. #define CRYP_CSGCMCCM0R 0x00000050
  58. #define CRYP_CSGCM0R 0x00000070
  59. /* Registers values */
  60. #define CR_DEC_NOT_ENC 0x00000004
  61. #define CR_TDES_ECB 0x00000000
  62. #define CR_TDES_CBC 0x00000008
  63. #define CR_DES_ECB 0x00000010
  64. #define CR_DES_CBC 0x00000018
  65. #define CR_AES_ECB 0x00000020
  66. #define CR_AES_CBC 0x00000028
  67. #define CR_AES_CTR 0x00000030
  68. #define CR_AES_KP 0x00000038
  69. #define CR_AES_GCM 0x00080000
  70. #define CR_AES_CCM 0x00080008
  71. #define CR_AES_UNKNOWN 0xFFFFFFFF
  72. #define CR_ALGO_MASK 0x00080038
  73. #define CR_DATA32 0x00000000
  74. #define CR_DATA16 0x00000040
  75. #define CR_DATA8 0x00000080
  76. #define CR_DATA1 0x000000C0
  77. #define CR_KEY128 0x00000000
  78. #define CR_KEY192 0x00000100
  79. #define CR_KEY256 0x00000200
  80. #define CR_FFLUSH 0x00004000
  81. #define CR_CRYPEN 0x00008000
  82. #define CR_PH_INIT 0x00000000
  83. #define CR_PH_HEADER 0x00010000
  84. #define CR_PH_PAYLOAD 0x00020000
  85. #define CR_PH_FINAL 0x00030000
  86. #define CR_PH_MASK 0x00030000
  87. #define CR_NBPBL_SHIFT 20
  88. #define SR_BUSY 0x00000010
  89. #define SR_OFNE 0x00000004
  90. #define IMSCR_IN BIT(0)
  91. #define IMSCR_OUT BIT(1)
  92. #define MISR_IN BIT(0)
  93. #define MISR_OUT BIT(1)
  94. /* Misc */
  95. #define AES_BLOCK_32 (AES_BLOCK_SIZE / sizeof(u32))
  96. #define GCM_CTR_INIT 2
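/*
 * GCM payload encryption starts with counter value 2: with a 96-bit IV the
 * pre-counter block (counter value 1) is reserved for the tag computation,
 * so the first payload block uses the incremented counter, i.e. 2.
 */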
  97. #define CRYP_AUTOSUSPEND_DELAY 50
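/*
 * Per-SoC capabilities:
 * @swap_final: byte-swap the AAD/payload bit lengths written during the GCM
 *              final phase (see stm32_cryp_read_auth_tag()).
 * @padding_wa: use the "special workaround" sequences instead of NPBLB when
 *              the last GCM/CCM block is not a full AES block.
 */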
  98. struct stm32_cryp_caps {
  99. bool swap_final;
  100. bool padding_wa;
  101. };
  102. struct stm32_cryp_ctx {
  103. struct crypto_engine_ctx enginectx;
  104. struct stm32_cryp *cryp;
  105. int keylen;
  106. __be32 key[AES_KEYSIZE_256 / sizeof(u32)];
  107. unsigned long flags;
  108. };
  109. struct stm32_cryp_reqctx {
  110. unsigned long mode;
  111. };
  112. struct stm32_cryp {
  113. struct list_head list;
  114. struct device *dev;
  115. void __iomem *regs;
  116. struct clk *clk;
  117. unsigned long flags;
  118. u32 irq_status;
  119. const struct stm32_cryp_caps *caps;
  120. struct stm32_cryp_ctx *ctx;
  121. struct crypto_engine *engine;
  122. struct skcipher_request *req;
  123. struct aead_request *areq;
  124. size_t authsize;
  125. size_t hw_blocksize;
  126. size_t payload_in;
  127. size_t header_in;
  128. size_t payload_out;
  129. struct scatterlist *out_sg;
  130. struct scatter_walk in_walk;
  131. struct scatter_walk out_walk;
  132. __be32 last_ctr[4];
  133. u32 gcm_ctr;
  134. };
  135. struct stm32_cryp_list {
  136. struct list_head dev_list;
  137. spinlock_t lock; /* protect dev_list */
  138. };
  139. static struct stm32_cryp_list cryp_list = {
  140. .dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
  141. .lock = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
  142. };
  143. static inline bool is_aes(struct stm32_cryp *cryp)
  144. {
  145. return cryp->flags & FLG_AES;
  146. }
  147. static inline bool is_des(struct stm32_cryp *cryp)
  148. {
  149. return cryp->flags & FLG_DES;
  150. }
  151. static inline bool is_tdes(struct stm32_cryp *cryp)
  152. {
  153. return cryp->flags & FLG_TDES;
  154. }
  155. static inline bool is_ecb(struct stm32_cryp *cryp)
  156. {
  157. return cryp->flags & FLG_ECB;
  158. }
  159. static inline bool is_cbc(struct stm32_cryp *cryp)
  160. {
  161. return cryp->flags & FLG_CBC;
  162. }
  163. static inline bool is_ctr(struct stm32_cryp *cryp)
  164. {
  165. return cryp->flags & FLG_CTR;
  166. }
  167. static inline bool is_gcm(struct stm32_cryp *cryp)
  168. {
  169. return cryp->flags & FLG_GCM;
  170. }
  171. static inline bool is_ccm(struct stm32_cryp *cryp)
  172. {
  173. return cryp->flags & FLG_CCM;
  174. }
  175. static inline bool is_encrypt(struct stm32_cryp *cryp)
  176. {
  177. return cryp->flags & FLG_ENCRYPT;
  178. }
  179. static inline bool is_decrypt(struct stm32_cryp *cryp)
  180. {
  181. return !is_encrypt(cryp);
  182. }
  183. static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
  184. {
  185. return readl_relaxed(cryp->regs + ofst);
  186. }
  187. static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
  188. {
  189. writel_relaxed(val, cryp->regs + ofst);
  190. }
  191. static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
  192. {
  193. u32 status;
  194. return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
  195. !(status & SR_BUSY), 10, 100000);
  196. }
  197. static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
  198. {
  199. u32 status;
  200. return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
  201. !(status & CR_CRYPEN), 10, 100000);
  202. }
  203. static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
  204. {
  205. u32 status;
  206. return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
  207. status & SR_OFNE, 10, 100000);
  208. }
  209. static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
  210. static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);
  211. static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
  212. {
  213. struct stm32_cryp *tmp, *cryp = NULL;
  214. spin_lock_bh(&cryp_list.lock);
  215. if (!ctx->cryp) {
  216. list_for_each_entry(tmp, &cryp_list.dev_list, list) {
  217. cryp = tmp;
  218. break;
  219. }
  220. ctx->cryp = cryp;
  221. } else {
  222. cryp = ctx->cryp;
  223. }
  224. spin_unlock_bh(&cryp_list.lock);
  225. return cryp;
  226. }
  227. static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
  228. {
  229. if (!iv)
  230. return;
  231. stm32_cryp_write(cryp, CRYP_IV0LR, be32_to_cpu(*iv++));
  232. stm32_cryp_write(cryp, CRYP_IV0RR, be32_to_cpu(*iv++));
  233. if (is_aes(cryp)) {
  234. stm32_cryp_write(cryp, CRYP_IV1LR, be32_to_cpu(*iv++));
  235. stm32_cryp_write(cryp, CRYP_IV1RR, be32_to_cpu(*iv++));
  236. }
  237. }
  238. static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
  239. {
  240. struct skcipher_request *req = cryp->req;
  241. __be32 *tmp = (void *)req->iv;
  242. if (!tmp)
  243. return;
  244. *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
  245. *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
  246. if (is_aes(cryp)) {
  247. *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
  248. *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
  249. }
  250. }
  251. static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
  252. {
  253. unsigned int i;
  254. int r_id;
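/*
 * DES uses only K1L/K1R. For AES and TDES the key is written backwards
 * from CRYP_K3RR so that it ends up right-aligned in the key registers,
 * whatever the key length.
 */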
  255. if (is_des(c)) {
  256. stm32_cryp_write(c, CRYP_K1LR, be32_to_cpu(c->ctx->key[0]));
  257. stm32_cryp_write(c, CRYP_K1RR, be32_to_cpu(c->ctx->key[1]));
  258. } else {
  259. r_id = CRYP_K3RR;
  260. for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
  261. stm32_cryp_write(c, r_id,
  262. be32_to_cpu(c->ctx->key[i - 1]));
  263. }
  264. }
  265. static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
  266. {
  267. if (is_aes(cryp) && is_ecb(cryp))
  268. return CR_AES_ECB;
  269. if (is_aes(cryp) && is_cbc(cryp))
  270. return CR_AES_CBC;
  271. if (is_aes(cryp) && is_ctr(cryp))
  272. return CR_AES_CTR;
  273. if (is_aes(cryp) && is_gcm(cryp))
  274. return CR_AES_GCM;
  275. if (is_aes(cryp) && is_ccm(cryp))
  276. return CR_AES_CCM;
  277. if (is_des(cryp) && is_ecb(cryp))
  278. return CR_DES_ECB;
  279. if (is_des(cryp) && is_cbc(cryp))
  280. return CR_DES_CBC;
  281. if (is_tdes(cryp) && is_ecb(cryp))
  282. return CR_TDES_ECB;
  283. if (is_tdes(cryp) && is_cbc(cryp))
  284. return CR_TDES_CBC;
  285. dev_err(cryp->dev, "Unknown mode\n");
  286. return CR_AES_UNKNOWN;
  287. }
  288. static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
  289. {
  290. return is_encrypt(cryp) ? cryp->areq->cryptlen :
  291. cryp->areq->cryptlen - cryp->authsize;
  292. }
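/*
 * GCM/CCM processing is split by the hardware into four phases selected
 * through CR_PH_*: INIT, then HEADER (associated data), PAYLOAD
 * (plain/cipher text) and FINAL (tag computation).
 */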
  293. static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
  294. {
  295. int ret;
  296. __be32 iv[4];
  297. /* Phase 1 : init */
  298. memcpy(iv, cryp->areq->iv, 12);
  299. iv[3] = cpu_to_be32(GCM_CTR_INIT);
  300. cryp->gcm_ctr = GCM_CTR_INIT;
  301. stm32_cryp_hw_write_iv(cryp, iv);
  302. stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);
  303. /* Wait for end of processing */
  304. ret = stm32_cryp_wait_enable(cryp);
  305. if (ret) {
  306. dev_err(cryp->dev, "Timeout (gcm init)\n");
  307. return ret;
  308. }
  309. /* Prepare next phase */
  310. if (cryp->areq->assoclen) {
  311. cfg |= CR_PH_HEADER;
  312. stm32_cryp_write(cryp, CRYP_CR, cfg);
  313. } else if (stm32_cryp_get_input_text_len(cryp)) {
  314. cfg |= CR_PH_PAYLOAD;
  315. stm32_cryp_write(cryp, CRYP_CR, cfg);
  316. }
  317. return 0;
  318. }
  319. static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
  320. {
  321. u32 cfg;
  322. int err;
  323. /* Check if whole header written */
  324. if (!cryp->header_in) {
  325. /* Wait for completion */
  326. err = stm32_cryp_wait_busy(cryp);
  327. if (err) {
  328. dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
  329. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  330. stm32_cryp_finish_req(cryp, err);
  331. return;
  332. }
  333. if (stm32_cryp_get_input_text_len(cryp)) {
  334. /* Phase 3 : payload */
  335. cfg = stm32_cryp_read(cryp, CRYP_CR);
  336. cfg &= ~CR_CRYPEN;
  337. stm32_cryp_write(cryp, CRYP_CR, cfg);
  338. cfg &= ~CR_PH_MASK;
  339. cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
  340. stm32_cryp_write(cryp, CRYP_CR, cfg);
  341. } else {
  342. /*
  343. * Phase 4 : tag.
  344. * Nothing to read, nothing to write; the caller has to
  345. * end the request.
  346. */
  347. }
  348. }
  349. }
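/*
 * First CCM header block: per RFC 3610 the associated data length is
 * prepended to the AAD, on 2 bytes when assoclen <= 65280, otherwise as
 * 0xff 0xfe followed by the 4-byte length.
 */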
  350. static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
  351. {
  352. unsigned int i;
  353. size_t written;
  354. size_t len;
  355. u32 alen = cryp->areq->assoclen;
  356. u32 block[AES_BLOCK_32] = {0};
  357. u8 *b8 = (u8 *)block;
  358. if (alen <= 65280) {
  359. /* Write first u32 of B1 */
  360. b8[0] = (alen >> 8) & 0xFF;
  361. b8[1] = alen & 0xFF;
  362. len = 2;
  363. } else {
  364. /* Build the two first u32 of B1 */
  365. b8[0] = 0xFF;
  366. b8[1] = 0xFE;
  367. b8[2] = (alen & 0xFF000000) >> 24;
  368. b8[3] = (alen & 0x00FF0000) >> 16;
  369. b8[4] = (alen & 0x0000FF00) >> 8;
  370. b8[5] = alen & 0x000000FF;
  371. len = 6;
  372. }
  373. written = min_t(size_t, AES_BLOCK_SIZE - len, alen);
  374. scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);
  375. for (i = 0; i < AES_BLOCK_32; i++)
  376. stm32_cryp_write(cryp, CRYP_DIN, block[i]);
  377. cryp->header_in -= written;
  378. stm32_crypt_gcmccm_end_header(cryp);
  379. }
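/*
 * CCM B0 block layout (RFC 3610): the flags byte encodes the tag length as
 * 8 * ((authsize - 2) / 2) and sets 0x40 when associated data is present;
 * the nonce follows, and the last bytes carry the message length.
 */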
  380. static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
  381. {
  382. int ret;
  383. u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
  384. u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
  385. __be32 *bd;
  386. u32 *d;
  387. unsigned int i, textlen;
  388. /* Phase 1 : init. First, set the CTR value to 1 (not 0) */
  389. memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
  390. memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
  391. iv[AES_BLOCK_SIZE - 1] = 1;
  392. stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);
  393. /* Build B0 */
  394. memcpy(b0, iv, AES_BLOCK_SIZE);
  395. b0[0] |= (8 * ((cryp->authsize - 2) / 2));
  396. if (cryp->areq->assoclen)
  397. b0[0] |= 0x40;
  398. textlen = stm32_cryp_get_input_text_len(cryp);
  399. b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
  400. b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;
  401. /* Enable HW */
  402. stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);
  403. /* Write B0 */
  404. d = (u32 *)b0;
  405. bd = (__be32 *)b0;
  406. for (i = 0; i < AES_BLOCK_32; i++) {
  407. u32 xd = d[i];
  408. if (!cryp->caps->padding_wa)
  409. xd = be32_to_cpu(bd[i]);
  410. stm32_cryp_write(cryp, CRYP_DIN, xd);
  411. }
  412. /* Wait for end of processing */
  413. ret = stm32_cryp_wait_enable(cryp);
  414. if (ret) {
  415. dev_err(cryp->dev, "Timeout (ccm init)\n");
  416. return ret;
  417. }
  418. /* Prepare next phase */
  419. if (cryp->areq->assoclen) {
  420. cfg |= CR_PH_HEADER | CR_CRYPEN;
  421. stm32_cryp_write(cryp, CRYP_CR, cfg);
  422. /* Write first (special) block (may move to next phase [payload]) */
  423. stm32_cryp_write_ccm_first_header(cryp);
  424. } else if (stm32_cryp_get_input_text_len(cryp)) {
  425. cfg |= CR_PH_PAYLOAD;
  426. stm32_cryp_write(cryp, CRYP_CR, cfg);
  427. }
  428. return 0;
  429. }
  430. static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
  431. {
  432. int ret;
  433. u32 cfg, hw_mode;
  434. pm_runtime_get_sync(cryp->dev);
  435. /* Disable interrupt */
  436. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  437. /* Set key */
  438. stm32_cryp_hw_write_key(cryp);
  439. /* Set configuration */
  440. cfg = CR_DATA8 | CR_FFLUSH;
  441. switch (cryp->ctx->keylen) {
  442. case AES_KEYSIZE_128:
  443. cfg |= CR_KEY128;
  444. break;
  445. case AES_KEYSIZE_192:
  446. cfg |= CR_KEY192;
  447. break;
  448. default:
  449. case AES_KEYSIZE_256:
  450. cfg |= CR_KEY256;
  451. break;
  452. }
  453. hw_mode = stm32_cryp_get_hw_mode(cryp);
  454. if (hw_mode == CR_AES_UNKNOWN)
  455. return -EINVAL;
  456. /* AES ECB/CBC decrypt: run key preparation first */
  457. if (is_decrypt(cryp) &&
  458. ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
  459. stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);
  460. /* Wait for end of processing */
  461. ret = stm32_cryp_wait_busy(cryp);
  462. if (ret) {
  463. dev_err(cryp->dev, "Timeout (key preparation)\n");
  464. return ret;
  465. }
  466. }
  467. cfg |= hw_mode;
  468. if (is_decrypt(cryp))
  469. cfg |= CR_DEC_NOT_ENC;
  470. /* Apply config and flush (valid when CRYPEN = 0) */
  471. stm32_cryp_write(cryp, CRYP_CR, cfg);
  472. switch (hw_mode) {
  473. case CR_AES_GCM:
  474. case CR_AES_CCM:
  475. /* Phase 1 : init */
  476. if (hw_mode == CR_AES_CCM)
  477. ret = stm32_cryp_ccm_init(cryp, cfg);
  478. else
  479. ret = stm32_cryp_gcm_init(cryp, cfg);
  480. if (ret)
  481. return ret;
  482. break;
  483. case CR_DES_CBC:
  484. case CR_TDES_CBC:
  485. case CR_AES_CBC:
  486. case CR_AES_CTR:
  487. stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
  488. break;
  489. default:
  490. break;
  491. }
  492. /* Enable now */
  493. cfg |= CR_CRYPEN;
  494. stm32_cryp_write(cryp, CRYP_CR, cfg);
  495. return 0;
  496. }
  497. static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
  498. {
  499. if (!err && (is_gcm(cryp) || is_ccm(cryp)))
  500. /* Phase 4 : output tag */
  501. err = stm32_cryp_read_auth_tag(cryp);
  502. if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
  503. stm32_cryp_get_iv(cryp);
  504. pm_runtime_mark_last_busy(cryp->dev);
  505. pm_runtime_put_autosuspend(cryp->dev);
  506. if (is_gcm(cryp) || is_ccm(cryp))
  507. crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
  508. else
  509. crypto_finalize_skcipher_request(cryp->engine, cryp->req,
  510. err);
  511. }
  512. static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
  513. {
  514. /* Enable interrupt and let the IRQ handler do everything */
  515. stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);
  516. return 0;
  517. }
  518. static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
  519. static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
  520. void *areq);
  521. static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
  522. {
  523. struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
  524. crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));
  525. ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
  526. ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
  527. ctx->enginectx.op.unprepare_request = NULL;
  528. return 0;
  529. }
  530. static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
  531. static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
  532. void *areq);
  533. static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
  534. {
  535. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
  536. tfm->reqsize = sizeof(struct stm32_cryp_reqctx);
  537. ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
  538. ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
  539. ctx->enginectx.op.unprepare_request = NULL;
  540. return 0;
  541. }
  542. static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
  543. {
  544. struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
  545. crypto_skcipher_reqtfm(req));
  546. struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
  547. struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
  548. if (!cryp)
  549. return -ENODEV;
  550. rctx->mode = mode;
  551. return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
  552. }
  553. static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
  554. {
  555. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
  556. struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
  557. struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
  558. if (!cryp)
  559. return -ENODEV;
  560. rctx->mode = mode;
  561. return crypto_transfer_aead_request_to_engine(cryp->engine, req);
  562. }
  563. static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
  564. unsigned int keylen)
  565. {
  566. struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
  567. memcpy(ctx->key, key, keylen);
  568. ctx->keylen = keylen;
  569. return 0;
  570. }
  571. static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
  572. unsigned int keylen)
  573. {
  574. if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
  575. keylen != AES_KEYSIZE_256)
  576. return -EINVAL;
  577. else
  578. return stm32_cryp_setkey(tfm, key, keylen);
  579. }
  580. static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
  581. unsigned int keylen)
  582. {
  583. return verify_skcipher_des_key(tfm, key) ?:
  584. stm32_cryp_setkey(tfm, key, keylen);
  585. }
  586. static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
  587. unsigned int keylen)
  588. {
  589. return verify_skcipher_des3_key(tfm, key) ?:
  590. stm32_cryp_setkey(tfm, key, keylen);
  591. }
  592. static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
  593. unsigned int keylen)
  594. {
  595. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
  596. if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
  597. keylen != AES_KEYSIZE_256)
  598. return -EINVAL;
  599. memcpy(ctx->key, key, keylen);
  600. ctx->keylen = keylen;
  601. return 0;
  602. }
  603. static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
  604. unsigned int authsize)
  605. {
  606. switch (authsize) {
  607. case 4:
  608. case 8:
  609. case 12:
  610. case 13:
  611. case 14:
  612. case 15:
  613. case 16:
  614. break;
  615. default:
  616. return -EINVAL;
  617. }
  618. return 0;
  619. }
  620. static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
  621. unsigned int authsize)
  622. {
  623. switch (authsize) {
  624. case 4:
  625. case 6:
  626. case 8:
  627. case 10:
  628. case 12:
  629. case 14:
  630. case 16:
  631. break;
  632. default:
  633. return -EINVAL;
  634. }
  635. return 0;
  636. }
  637. static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
  638. {
  639. if (req->cryptlen % AES_BLOCK_SIZE)
  640. return -EINVAL;
  641. if (req->cryptlen == 0)
  642. return 0;
  643. return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
  644. }
  645. static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
  646. {
  647. if (req->cryptlen % AES_BLOCK_SIZE)
  648. return -EINVAL;
  649. if (req->cryptlen == 0)
  650. return 0;
  651. return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
  652. }
  653. static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
  654. {
  655. if (req->cryptlen % AES_BLOCK_SIZE)
  656. return -EINVAL;
  657. if (req->cryptlen == 0)
  658. return 0;
  659. return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
  660. }
  661. static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
  662. {
  663. if (req->cryptlen % AES_BLOCK_SIZE)
  664. return -EINVAL;
  665. if (req->cryptlen == 0)
  666. return 0;
  667. return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
  668. }
  669. static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
  670. {
  671. if (req->cryptlen == 0)
  672. return 0;
  673. return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
  674. }
  675. static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
  676. {
  677. if (req->cryptlen == 0)
  678. return 0;
  679. return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
  680. }
  681. static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
  682. {
  683. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
  684. }
  685. static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
  686. {
  687. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
  688. }
  689. static inline int crypto_ccm_check_iv(const u8 *iv)
  690. {
  691. /* 2 <= L <= 8, so 1 <= L' <= 7. */
  692. if (iv[0] < 1 || iv[0] > 7)
  693. return -EINVAL;
  694. return 0;
  695. }
  696. static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
  697. {
  698. int err;
  699. err = crypto_ccm_check_iv(req->iv);
  700. if (err)
  701. return err;
  702. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
  703. }
  704. static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
  705. {
  706. int err;
  707. err = crypto_ccm_check_iv(req->iv);
  708. if (err)
  709. return err;
  710. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
  711. }
  712. static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
  713. {
  714. if (req->cryptlen % DES_BLOCK_SIZE)
  715. return -EINVAL;
  716. if (req->cryptlen == 0)
  717. return 0;
  718. return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
  719. }
  720. static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
  721. {
  722. if (req->cryptlen % DES_BLOCK_SIZE)
  723. return -EINVAL;
  724. if (req->cryptlen == 0)
  725. return 0;
  726. return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
  727. }
  728. static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
  729. {
  730. if (req->cryptlen % DES_BLOCK_SIZE)
  731. return -EINVAL;
  732. if (req->cryptlen == 0)
  733. return 0;
  734. return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
  735. }
  736. static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
  737. {
  738. if (req->cryptlen % DES_BLOCK_SIZE)
  739. return -EINVAL;
  740. if (req->cryptlen == 0)
  741. return 0;
  742. return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
  743. }
  744. static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
  745. {
  746. if (req->cryptlen % DES_BLOCK_SIZE)
  747. return -EINVAL;
  748. if (req->cryptlen == 0)
  749. return 0;
  750. return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
  751. }
  752. static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
  753. {
  754. if (req->cryptlen % DES_BLOCK_SIZE)
  755. return -EINVAL;
  756. if (req->cryptlen == 0)
  757. return 0;
  758. return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
  759. }
  760. static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
  761. {
  762. if (req->cryptlen % DES_BLOCK_SIZE)
  763. return -EINVAL;
  764. if (req->cryptlen == 0)
  765. return 0;
  766. return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
  767. }
  768. static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
  769. {
  770. if (req->cryptlen % DES_BLOCK_SIZE)
  771. return -EINVAL;
  772. if (req->cryptlen == 0)
  773. return 0;
  774. return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
  775. }
  776. static int stm32_cryp_prepare_req(struct skcipher_request *req,
  777. struct aead_request *areq)
  778. {
  779. struct stm32_cryp_ctx *ctx;
  780. struct stm32_cryp *cryp;
  781. struct stm32_cryp_reqctx *rctx;
  782. struct scatterlist *in_sg;
  783. int ret;
  784. if (!req && !areq)
  785. return -EINVAL;
  786. ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
  787. crypto_aead_ctx(crypto_aead_reqtfm(areq));
  788. cryp = ctx->cryp;
  789. if (!cryp)
  790. return -ENODEV;
  791. rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
  792. rctx->mode &= FLG_MODE_MASK;
  793. ctx->cryp = cryp;
  794. cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
  795. cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
  796. cryp->ctx = ctx;
  797. if (req) {
  798. cryp->req = req;
  799. cryp->areq = NULL;
  800. cryp->header_in = 0;
  801. cryp->payload_in = req->cryptlen;
  802. cryp->payload_out = req->cryptlen;
  803. cryp->authsize = 0;
  804. } else {
  805. /*
  806. * Length of input and output data:
  807. * Encryption case:
  808. * INPUT = AssocData || PlainText
  809. * <- assoclen -> <- cryptlen ->
  810. *
  811. * OUTPUT = AssocData || CipherText || AuthTag
  812. * <- assoclen -> <-- cryptlen --> <- authsize ->
  813. *
  814. * Decryption case:
  815. * INPUT = AssocData || CipherText || AuthTag
  816. * <- assoclen ---> <---------- cryptlen ---------->
  817. *
  818. * OUTPUT = AssocData || PlainText
  819. * <- assoclen -> <- cryptlen - authsize ->
  820. */
  821. cryp->areq = areq;
  822. cryp->req = NULL;
  823. cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
  824. if (is_encrypt(cryp)) {
  825. cryp->payload_in = areq->cryptlen;
  826. cryp->header_in = areq->assoclen;
  827. cryp->payload_out = areq->cryptlen;
  828. } else {
  829. cryp->payload_in = areq->cryptlen - cryp->authsize;
  830. cryp->header_in = areq->assoclen;
  831. cryp->payload_out = cryp->payload_in;
  832. }
  833. }
  834. in_sg = req ? req->src : areq->src;
  835. scatterwalk_start(&cryp->in_walk, in_sg);
  836. cryp->out_sg = req ? req->dst : areq->dst;
  837. scatterwalk_start(&cryp->out_walk, cryp->out_sg);
  838. if (is_gcm(cryp) || is_ccm(cryp)) {
  839. /* In output, jump after assoc data */
  840. scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
  841. }
  842. if (is_ctr(cryp))
  843. memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));
  844. ret = stm32_cryp_hw_init(cryp);
  845. return ret;
  846. }
  847. static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
  848. void *areq)
  849. {
  850. struct skcipher_request *req = container_of(areq,
  851. struct skcipher_request,
  852. base);
  853. return stm32_cryp_prepare_req(req, NULL);
  854. }
  855. static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
  856. {
  857. struct skcipher_request *req = container_of(areq,
  858. struct skcipher_request,
  859. base);
  860. struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
  861. crypto_skcipher_reqtfm(req));
  862. struct stm32_cryp *cryp = ctx->cryp;
  863. if (!cryp)
  864. return -ENODEV;
  865. return stm32_cryp_cpu_start(cryp);
  866. }
  867. static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
  868. {
  869. struct aead_request *req = container_of(areq, struct aead_request,
  870. base);
  871. return stm32_cryp_prepare_req(NULL, req);
  872. }
  873. static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
  874. {
  875. struct aead_request *req = container_of(areq, struct aead_request,
  876. base);
  877. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
  878. struct stm32_cryp *cryp = ctx->cryp;
  879. if (!cryp)
  880. return -ENODEV;
  881. if (unlikely(!cryp->payload_in && !cryp->header_in)) {
  882. /* No input data to process: get tag and finish */
  883. stm32_cryp_finish_req(cryp, 0);
  884. return 0;
  885. }
  886. return stm32_cryp_cpu_start(cryp);
  887. }
  888. static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
  889. {
  890. u32 cfg, size_bit;
  891. unsigned int i;
  892. int ret = 0;
  893. /* Update Config */
  894. cfg = stm32_cryp_read(cryp, CRYP_CR);
  895. cfg &= ~CR_PH_MASK;
  896. cfg |= CR_PH_FINAL;
  897. cfg &= ~CR_DEC_NOT_ENC;
  898. cfg |= CR_CRYPEN;
  899. stm32_cryp_write(cryp, CRYP_CR, cfg);
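/*
 * The GCM final phase consumes the AAD and payload lengths in bits (as two
 * 64-bit values); the CCM final phase consumes CTR0, i.e. the nonce with
 * the counter field cleared.
 */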
  900. if (is_gcm(cryp)) {
  901. /* GCM: write aad and payload size (in bits) */
  902. size_bit = cryp->areq->assoclen * 8;
  903. if (cryp->caps->swap_final)
  904. size_bit = (__force u32)cpu_to_be32(size_bit);
  905. stm32_cryp_write(cryp, CRYP_DIN, 0);
  906. stm32_cryp_write(cryp, CRYP_DIN, size_bit);
  907. size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
  908. cryp->areq->cryptlen - cryp->authsize;
  909. size_bit *= 8;
  910. if (cryp->caps->swap_final)
  911. size_bit = (__force u32)cpu_to_be32(size_bit);
  912. stm32_cryp_write(cryp, CRYP_DIN, 0);
  913. stm32_cryp_write(cryp, CRYP_DIN, size_bit);
  914. } else {
  915. /* CCM: write CTR0 */
  916. u32 iv32[AES_BLOCK_32];
  917. u8 *iv = (u8 *)iv32;
  918. __be32 *biv = (__be32 *)iv32;
  919. memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
  920. memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
  921. for (i = 0; i < AES_BLOCK_32; i++) {
  922. u32 xiv = iv32[i];
  923. if (!cryp->caps->padding_wa)
  924. xiv = be32_to_cpu(biv[i]);
  925. stm32_cryp_write(cryp, CRYP_DIN, xiv);
  926. }
  927. }
  928. /* Wait for output data */
  929. ret = stm32_cryp_wait_output(cryp);
  930. if (ret) {
  931. dev_err(cryp->dev, "Timeout (read tag)\n");
  932. return ret;
  933. }
  934. if (is_encrypt(cryp)) {
  935. u32 out_tag[AES_BLOCK_32];
  936. /* Get and write tag */
  937. for (i = 0; i < AES_BLOCK_32; i++)
  938. out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);
  939. scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
  940. } else {
  941. /* Get and check tag */
  942. u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];
  943. scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);
  944. for (i = 0; i < AES_BLOCK_32; i++)
  945. out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);
  946. if (crypto_memneq(in_tag, out_tag, cryp->authsize))
  947. ret = -EBADMSG;
  948. }
  949. /* Disable cryp */
  950. cfg &= ~CR_CRYPEN;
  951. stm32_cryp_write(cryp, CRYP_CR, cfg);
  952. return ret;
  953. }
  954. static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
  955. {
  956. u32 cr;
  957. if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
  958. /*
  959. * In this case, we need to manually increment the ctr counter,
  960. * as the HW doesn't handle the u32 carry.
  961. */
  962. crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));
  963. cr = stm32_cryp_read(cryp, CRYP_CR);
  964. stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);
  965. stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);
  966. stm32_cryp_write(cryp, CRYP_CR, cr);
  967. }
  968. /* The IV registers are BE */
  969. cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
  970. cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
  971. cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
  972. cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
  973. }
  974. static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
  975. {
  976. unsigned int i;
  977. u32 block[AES_BLOCK_32];
  978. for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
  979. block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
  980. scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
  981. cryp->payload_out), 1);
  982. cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
  983. cryp->payload_out);
  984. }
  985. static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
  986. {
  987. unsigned int i;
  988. u32 block[AES_BLOCK_32] = {0};
  989. scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
  990. cryp->payload_in), 0);
  991. for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
  992. stm32_cryp_write(cryp, CRYP_DIN, block[i]);
  993. cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
  994. }
  995. static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
  996. {
  997. int err;
  998. u32 cfg, block[AES_BLOCK_32] = {0};
  999. unsigned int i;
  1000. /* 'Special workaround' procedure described in the datasheet */
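/*
 * Summary of the workaround (roughly): the final partial block is encrypted
 * through CTR mode using the current GCM counter, the result is copied to
 * the output, and the same zero-padded ciphertext is then replayed through
 * the GCM FINAL phase so that the tag accounts for it.
 */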
  1001. /* a) disable ip */
  1002. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  1003. cfg = stm32_cryp_read(cryp, CRYP_CR);
  1004. cfg &= ~CR_CRYPEN;
  1005. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1006. /* b) Update IV1R */
  1007. stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);
  1008. /* c) change mode to CTR */
  1009. cfg &= ~CR_ALGO_MASK;
  1010. cfg |= CR_AES_CTR;
  1011. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1012. /* a) enable IP */
  1013. cfg |= CR_CRYPEN;
  1014. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1015. /* b) pad and write the last block */
  1016. stm32_cryp_irq_write_block(cryp);
  1017. /* wait end of process */
  1018. err = stm32_cryp_wait_output(cryp);
  1019. if (err) {
  1020. dev_err(cryp->dev, "Timeout (write gcm last data)\n");
  1021. return stm32_cryp_finish_req(cryp, err);
  1022. }
  1023. /* c) get and store encrypted data */
  1024. /*
  1025. * Same code as stm32_cryp_irq_read_data(), but we want to store
  1026. * block value
  1027. */
  1028. for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
  1029. block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
  1030. scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
  1031. cryp->payload_out), 1);
  1032. cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
  1033. cryp->payload_out);
  1034. /* d) change mode back to AES GCM */
  1035. cfg &= ~CR_ALGO_MASK;
  1036. cfg |= CR_AES_GCM;
  1037. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1038. /* e) change phase to Final */
  1039. cfg &= ~CR_PH_MASK;
  1040. cfg |= CR_PH_FINAL;
  1041. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1042. /* f) write padded data */
  1043. for (i = 0; i < AES_BLOCK_32; i++)
  1044. stm32_cryp_write(cryp, CRYP_DIN, block[i]);
  1045. /* g) Empty fifo out */
  1046. err = stm32_cryp_wait_output(cryp);
  1047. if (err) {
  1048. dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
  1049. return stm32_cryp_finish_req(cryp, err);
  1050. }
  1051. for (i = 0; i < AES_BLOCK_32; i++)
  1052. stm32_cryp_read(cryp, CRYP_DOUT);
  1053. /* h) run the normal Final phase */
  1054. stm32_cryp_finish_req(cryp, 0);
  1055. }
  1056. static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
  1057. {
  1058. u32 cfg;
  1059. /* disable IP, set NPBLB and re-enable IP */
  1060. cfg = stm32_cryp_read(cryp, CRYP_CR);
  1061. cfg &= ~CR_CRYPEN;
  1062. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1063. cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
  1064. cfg |= CR_CRYPEN;
  1065. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1066. }
  1067. static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
  1068. {
  1069. int err = 0;
  1070. u32 cfg, iv1tmp;
  1071. u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
  1072. u32 block[AES_BLOCK_32] = {0};
  1073. unsigned int i;
  1074. /* 'Special workaround' procedure described in the datasheet */
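/*
 * Summary of the workaround (roughly): the final partial block is run
 * through CTR mode, then the result is XORed with the CSGCMCCM context
 * snapshots taken before and after, and replayed through the HEADER phase
 * so that the CBC-MAC stays consistent.
 */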
  1075. /* a) disable ip */
  1076. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  1077. cfg = stm32_cryp_read(cryp, CRYP_CR);
  1078. cfg &= ~CR_CRYPEN;
  1079. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1080. /* b) get IV1 from CRYP_CSGCMCCM7 */
  1081. iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);
  1082. /* c) Load CRYP_CSGCMCCMxR */
  1083. for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
  1084. cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
  1085. /* d) Write IV1R */
  1086. stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);
  1087. /* e) change mode to CTR */
  1088. cfg &= ~CR_ALGO_MASK;
  1089. cfg |= CR_AES_CTR;
  1090. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1091. /* a) enable IP */
  1092. cfg |= CR_CRYPEN;
  1093. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1094. /* b) pad and write the last block */
  1095. stm32_cryp_irq_write_block(cryp);
  1096. /* wait end of process */
  1097. err = stm32_cryp_wait_output(cryp);
  1098. if (err) {
  1099. dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
  1100. return stm32_cryp_finish_req(cryp, err);
  1101. }
  1102. /* c) get and store decrypted data */
  1103. /*
  1104. * Same code as stm32_cryp_irq_read_data(), but we want to store
  1105. * block value
  1106. */
  1107. for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
  1108. block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
  1109. scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
  1110. cryp->payload_out), 1);
  1111. cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);
  1112. /* d) Load again CRYP_CSGCMCCMxR */
  1113. for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
  1114. cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
  1115. /* e) change mode back to AES CCM */
  1116. cfg &= ~CR_ALGO_MASK;
  1117. cfg |= CR_AES_CCM;
  1118. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1119. /* f) change phase to header */
  1120. cfg &= ~CR_PH_MASK;
  1121. cfg |= CR_PH_HEADER;
  1122. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1123. /* g) XOR and write padded data */
  1124. for (i = 0; i < ARRAY_SIZE(block); i++) {
  1125. block[i] ^= cstmp1[i];
  1126. block[i] ^= cstmp2[i];
  1127. stm32_cryp_write(cryp, CRYP_DIN, block[i]);
  1128. }
  1129. /* h) wait for completion */
  1130. err = stm32_cryp_wait_busy(cryp);
  1131. if (err)
  1132. dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
  1133. /* i) run the normal Final phase */
  1134. stm32_cryp_finish_req(cryp, err);
  1135. }
  1136. static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
  1137. {
  1138. if (unlikely(!cryp->payload_in)) {
  1139. dev_warn(cryp->dev, "No more data to process\n");
  1140. return;
  1141. }
  1142. if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
  1143. (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
  1144. is_encrypt(cryp))) {
  1145. /* Padding for AES GCM encryption */
  1146. if (cryp->caps->padding_wa) {
  1147. /* Special case 1 */
  1148. stm32_cryp_irq_write_gcm_padded_data(cryp);
  1149. return;
  1150. }
  1151. /* Setting padding bytes (NPBLB) */
  1152. stm32_cryp_irq_set_npblb(cryp);
  1153. }
  1154. if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
  1155. (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
  1156. is_decrypt(cryp))) {
  1157. /* Padding for AES CCM decryption */
  1158. if (cryp->caps->padding_wa) {
  1159. /* Special case 2 */
  1160. stm32_cryp_irq_write_ccm_padded_data(cryp);
  1161. return;
  1162. }
  1163. /* Setting padding bytes (NPBLB) */
  1164. stm32_cryp_irq_set_npblb(cryp);
  1165. }
  1166. if (is_aes(cryp) && is_ctr(cryp))
  1167. stm32_cryp_check_ctr_counter(cryp);
  1168. stm32_cryp_irq_write_block(cryp);
  1169. }
  1170. static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
  1171. {
  1172. unsigned int i;
  1173. u32 block[AES_BLOCK_32] = {0};
  1174. size_t written;
  1175. written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);
  1176. scatterwalk_copychunks(block, &cryp->in_walk, written, 0);
  1177. for (i = 0; i < AES_BLOCK_32; i++)
  1178. stm32_cryp_write(cryp, CRYP_DIN, block[i]);
  1179. cryp->header_in -= written;
  1180. stm32_crypt_gcmccm_end_header(cryp);
  1181. }
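/*
 * Interrupt handling is split in two: the hard IRQ handler only latches the
 * masked status (MISR), the threaded handler then drains the output FIFO
 * and/or refills the input FIFO one block at a time.
 */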
  1182. static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
  1183. {
  1184. struct stm32_cryp *cryp = arg;
  1185. u32 ph;
  1186. u32 it_mask = stm32_cryp_read(cryp, CRYP_IMSCR);
  1187. if (cryp->irq_status & MISR_OUT)
  1188. /* Output FIFO IRQ: read data */
  1189. stm32_cryp_irq_read_data(cryp);
  1190. if (cryp->irq_status & MISR_IN) {
  1191. if (is_gcm(cryp) || is_ccm(cryp)) {
  1192. ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
  1193. if (unlikely(ph == CR_PH_HEADER))
  1194. /* Write Header */
  1195. stm32_cryp_irq_write_gcmccm_header(cryp);
  1196. else
  1197. /* Input FIFO IRQ: write data */
  1198. stm32_cryp_irq_write_data(cryp);
  1199. if (is_gcm(cryp))
  1200. cryp->gcm_ctr++;
  1201. } else {
  1202. /* Input FIFO IRQ: write data */
  1203. stm32_cryp_irq_write_data(cryp);
  1204. }
  1205. }
  1206. /* Mask useless interrupts */
  1207. if (!cryp->payload_in && !cryp->header_in)
  1208. it_mask &= ~IMSCR_IN;
  1209. if (!cryp->payload_out)
  1210. it_mask &= ~IMSCR_OUT;
  1211. stm32_cryp_write(cryp, CRYP_IMSCR, it_mask);
  1212. if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out)
  1213. stm32_cryp_finish_req(cryp, 0);
  1214. return IRQ_HANDLED;
  1215. }
  1216. static irqreturn_t stm32_cryp_irq(int irq, void *arg)
  1217. {
  1218. struct stm32_cryp *cryp = arg;
  1219. cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);
  1220. return IRQ_WAKE_THREAD;
  1221. }
  1222. static struct skcipher_alg crypto_algs[] = {
  1223. {
  1224. .base.cra_name = "ecb(aes)",
  1225. .base.cra_driver_name = "stm32-ecb-aes",
  1226. .base.cra_priority = 200,
  1227. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1228. .base.cra_blocksize = AES_BLOCK_SIZE,
  1229. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1230. .base.cra_alignmask = 0,
  1231. .base.cra_module = THIS_MODULE,
  1232. .init = stm32_cryp_init_tfm,
  1233. .min_keysize = AES_MIN_KEY_SIZE,
  1234. .max_keysize = AES_MAX_KEY_SIZE,
  1235. .setkey = stm32_cryp_aes_setkey,
  1236. .encrypt = stm32_cryp_aes_ecb_encrypt,
  1237. .decrypt = stm32_cryp_aes_ecb_decrypt,
  1238. },
  1239. {
  1240. .base.cra_name = "cbc(aes)",
  1241. .base.cra_driver_name = "stm32-cbc-aes",
  1242. .base.cra_priority = 200,
  1243. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1244. .base.cra_blocksize = AES_BLOCK_SIZE,
  1245. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1246. .base.cra_alignmask = 0,
  1247. .base.cra_module = THIS_MODULE,
  1248. .init = stm32_cryp_init_tfm,
  1249. .min_keysize = AES_MIN_KEY_SIZE,
  1250. .max_keysize = AES_MAX_KEY_SIZE,
  1251. .ivsize = AES_BLOCK_SIZE,
  1252. .setkey = stm32_cryp_aes_setkey,
  1253. .encrypt = stm32_cryp_aes_cbc_encrypt,
  1254. .decrypt = stm32_cryp_aes_cbc_decrypt,
  1255. },
  1256. {
  1257. .base.cra_name = "ctr(aes)",
  1258. .base.cra_driver_name = "stm32-ctr-aes",
  1259. .base.cra_priority = 200,
  1260. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1261. .base.cra_blocksize = 1,
  1262. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1263. .base.cra_alignmask = 0,
  1264. .base.cra_module = THIS_MODULE,
  1265. .init = stm32_cryp_init_tfm,
  1266. .min_keysize = AES_MIN_KEY_SIZE,
  1267. .max_keysize = AES_MAX_KEY_SIZE,
  1268. .ivsize = AES_BLOCK_SIZE,
  1269. .setkey = stm32_cryp_aes_setkey,
  1270. .encrypt = stm32_cryp_aes_ctr_encrypt,
  1271. .decrypt = stm32_cryp_aes_ctr_decrypt,
  1272. },
  1273. {
  1274. .base.cra_name = "ecb(des)",
  1275. .base.cra_driver_name = "stm32-ecb-des",
  1276. .base.cra_priority = 200,
  1277. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1278. .base.cra_blocksize = DES_BLOCK_SIZE,
  1279. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1280. .base.cra_alignmask = 0,
  1281. .base.cra_module = THIS_MODULE,
  1282. .init = stm32_cryp_init_tfm,
  1283. .min_keysize = DES_BLOCK_SIZE,
  1284. .max_keysize = DES_BLOCK_SIZE,
  1285. .setkey = stm32_cryp_des_setkey,
  1286. .encrypt = stm32_cryp_des_ecb_encrypt,
  1287. .decrypt = stm32_cryp_des_ecb_decrypt,
  1288. },
  1289. {
  1290. .base.cra_name = "cbc(des)",
  1291. .base.cra_driver_name = "stm32-cbc-des",
  1292. .base.cra_priority = 200,
  1293. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1294. .base.cra_blocksize = DES_BLOCK_SIZE,
  1295. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1296. .base.cra_alignmask = 0,
  1297. .base.cra_module = THIS_MODULE,
  1298. .init = stm32_cryp_init_tfm,
  1299. .min_keysize = DES_BLOCK_SIZE,
  1300. .max_keysize = DES_BLOCK_SIZE,
  1301. .ivsize = DES_BLOCK_SIZE,
  1302. .setkey = stm32_cryp_des_setkey,
  1303. .encrypt = stm32_cryp_des_cbc_encrypt,
  1304. .decrypt = stm32_cryp_des_cbc_decrypt,
  1305. },
  1306. {
  1307. .base.cra_name = "ecb(des3_ede)",
  1308. .base.cra_driver_name = "stm32-ecb-des3",
  1309. .base.cra_priority = 200,
  1310. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1311. .base.cra_blocksize = DES_BLOCK_SIZE,
  1312. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1313. .base.cra_alignmask = 0,
  1314. .base.cra_module = THIS_MODULE,
  1315. .init = stm32_cryp_init_tfm,
  1316. .min_keysize = 3 * DES_BLOCK_SIZE,
  1317. .max_keysize = 3 * DES_BLOCK_SIZE,
  1318. .setkey = stm32_cryp_tdes_setkey,
  1319. .encrypt = stm32_cryp_tdes_ecb_encrypt,
  1320. .decrypt = stm32_cryp_tdes_ecb_decrypt,
  1321. },
  1322. {
  1323. .base.cra_name = "cbc(des3_ede)",
  1324. .base.cra_driver_name = "stm32-cbc-des3",
  1325. .base.cra_priority = 200,
  1326. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1327. .base.cra_blocksize = DES_BLOCK_SIZE,
  1328. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1329. .base.cra_alignmask = 0,
  1330. .base.cra_module = THIS_MODULE,
  1331. .init = stm32_cryp_init_tfm,
  1332. .min_keysize = 3 * DES_BLOCK_SIZE,
  1333. .max_keysize = 3 * DES_BLOCK_SIZE,
  1334. .ivsize = DES_BLOCK_SIZE,
  1335. .setkey = stm32_cryp_tdes_setkey,
  1336. .encrypt = stm32_cryp_tdes_cbc_encrypt,
  1337. .decrypt = stm32_cryp_tdes_cbc_decrypt,
  1338. },
  1339. };
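/*
 * Illustrative only, not part of the driver: once these algorithms are
 * registered, a kernel user reaches them through the generic crypto API.
 * The buffer and length names below (key, src_sg, dst_sg, len, iv) are made
 * up for the example.
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, len, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 *
 * Whether this implementation is picked depends on its cra_priority versus
 * other registered "cbc(aes)" providers.
 */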
  1340. static struct aead_alg aead_algs[] = {
  1341. {
  1342. .setkey = stm32_cryp_aes_aead_setkey,
  1343. .setauthsize = stm32_cryp_aes_gcm_setauthsize,
  1344. .encrypt = stm32_cryp_aes_gcm_encrypt,
  1345. .decrypt = stm32_cryp_aes_gcm_decrypt,
  1346. .init = stm32_cryp_aes_aead_init,
  1347. .ivsize = 12,
  1348. .maxauthsize = AES_BLOCK_SIZE,
  1349. .base = {
  1350. .cra_name = "gcm(aes)",
  1351. .cra_driver_name = "stm32-gcm-aes",
  1352. .cra_priority = 200,
  1353. .cra_flags = CRYPTO_ALG_ASYNC,
  1354. .cra_blocksize = 1,
  1355. .cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1356. .cra_alignmask = 0,
  1357. .cra_module = THIS_MODULE,
  1358. },
  1359. },
  1360. {
  1361. .setkey = stm32_cryp_aes_aead_setkey,
  1362. .setauthsize = stm32_cryp_aes_ccm_setauthsize,
  1363. .encrypt = stm32_cryp_aes_ccm_encrypt,
  1364. .decrypt = stm32_cryp_aes_ccm_decrypt,
  1365. .init = stm32_cryp_aes_aead_init,
  1366. .ivsize = AES_BLOCK_SIZE,
  1367. .maxauthsize = AES_BLOCK_SIZE,
  1368. .base = {
  1369. .cra_name = "ccm(aes)",
  1370. .cra_driver_name = "stm32-ccm-aes",
  1371. .cra_priority = 200,
  1372. .cra_flags = CRYPTO_ALG_ASYNC,
  1373. .cra_blocksize = 1,
  1374. .cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1375. .cra_alignmask = 0,
  1376. .cra_module = THIS_MODULE,
  1377. },
  1378. },
  1379. };
  1380. static const struct stm32_cryp_caps f7_data = {
  1381. .swap_final = true,
  1382. .padding_wa = true,
  1383. };
  1384. static const struct stm32_cryp_caps mp1_data = {
  1385. .swap_final = false,
  1386. .padding_wa = false,
  1387. };
  1388. static const struct of_device_id stm32_dt_ids[] = {
  1389. { .compatible = "st,stm32f756-cryp", .data = &f7_data},
  1390. { .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
  1391. {},
  1392. };
  1393. MODULE_DEVICE_TABLE(of, stm32_dt_ids);
  1394. static int stm32_cryp_probe(struct platform_device *pdev)
  1395. {
  1396. struct device *dev = &pdev->dev;
  1397. struct stm32_cryp *cryp;
  1398. struct reset_control *rst;
  1399. int irq, ret;
  1400. cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
  1401. if (!cryp)
  1402. return -ENOMEM;
  1403. cryp->caps = of_device_get_match_data(dev);
  1404. if (!cryp->caps)
  1405. return -ENODEV;
  1406. cryp->dev = dev;
  1407. cryp->regs = devm_platform_ioremap_resource(pdev, 0);
  1408. if (IS_ERR(cryp->regs))
  1409. return PTR_ERR(cryp->regs);
  1410. irq = platform_get_irq(pdev, 0);
  1411. if (irq < 0)
  1412. return irq;
  1413. ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
  1414. stm32_cryp_irq_thread, IRQF_ONESHOT,
  1415. dev_name(dev), cryp);
  1416. if (ret) {
  1417. dev_err(dev, "Cannot grab IRQ\n");
  1418. return ret;
  1419. }
  1420. cryp->clk = devm_clk_get(dev, NULL);
  1421. if (IS_ERR(cryp->clk)) {
  1422. dev_err(dev, "Could not get clock\n");
  1423. return PTR_ERR(cryp->clk);
  1424. }
  1425. ret = clk_prepare_enable(cryp->clk);
  1426. if (ret) {
  1427. dev_err(cryp->dev, "Failed to enable clock\n");
  1428. return ret;
  1429. }
  1430. pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
  1431. pm_runtime_use_autosuspend(dev);
  1432. pm_runtime_get_noresume(dev);
  1433. pm_runtime_set_active(dev);
  1434. pm_runtime_enable(dev);
  1435. rst = devm_reset_control_get(dev, NULL);
  1436. if (!IS_ERR(rst)) {
  1437. reset_control_assert(rst);
  1438. udelay(2);
  1439. reset_control_deassert(rst);
  1440. }
  1441. platform_set_drvdata(pdev, cryp);
  1442. spin_lock(&cryp_list.lock);
  1443. list_add(&cryp->list, &cryp_list.dev_list);
  1444. spin_unlock(&cryp_list.lock);
  1445. /* Initialize crypto engine */
  1446. cryp->engine = crypto_engine_alloc_init(dev, 1);
  1447. if (!cryp->engine) {
  1448. dev_err(dev, "Could not init crypto engine\n");
  1449. ret = -ENOMEM;
  1450. goto err_engine1;
  1451. }
  1452. ret = crypto_engine_start(cryp->engine);
  1453. if (ret) {
  1454. dev_err(dev, "Could not start crypto engine\n");
  1455. goto err_engine2;
  1456. }
  1457. ret = crypto_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
  1458. if (ret) {
  1459. dev_err(dev, "Could not register algs\n");
  1460. goto err_algs;
  1461. }
  1462. ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
  1463. if (ret)
  1464. goto err_aead_algs;
  1465. dev_info(dev, "Initialized\n");
  1466. pm_runtime_put_sync(dev);
  1467. return 0;
  1468. err_aead_algs:
  1469. crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
  1470. err_algs:
  1471. err_engine2:
  1472. crypto_engine_exit(cryp->engine);
  1473. err_engine1:
  1474. spin_lock(&cryp_list.lock);
  1475. list_del(&cryp->list);
  1476. spin_unlock(&cryp_list.lock);
  1477. pm_runtime_disable(dev);
  1478. pm_runtime_put_noidle(dev);
  1479. clk_disable_unprepare(cryp->clk);
  1480. return ret;
  1481. }
  1482. static int stm32_cryp_remove(struct platform_device *pdev)
  1483. {
  1484. struct stm32_cryp *cryp = platform_get_drvdata(pdev);
  1485. int ret;
  1486. if (!cryp)
  1487. return -ENODEV;
  1488. ret = pm_runtime_resume_and_get(cryp->dev);
  1489. if (ret < 0)
  1490. return ret;
  1491. crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
  1492. crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
  1493. crypto_engine_exit(cryp->engine);
  1494. spin_lock(&cryp_list.lock);
  1495. list_del(&cryp->list);
  1496. spin_unlock(&cryp_list.lock);
  1497. pm_runtime_disable(cryp->dev);
  1498. pm_runtime_put_noidle(cryp->dev);
  1499. clk_disable_unprepare(cryp->clk);
  1500. return 0;
  1501. }
  1502. #ifdef CONFIG_PM
  1503. static int stm32_cryp_runtime_suspend(struct device *dev)
  1504. {
  1505. struct stm32_cryp *cryp = dev_get_drvdata(dev);
  1506. clk_disable_unprepare(cryp->clk);
  1507. return 0;
  1508. }
  1509. static int stm32_cryp_runtime_resume(struct device *dev)
  1510. {
  1511. struct stm32_cryp *cryp = dev_get_drvdata(dev);
  1512. int ret;
  1513. ret = clk_prepare_enable(cryp->clk);
  1514. if (ret) {
  1515. dev_err(cryp->dev, "Failed to prepare_enable clock\n");
  1516. return ret;
  1517. }
  1518. return 0;
  1519. }
  1520. #endif
  1521. static const struct dev_pm_ops stm32_cryp_pm_ops = {
  1522. SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
  1523. pm_runtime_force_resume)
  1524. SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
  1525. stm32_cryp_runtime_resume, NULL)
  1526. };
  1527. static struct platform_driver stm32_cryp_driver = {
  1528. .probe = stm32_cryp_probe,
  1529. .remove = stm32_cryp_remove,
  1530. .driver = {
  1531. .name = DRIVER_NAME,
  1532. .pm = &stm32_cryp_pm_ops,
  1533. .of_match_table = stm32_dt_ids,
  1534. },
  1535. };
  1536. module_platform_driver(stm32_cryp_driver);
  1537. MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
  1538. MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
  1539. MODULE_LICENSE("GPL");