aes-cipher-core.S

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Scalar AES core transform
 *
 * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/cache.h>

	.text

	rk		.req	x0
	out		.req	x1
	in		.req	x2
	rounds		.req	x3
	tt		.req	x2
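
	/*
	 * Register roles, for reference. The actual prototype lives in the C
	 * glue code, not in this file, but the registers suggest something
	 * like:
	 *
	 *   __aes_arm64_{en,de}crypt(u32 *rk, u8 *out, const u8 *in, int rounds)
	 *
	 * Note that 'tt' aliases 'in' (both are x2): the input block is fully
	 * loaded into w4-w7 before 'tt' is reused as the lookup table pointer
	 * in do_crypt below.
	 */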

	.macro		__pair1, sz, op, reg0, reg1, in0, in1e, in1d, shift
	.ifc		\op\shift, b0
	ubfiz		\reg0, \in0, #2, #8
	ubfiz		\reg1, \in1e, #2, #8
	.else
	ubfx		\reg0, \in0, #\shift, #8
	ubfx		\reg1, \in1e, #\shift, #8
	.endif

	/*
	 * AArch64 cannot do byte size indexed loads from a table containing
	 * 32-bit quantities, i.e., 'ldrb w12, [tt, w12, uxtw #2]' is not a
	 * valid instruction. So perform the shift explicitly first for the
	 * high bytes (the low byte is shifted implicitly by using ubfiz rather
	 * than ubfx above)
	 */
	.ifnc		\op, b
	ldr		\reg0, [tt, \reg0, uxtw #2]
	ldr		\reg1, [tt, \reg1, uxtw #2]
	.else
	.if		\shift > 0
	lsl		\reg0, \reg0, #2
	lsl		\reg1, \reg1, #2
	.endif
	ldrb		\reg0, [tt, \reg0, uxtw]
	ldrb		\reg1, [tt, \reg1, uxtw]
	.endif
	.endm
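
	/*
	 * __pair1 (above) feeds the encryption rounds and takes the bytes for
	 * the second output column from \in1e; __pair0 (below) feeds the
	 * decryption rounds and takes them from \in1d instead, reflecting the
	 * different column selection needed for InvShiftRows. __pair0 does
	 * not need the ldrb workaround: its byte-sized loads only ever hit
	 * crypto_aes_inv_sbox, a plain byte table, so \sz is 0 and no index
	 * scaling is required.
	 */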

	.macro		__pair0, sz, op, reg0, reg1, in0, in1e, in1d, shift
	ubfx		\reg0, \in0, #\shift, #8
	ubfx		\reg1, \in1d, #\shift, #8
	ldr\op		\reg0, [tt, \reg0, uxtw #\sz]
	ldr\op		\reg1, [tt, \reg1, uxtw #\sz]
	.endm
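
	/*
	 * __hround computes two output columns of one AES round. Reading the
	 * encryption path (enc=1) back into C-like pseudocode, the first
	 * output word is roughly
	 *
	 *   out0 = rk[0] ^ tt[byte0(in0)]
	 *                ^ ror32(tt[byte1(in1)], 24)
	 *                ^ ror32(tt[byte2(in2)], 16)
	 *                ^ ror32(tt[byte3(in3)],  8)
	 *
	 * i.e. the usual 32-bit T-table formulation, with rotations of a
	 * single table entry standing in for the four per-row tables.
	 */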

	.macro		__hround, out0, out1, in0, in1, in2, in3, t0, t1, enc, sz, op
	ldp		\out0, \out1, [rk], #8

	__pair\enc	\sz, \op, w12, w13, \in0, \in1, \in3, 0
	__pair\enc	\sz, \op, w14, w15, \in1, \in2, \in0, 8
	__pair\enc	\sz, \op, w16, w17, \in2, \in3, \in1, 16
	__pair\enc	\sz, \op, \t0, \t1, \in3, \in0, \in2, 24

	eor		\out0, \out0, w12
	eor		\out1, \out1, w13
	eor		\out0, \out0, w14, ror #24
	eor		\out1, \out1, w15, ror #24
	eor		\out0, \out0, w16, ror #16
	eor		\out1, \out1, w17, ror #16
	eor		\out0, \out0, \t0, ror #8
	eor		\out1, \out1, \t1, ror #8
	.endm
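
	/*
	 * fround/iround expand to one full encryption/decryption round over
	 * all four columns, as two __hround invocations with the inputs
	 * permuted for (Inv)ShiftRows. In the second __hround, two of the
	 * round's input registers double as the t0/t1 scratch registers;
	 * this works because by the time they are overwritten those inputs
	 * are no longer needed.
	 */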

	.macro		fround, out0, out1, out2, out3, in0, in1, in2, in3, sz=2, op
	__hround	\out0, \out1, \in0, \in1, \in2, \in3, \out2, \out3, 1, \sz, \op
	__hround	\out2, \out3, \in2, \in3, \in0, \in1, \in1, \in2, 1, \sz, \op
	.endm

	.macro		iround, out0, out1, out2, out3, in0, in1, in2, in3, sz=2, op
	__hround	\out0, \out1, \in0, \in3, \in2, \in1, \out2, \out3, 0, \sz, \op
	__hround	\out2, \out3, \in2, \in1, \in0, \in3, \in1, \in0, 0, \sz, \op
	.endm
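
	/*
	 * do_crypt drives the whole cipher: load the 16-byte block into
	 * w4-w7, XOR in the first round key, then run \round repeatedly with
	 * the state ping-ponging between w4-w7 and w8-w11. 'rounds' holds the
	 * total round count (10, 12 or 14 for AES); the loop is unrolled
	 * four-fold, and the initial tbnz on bit 1 skips half of the unrolled
	 * body when the count is not a multiple of 4. The final round
	 * switches to \ltab and byte-sized loads (the trailing 'b' operand),
	 * since it omits MixColumns and only needs the (inverse) S-box.
	 */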

	.macro		do_crypt, round, ttab, ltab, bsz
	ldp		w4, w5, [in]
	ldp		w6, w7, [in, #8]
	ldp		w8, w9, [rk], #16
	ldp		w10, w11, [rk, #-8]

CPU_BE(	rev		w4, w4		)
CPU_BE(	rev		w5, w5		)
CPU_BE(	rev		w6, w6		)
CPU_BE(	rev		w7, w7		)

	eor		w4, w4, w8
	eor		w5, w5, w9
	eor		w6, w6, w10
	eor		w7, w7, w11

	adr_l		tt, \ttab

	tbnz		rounds, #1, 1f

0:	\round		w8, w9, w10, w11, w4, w5, w6, w7
	\round		w4, w5, w6, w7, w8, w9, w10, w11

1:	subs		rounds, rounds, #4
	\round		w8, w9, w10, w11, w4, w5, w6, w7
	b.ls		3f
2:	\round		w4, w5, w6, w7, w8, w9, w10, w11
	b		0b
3:	adr_l		tt, \ltab
	\round		w4, w5, w6, w7, w8, w9, w10, w11, \bsz, b

CPU_BE(	rev		w4, w4		)
CPU_BE(	rev		w5, w5		)
CPU_BE(	rev		w6, w6		)
CPU_BE(	rev		w7, w7		)

	stp		w4, w5, [out]
	stp		w6, w7, [out, #8]
	ret
	.endm
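
/*
 * For encryption, the last round reuses crypto_ft_tab: passing
 * 'crypto_ft_tab + 1' together with a byte element size of 2 makes the
 * final byte loads pick the plain S-box value out of each 32-bit table
 * entry, so no separate forward S-box table is needed. Decryption uses
 * the dedicated byte-sized crypto_aes_inv_sbox table for its last round.
 */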
SYM_FUNC_START(__aes_arm64_encrypt)
	do_crypt	fround, crypto_ft_tab, crypto_ft_tab + 1, 2
SYM_FUNC_END(__aes_arm64_encrypt)

	.align		5
SYM_FUNC_START(__aes_arm64_decrypt)
	do_crypt	iround, crypto_it_tab, crypto_aes_inv_sbox, 0
SYM_FUNC_END(__aes_arm64_decrypt)