sha256-core.S_shipped
@ SPDX-License-Identifier: GPL-2.0
@ This code is taken from the OpenSSL project but the author (Andy Polyakov)
@ has relicensed it under the GPLv2. Therefore this program is free software;
@ you can redistribute it and/or modify it under the terms of the GNU General
@ Public License version 2 as published by the Free Software Foundation.
@
@ The original headers, including the original license headers, are
@ included below for completeness.
@ ====================================================================
@ Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
@ project. The module is, however, dual licensed under OpenSSL and
@ CRYPTOGAMS licenses depending on where you obtain it. For further
@ details see https://www.openssl.org/~appro/cryptogams/.
@ ====================================================================
@ SHA256 block procedure for ARMv4. May 2007.
@ Performance is ~2x better than gcc 3.4 generated code and in "abso-
@ lute" terms is ~2250 cycles per 64-byte block or ~35 cycles per
@ byte [on single-issue Xscale PXA250 core].
@ July 2010.
@
@ Rescheduling for dual-issue pipeline resulted in 22% improvement on
@ Cortex A8 core and ~20 cycles per processed byte.
@ February 2011.
@
@ Profiler-assisted and platform-specific optimization resulted in 16%
@ improvement on Cortex A8 core and ~15.4 cycles per processed byte.
@ September 2013.
@
@ Add NEON implementation. On Cortex A8 it was measured to process one
@ byte in 12.5 cycles or 23% faster than integer-only code. Snapdragon
@ S4 does it in 12.5 cycles too, but it's 50% faster than integer-only
@ code (meaning that latter performs sub-optimally, nothing was done
@ about it).
@ May 2014.
@
@ Add ARMv8 code path performing at 2.0 cpb on Apple A7.
#ifndef __KERNEL__
# include "arm_arch.h"
#else
# define __ARM_ARCH__ __LINUX_ARM_ARCH__
# define __ARM_MAX_ARCH__ 7
#endif
.text
#if __ARM_ARCH__<7
.code 32
#else
.syntax unified
# ifdef __thumb2__
.thumb
# else
.code 32
# endif
#endif
.type K256,%object
.align 5
K256:
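@ K256 holds the SHA-256 round constants: the first 32 bits of the
@ fractional parts of the cube roots of the first 64 primes
@ (FIPS 180-4, section 4.2.2).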
.word 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
.word 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
.word 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
.word 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
.word 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
.word 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
.word 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
.word 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
.word 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
.word 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
.word 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
.word 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
.word 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
.word 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
.word 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
.word 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
.size K256,.-K256
.word 0 @ terminator
#if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
.LOPENSSL_armcap:
.word OPENSSL_armcap_P-sha256_block_data_order
#endif
.align 5
.global sha256_block_data_order
.type sha256_block_data_order,%function
sha256_block_data_order:
.Lsha256_block_data_order:
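@ Calling convention (inferred from the pointer arithmetic below,
@ roughly void sha256_block_data_order(u32 *state, const u8 *inp,
@ size_t num)): r0 = the eight 32-bit state words, r1 = input data,
@ r2 = number of 64-byte blocks to process (AAPCS arguments).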
#if __ARM_ARCH__<7
sub r3,pc,#8 @ sha256_block_data_order
#else
adr r3,.Lsha256_block_data_order
#endif
#if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
ldr r12,.LOPENSSL_armcap
ldr r12,[r3,r12] @ OPENSSL_armcap_P
tst r12,#ARMV8_SHA256
bne .LARMv8
tst r12,#ARMV7_NEON
bne .LNEON
#endif
add r2,r1,r2,lsl#6 @ len to point at the end of inp
stmdb sp!,{r0,r1,r2,r4-r11,lr}
ldmia r0,{r4,r5,r6,r7,r8,r9,r10,r11}
sub r14,r3,#256+32 @ K256
sub sp,sp,#16*4 @ alloca(X[16])
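@ At this point r4-r11 carry the working variables a-h, r1 walks the
@ input, r14 walks K256, and the saved state, input and end-of-input
@ pointers sit just above the 16-word X[] scratch area reserved here.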
.Loop:
# if __ARM_ARCH__>=7
ldr r2,[r1],#4
# else
ldrb r2,[r1,#3]
# endif
eor r3,r5,r6 @ magic
eor r12,r12,r12
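@ The "magic" XOR above seeds a running b^c value so that each round
@ can use the identity Maj(a,b,c) = ((a^b)&(b^c))^b; the result is
@ folded into h one round late, hence the recurring
@ "h+=Maj(a,b,c) from the past" additions below.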
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 0
# if 0==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r8,r8,ror#5
add r4,r4,r12 @ h+=Maj(a,b,c) from the past
eor r0,r0,r8,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 0
add r4,r4,r12 @ h+=Maj(a,b,c) from the past
ldrb r12,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r12,lsl#8
ldrb r12,[r1],#4
orr r2,r2,r0,lsl#16
# if 0==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r8,r8,ror#5
orr r2,r2,r12,lsl#24
eor r0,r0,r8,ror#19 @ Sigma1(e)
#endif
ldr r12,[r14],#4 @ *K256++
add r11,r11,r2 @ h+=X[i]
str r2,[sp,#0*4]
eor r2,r9,r10
add r11,r11,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r8
add r11,r11,r12 @ h+=K256[i]
eor r2,r2,r10 @ Ch(e,f,g)
eor r0,r4,r4,ror#11
add r11,r11,r2 @ h+=Ch(e,f,g)
#if 0==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 0<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r4,r5 @ a^b, b^c in next round
#else
ldr r2,[sp,#2*4] @ from future BODY_16_xx
eor r12,r4,r5 @ a^b, b^c in next round
ldr r1,[sp,#15*4] @ from future BODY_16_xx
#endif
eor r0,r0,r4,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r7,r7,r11 @ d+=h
eor r3,r3,r5 @ Maj(a,b,c)
add r11,r11,r0,ror#2 @ h+=Sigma0(a)
@ add r11,r11,r3 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 1
# if 1==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r7,r7,ror#5
add r11,r11,r3 @ h+=Maj(a,b,c) from the past
eor r0,r0,r7,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 1
add r11,r11,r3 @ h+=Maj(a,b,c) from the past
ldrb r3,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r3,lsl#8
ldrb r3,[r1],#4
orr r2,r2,r0,lsl#16
# if 1==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r7,r7,ror#5
orr r2,r2,r3,lsl#24
eor r0,r0,r7,ror#19 @ Sigma1(e)
#endif
ldr r3,[r14],#4 @ *K256++
add r10,r10,r2 @ h+=X[i]
str r2,[sp,#1*4]
eor r2,r8,r9
add r10,r10,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r7
add r10,r10,r3 @ h+=K256[i]
eor r2,r2,r9 @ Ch(e,f,g)
eor r0,r11,r11,ror#11
add r10,r10,r2 @ h+=Ch(e,f,g)
#if 1==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 1<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r11,r4 @ a^b, b^c in next round
#else
ldr r2,[sp,#3*4] @ from future BODY_16_xx
eor r3,r11,r4 @ a^b, b^c in next round
ldr r1,[sp,#0*4] @ from future BODY_16_xx
#endif
eor r0,r0,r11,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r6,r6,r10 @ d+=h
eor r12,r12,r4 @ Maj(a,b,c)
add r10,r10,r0,ror#2 @ h+=Sigma0(a)
@ add r10,r10,r12 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 2
# if 2==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r6,r6,ror#5
add r10,r10,r12 @ h+=Maj(a,b,c) from the past
eor r0,r0,r6,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 2
add r10,r10,r12 @ h+=Maj(a,b,c) from the past
ldrb r12,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r12,lsl#8
ldrb r12,[r1],#4
orr r2,r2,r0,lsl#16
# if 2==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r6,r6,ror#5
orr r2,r2,r12,lsl#24
eor r0,r0,r6,ror#19 @ Sigma1(e)
#endif
ldr r12,[r14],#4 @ *K256++
add r9,r9,r2 @ h+=X[i]
str r2,[sp,#2*4]
eor r2,r7,r8
add r9,r9,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r6
add r9,r9,r12 @ h+=K256[i]
eor r2,r2,r8 @ Ch(e,f,g)
eor r0,r10,r10,ror#11
add r9,r9,r2 @ h+=Ch(e,f,g)
#if 2==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 2<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r10,r11 @ a^b, b^c in next round
#else
ldr r2,[sp,#4*4] @ from future BODY_16_xx
eor r12,r10,r11 @ a^b, b^c in next round
ldr r1,[sp,#1*4] @ from future BODY_16_xx
#endif
eor r0,r0,r10,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r5,r5,r9 @ d+=h
eor r3,r3,r11 @ Maj(a,b,c)
add r9,r9,r0,ror#2 @ h+=Sigma0(a)
@ add r9,r9,r3 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 3
# if 3==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r5,r5,ror#5
add r9,r9,r3 @ h+=Maj(a,b,c) from the past
eor r0,r0,r5,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 3
add r9,r9,r3 @ h+=Maj(a,b,c) from the past
ldrb r3,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r3,lsl#8
ldrb r3,[r1],#4
orr r2,r2,r0,lsl#16
# if 3==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r5,r5,ror#5
orr r2,r2,r3,lsl#24
eor r0,r0,r5,ror#19 @ Sigma1(e)
#endif
ldr r3,[r14],#4 @ *K256++
add r8,r8,r2 @ h+=X[i]
str r2,[sp,#3*4]
eor r2,r6,r7
add r8,r8,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r5
add r8,r8,r3 @ h+=K256[i]
eor r2,r2,r7 @ Ch(e,f,g)
eor r0,r9,r9,ror#11
add r8,r8,r2 @ h+=Ch(e,f,g)
#if 3==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 3<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r9,r10 @ a^b, b^c in next round
#else
ldr r2,[sp,#5*4] @ from future BODY_16_xx
eor r3,r9,r10 @ a^b, b^c in next round
ldr r1,[sp,#2*4] @ from future BODY_16_xx
#endif
eor r0,r0,r9,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r4,r4,r8 @ d+=h
eor r12,r12,r10 @ Maj(a,b,c)
add r8,r8,r0,ror#2 @ h+=Sigma0(a)
@ add r8,r8,r12 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 4
# if 4==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r4,r4,ror#5
add r8,r8,r12 @ h+=Maj(a,b,c) from the past
eor r0,r0,r4,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 4
add r8,r8,r12 @ h+=Maj(a,b,c) from the past
ldrb r12,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r12,lsl#8
ldrb r12,[r1],#4
orr r2,r2,r0,lsl#16
# if 4==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r4,r4,ror#5
orr r2,r2,r12,lsl#24
eor r0,r0,r4,ror#19 @ Sigma1(e)
#endif
ldr r12,[r14],#4 @ *K256++
add r7,r7,r2 @ h+=X[i]
str r2,[sp,#4*4]
eor r2,r5,r6
add r7,r7,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r4
add r7,r7,r12 @ h+=K256[i]
eor r2,r2,r6 @ Ch(e,f,g)
eor r0,r8,r8,ror#11
add r7,r7,r2 @ h+=Ch(e,f,g)
#if 4==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 4<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r8,r9 @ a^b, b^c in next round
#else
ldr r2,[sp,#6*4] @ from future BODY_16_xx
eor r12,r8,r9 @ a^b, b^c in next round
ldr r1,[sp,#3*4] @ from future BODY_16_xx
#endif
eor r0,r0,r8,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r11,r11,r7 @ d+=h
eor r3,r3,r9 @ Maj(a,b,c)
add r7,r7,r0,ror#2 @ h+=Sigma0(a)
@ add r7,r7,r3 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 5
# if 5==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r11,r11,ror#5
add r7,r7,r3 @ h+=Maj(a,b,c) from the past
eor r0,r0,r11,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 5
add r7,r7,r3 @ h+=Maj(a,b,c) from the past
ldrb r3,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r3,lsl#8
ldrb r3,[r1],#4
orr r2,r2,r0,lsl#16
# if 5==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r11,r11,ror#5
orr r2,r2,r3,lsl#24
eor r0,r0,r11,ror#19 @ Sigma1(e)
#endif
ldr r3,[r14],#4 @ *K256++
add r6,r6,r2 @ h+=X[i]
str r2,[sp,#5*4]
eor r2,r4,r5
add r6,r6,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r11
add r6,r6,r3 @ h+=K256[i]
eor r2,r2,r5 @ Ch(e,f,g)
eor r0,r7,r7,ror#11
add r6,r6,r2 @ h+=Ch(e,f,g)
#if 5==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 5<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r7,r8 @ a^b, b^c in next round
#else
ldr r2,[sp,#7*4] @ from future BODY_16_xx
eor r3,r7,r8 @ a^b, b^c in next round
ldr r1,[sp,#4*4] @ from future BODY_16_xx
#endif
eor r0,r0,r7,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r10,r10,r6 @ d+=h
eor r12,r12,r8 @ Maj(a,b,c)
add r6,r6,r0,ror#2 @ h+=Sigma0(a)
@ add r6,r6,r12 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 6
# if 6==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r10,r10,ror#5
add r6,r6,r12 @ h+=Maj(a,b,c) from the past
eor r0,r0,r10,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 6
add r6,r6,r12 @ h+=Maj(a,b,c) from the past
ldrb r12,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r12,lsl#8
ldrb r12,[r1],#4
orr r2,r2,r0,lsl#16
# if 6==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r10,r10,ror#5
orr r2,r2,r12,lsl#24
eor r0,r0,r10,ror#19 @ Sigma1(e)
#endif
ldr r12,[r14],#4 @ *K256++
add r5,r5,r2 @ h+=X[i]
str r2,[sp,#6*4]
eor r2,r11,r4
add r5,r5,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r10
add r5,r5,r12 @ h+=K256[i]
eor r2,r2,r4 @ Ch(e,f,g)
eor r0,r6,r6,ror#11
add r5,r5,r2 @ h+=Ch(e,f,g)
#if 6==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 6<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r6,r7 @ a^b, b^c in next round
#else
ldr r2,[sp,#8*4] @ from future BODY_16_xx
eor r12,r6,r7 @ a^b, b^c in next round
ldr r1,[sp,#5*4] @ from future BODY_16_xx
#endif
eor r0,r0,r6,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r9,r9,r5 @ d+=h
eor r3,r3,r7 @ Maj(a,b,c)
add r5,r5,r0,ror#2 @ h+=Sigma0(a)
@ add r5,r5,r3 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 7
# if 7==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r9,r9,ror#5
add r5,r5,r3 @ h+=Maj(a,b,c) from the past
eor r0,r0,r9,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 7
add r5,r5,r3 @ h+=Maj(a,b,c) from the past
ldrb r3,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r3,lsl#8
ldrb r3,[r1],#4
orr r2,r2,r0,lsl#16
# if 7==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r9,r9,ror#5
orr r2,r2,r3,lsl#24
eor r0,r0,r9,ror#19 @ Sigma1(e)
#endif
ldr r3,[r14],#4 @ *K256++
add r4,r4,r2 @ h+=X[i]
str r2,[sp,#7*4]
eor r2,r10,r11
add r4,r4,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r9
add r4,r4,r3 @ h+=K256[i]
eor r2,r2,r11 @ Ch(e,f,g)
eor r0,r5,r5,ror#11
add r4,r4,r2 @ h+=Ch(e,f,g)
#if 7==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 7<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r5,r6 @ a^b, b^c in next round
#else
ldr r2,[sp,#9*4] @ from future BODY_16_xx
eor r3,r5,r6 @ a^b, b^c in next round
ldr r1,[sp,#6*4] @ from future BODY_16_xx
#endif
eor r0,r0,r5,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r8,r8,r4 @ d+=h
eor r12,r12,r6 @ Maj(a,b,c)
add r4,r4,r0,ror#2 @ h+=Sigma0(a)
@ add r4,r4,r12 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 8
# if 8==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r8,r8,ror#5
add r4,r4,r12 @ h+=Maj(a,b,c) from the past
eor r0,r0,r8,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 8
add r4,r4,r12 @ h+=Maj(a,b,c) from the past
ldrb r12,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r12,lsl#8
ldrb r12,[r1],#4
orr r2,r2,r0,lsl#16
# if 8==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r8,r8,ror#5
orr r2,r2,r12,lsl#24
eor r0,r0,r8,ror#19 @ Sigma1(e)
#endif
ldr r12,[r14],#4 @ *K256++
add r11,r11,r2 @ h+=X[i]
str r2,[sp,#8*4]
eor r2,r9,r10
add r11,r11,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r8
add r11,r11,r12 @ h+=K256[i]
eor r2,r2,r10 @ Ch(e,f,g)
eor r0,r4,r4,ror#11
add r11,r11,r2 @ h+=Ch(e,f,g)
#if 8==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 8<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r4,r5 @ a^b, b^c in next round
#else
ldr r2,[sp,#10*4] @ from future BODY_16_xx
eor r12,r4,r5 @ a^b, b^c in next round
ldr r1,[sp,#7*4] @ from future BODY_16_xx
#endif
eor r0,r0,r4,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r7,r7,r11 @ d+=h
eor r3,r3,r5 @ Maj(a,b,c)
add r11,r11,r0,ror#2 @ h+=Sigma0(a)
@ add r11,r11,r3 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 9
# if 9==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r7,r7,ror#5
add r11,r11,r3 @ h+=Maj(a,b,c) from the past
eor r0,r0,r7,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 9
add r11,r11,r3 @ h+=Maj(a,b,c) from the past
ldrb r3,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r3,lsl#8
ldrb r3,[r1],#4
orr r2,r2,r0,lsl#16
# if 9==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r7,r7,ror#5
orr r2,r2,r3,lsl#24
eor r0,r0,r7,ror#19 @ Sigma1(e)
#endif
ldr r3,[r14],#4 @ *K256++
add r10,r10,r2 @ h+=X[i]
str r2,[sp,#9*4]
eor r2,r8,r9
add r10,r10,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r7
add r10,r10,r3 @ h+=K256[i]
eor r2,r2,r9 @ Ch(e,f,g)
eor r0,r11,r11,ror#11
add r10,r10,r2 @ h+=Ch(e,f,g)
#if 9==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 9<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r11,r4 @ a^b, b^c in next round
#else
ldr r2,[sp,#11*4] @ from future BODY_16_xx
eor r3,r11,r4 @ a^b, b^c in next round
ldr r1,[sp,#8*4] @ from future BODY_16_xx
#endif
eor r0,r0,r11,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r6,r6,r10 @ d+=h
eor r12,r12,r4 @ Maj(a,b,c)
add r10,r10,r0,ror#2 @ h+=Sigma0(a)
@ add r10,r10,r12 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 10
# if 10==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r6,r6,ror#5
add r10,r10,r12 @ h+=Maj(a,b,c) from the past
eor r0,r0,r6,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 10
add r10,r10,r12 @ h+=Maj(a,b,c) from the past
ldrb r12,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r12,lsl#8
ldrb r12,[r1],#4
orr r2,r2,r0,lsl#16
# if 10==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r6,r6,ror#5
orr r2,r2,r12,lsl#24
eor r0,r0,r6,ror#19 @ Sigma1(e)
#endif
ldr r12,[r14],#4 @ *K256++
add r9,r9,r2 @ h+=X[i]
str r2,[sp,#10*4]
eor r2,r7,r8
add r9,r9,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r6
add r9,r9,r12 @ h+=K256[i]
eor r2,r2,r8 @ Ch(e,f,g)
eor r0,r10,r10,ror#11
add r9,r9,r2 @ h+=Ch(e,f,g)
#if 10==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 10<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r10,r11 @ a^b, b^c in next round
#else
ldr r2,[sp,#12*4] @ from future BODY_16_xx
eor r12,r10,r11 @ a^b, b^c in next round
ldr r1,[sp,#9*4] @ from future BODY_16_xx
#endif
eor r0,r0,r10,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r5,r5,r9 @ d+=h
eor r3,r3,r11 @ Maj(a,b,c)
add r9,r9,r0,ror#2 @ h+=Sigma0(a)
@ add r9,r9,r3 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 11
# if 11==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r5,r5,ror#5
add r9,r9,r3 @ h+=Maj(a,b,c) from the past
eor r0,r0,r5,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 11
add r9,r9,r3 @ h+=Maj(a,b,c) from the past
ldrb r3,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r3,lsl#8
ldrb r3,[r1],#4
orr r2,r2,r0,lsl#16
# if 11==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r5,r5,ror#5
orr r2,r2,r3,lsl#24
eor r0,r0,r5,ror#19 @ Sigma1(e)
#endif
ldr r3,[r14],#4 @ *K256++
add r8,r8,r2 @ h+=X[i]
str r2,[sp,#11*4]
eor r2,r6,r7
add r8,r8,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r5
add r8,r8,r3 @ h+=K256[i]
eor r2,r2,r7 @ Ch(e,f,g)
eor r0,r9,r9,ror#11
add r8,r8,r2 @ h+=Ch(e,f,g)
#if 11==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 11<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r9,r10 @ a^b, b^c in next round
#else
ldr r2,[sp,#13*4] @ from future BODY_16_xx
eor r3,r9,r10 @ a^b, b^c in next round
ldr r1,[sp,#10*4] @ from future BODY_16_xx
#endif
eor r0,r0,r9,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r4,r4,r8 @ d+=h
eor r12,r12,r10 @ Maj(a,b,c)
add r8,r8,r0,ror#2 @ h+=Sigma0(a)
@ add r8,r8,r12 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 12
# if 12==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r4,r4,ror#5
add r8,r8,r12 @ h+=Maj(a,b,c) from the past
eor r0,r0,r4,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 12
add r8,r8,r12 @ h+=Maj(a,b,c) from the past
ldrb r12,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r12,lsl#8
ldrb r12,[r1],#4
orr r2,r2,r0,lsl#16
# if 12==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r4,r4,ror#5
orr r2,r2,r12,lsl#24
eor r0,r0,r4,ror#19 @ Sigma1(e)
#endif
ldr r12,[r14],#4 @ *K256++
add r7,r7,r2 @ h+=X[i]
str r2,[sp,#12*4]
eor r2,r5,r6
add r7,r7,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r4
add r7,r7,r12 @ h+=K256[i]
eor r2,r2,r6 @ Ch(e,f,g)
eor r0,r8,r8,ror#11
add r7,r7,r2 @ h+=Ch(e,f,g)
#if 12==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 12<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r8,r9 @ a^b, b^c in next round
#else
ldr r2,[sp,#14*4] @ from future BODY_16_xx
eor r12,r8,r9 @ a^b, b^c in next round
ldr r1,[sp,#11*4] @ from future BODY_16_xx
#endif
eor r0,r0,r8,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r11,r11,r7 @ d+=h
eor r3,r3,r9 @ Maj(a,b,c)
add r7,r7,r0,ror#2 @ h+=Sigma0(a)
@ add r7,r7,r3 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 13
# if 13==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r11,r11,ror#5
add r7,r7,r3 @ h+=Maj(a,b,c) from the past
eor r0,r0,r11,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 13
add r7,r7,r3 @ h+=Maj(a,b,c) from the past
ldrb r3,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r3,lsl#8
ldrb r3,[r1],#4
orr r2,r2,r0,lsl#16
# if 13==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r11,r11,ror#5
orr r2,r2,r3,lsl#24
eor r0,r0,r11,ror#19 @ Sigma1(e)
#endif
ldr r3,[r14],#4 @ *K256++
add r6,r6,r2 @ h+=X[i]
str r2,[sp,#13*4]
eor r2,r4,r5
add r6,r6,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r11
add r6,r6,r3 @ h+=K256[i]
eor r2,r2,r5 @ Ch(e,f,g)
eor r0,r7,r7,ror#11
add r6,r6,r2 @ h+=Ch(e,f,g)
#if 13==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 13<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r7,r8 @ a^b, b^c in next round
#else
ldr r2,[sp,#15*4] @ from future BODY_16_xx
eor r3,r7,r8 @ a^b, b^c in next round
ldr r1,[sp,#12*4] @ from future BODY_16_xx
#endif
eor r0,r0,r7,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r10,r10,r6 @ d+=h
eor r12,r12,r8 @ Maj(a,b,c)
add r6,r6,r0,ror#2 @ h+=Sigma0(a)
@ add r6,r6,r12 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 14
# if 14==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r10,r10,ror#5
add r6,r6,r12 @ h+=Maj(a,b,c) from the past
eor r0,r0,r10,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 14
add r6,r6,r12 @ h+=Maj(a,b,c) from the past
ldrb r12,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r12,lsl#8
ldrb r12,[r1],#4
orr r2,r2,r0,lsl#16
# if 14==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r10,r10,ror#5
orr r2,r2,r12,lsl#24
eor r0,r0,r10,ror#19 @ Sigma1(e)
#endif
ldr r12,[r14],#4 @ *K256++
add r5,r5,r2 @ h+=X[i]
str r2,[sp,#14*4]
eor r2,r11,r4
add r5,r5,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r10
add r5,r5,r12 @ h+=K256[i]
eor r2,r2,r4 @ Ch(e,f,g)
eor r0,r6,r6,ror#11
add r5,r5,r2 @ h+=Ch(e,f,g)
#if 14==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 14<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r6,r7 @ a^b, b^c in next round
#else
ldr r2,[sp,#0*4] @ from future BODY_16_xx
eor r12,r6,r7 @ a^b, b^c in next round
ldr r1,[sp,#13*4] @ from future BODY_16_xx
#endif
eor r0,r0,r6,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r9,r9,r5 @ d+=h
eor r3,r3,r7 @ Maj(a,b,c)
add r5,r5,r0,ror#2 @ h+=Sigma0(a)
@ add r5,r5,r3 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
@ ldr r2,[r1],#4 @ 15
# if 15==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r9,r9,ror#5
add r5,r5,r3 @ h+=Maj(a,b,c) from the past
eor r0,r0,r9,ror#19 @ Sigma1(e)
# ifndef __ARMEB__
rev r2,r2
# endif
#else
@ ldrb r2,[r1,#3] @ 15
add r5,r5,r3 @ h+=Maj(a,b,c) from the past
ldrb r3,[r1,#2]
ldrb r0,[r1,#1]
orr r2,r2,r3,lsl#8
ldrb r3,[r1],#4
orr r2,r2,r0,lsl#16
# if 15==15
str r1,[sp,#17*4] @ make room for r1
# endif
eor r0,r9,r9,ror#5
orr r2,r2,r3,lsl#24
eor r0,r0,r9,ror#19 @ Sigma1(e)
#endif
ldr r3,[r14],#4 @ *K256++
add r4,r4,r2 @ h+=X[i]
str r2,[sp,#15*4]
eor r2,r10,r11
add r4,r4,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r9
add r4,r4,r3 @ h+=K256[i]
eor r2,r2,r11 @ Ch(e,f,g)
eor r0,r5,r5,ror#11
add r4,r4,r2 @ h+=Ch(e,f,g)
#if 15==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 15<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r5,r6 @ a^b, b^c in next round
#else
ldr r2,[sp,#1*4] @ from future BODY_16_xx
eor r3,r5,r6 @ a^b, b^c in next round
ldr r1,[sp,#14*4] @ from future BODY_16_xx
#endif
eor r0,r0,r5,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r8,r8,r4 @ d+=h
eor r12,r12,r6 @ Maj(a,b,c)
add r4,r4,r0,ror#2 @ h+=Sigma0(a)
@ add r4,r4,r12 @ h+=Maj(a,b,c)
.Lrounds_16_xx:
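@ Rounds 16..63: the message schedule is extended in the 16-word ring
@ buffer on the stack, X[i&15] += sigma0(X[(i+1)&15]) + X[(i+9)&15] +
@ sigma1(X[(i+14)&15]), where sigma0(x) = ror(x,7)^ror(x,18)^(x>>3)
@ and sigma1(x) = ror(x,17)^ror(x,19)^(x>>10).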
@ ldr r2,[sp,#1*4] @ 16
@ ldr r1,[sp,#14*4]
mov r0,r2,ror#7
add r4,r4,r12 @ h+=Maj(a,b,c) from the past
mov r12,r1,ror#17
eor r0,r0,r2,ror#18
eor r12,r12,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#0*4]
eor r12,r12,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#9*4]
add r12,r12,r0
eor r0,r8,r8,ror#5 @ from BODY_00_15
add r2,r2,r12
eor r0,r0,r8,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r12,[r14],#4 @ *K256++
add r11,r11,r2 @ h+=X[i]
str r2,[sp,#0*4]
eor r2,r9,r10
add r11,r11,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r8
add r11,r11,r12 @ h+=K256[i]
eor r2,r2,r10 @ Ch(e,f,g)
eor r0,r4,r4,ror#11
add r11,r11,r2 @ h+=Ch(e,f,g)
#if 16==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 16<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r4,r5 @ a^b, b^c in next round
#else
ldr r2,[sp,#2*4] @ from future BODY_16_xx
eor r12,r4,r5 @ a^b, b^c in next round
ldr r1,[sp,#15*4] @ from future BODY_16_xx
#endif
eor r0,r0,r4,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r7,r7,r11 @ d+=h
eor r3,r3,r5 @ Maj(a,b,c)
add r11,r11,r0,ror#2 @ h+=Sigma0(a)
@ add r11,r11,r3 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#2*4] @ 17
@ ldr r1,[sp,#15*4]
mov r0,r2,ror#7
add r11,r11,r3 @ h+=Maj(a,b,c) from the past
mov r3,r1,ror#17
eor r0,r0,r2,ror#18
eor r3,r3,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#1*4]
eor r3,r3,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#10*4]
add r3,r3,r0
eor r0,r7,r7,ror#5 @ from BODY_00_15
add r2,r2,r3
eor r0,r0,r7,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r3,[r14],#4 @ *K256++
add r10,r10,r2 @ h+=X[i]
str r2,[sp,#1*4]
eor r2,r8,r9
add r10,r10,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r7
add r10,r10,r3 @ h+=K256[i]
eor r2,r2,r9 @ Ch(e,f,g)
eor r0,r11,r11,ror#11
add r10,r10,r2 @ h+=Ch(e,f,g)
#if 17==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 17<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r11,r4 @ a^b, b^c in next round
#else
ldr r2,[sp,#3*4] @ from future BODY_16_xx
eor r3,r11,r4 @ a^b, b^c in next round
ldr r1,[sp,#0*4] @ from future BODY_16_xx
#endif
eor r0,r0,r11,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r6,r6,r10 @ d+=h
eor r12,r12,r4 @ Maj(a,b,c)
add r10,r10,r0,ror#2 @ h+=Sigma0(a)
@ add r10,r10,r12 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#3*4] @ 18
@ ldr r1,[sp,#0*4]
mov r0,r2,ror#7
add r10,r10,r12 @ h+=Maj(a,b,c) from the past
mov r12,r1,ror#17
eor r0,r0,r2,ror#18
eor r12,r12,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#2*4]
eor r12,r12,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#11*4]
add r12,r12,r0
eor r0,r6,r6,ror#5 @ from BODY_00_15
add r2,r2,r12
eor r0,r0,r6,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r12,[r14],#4 @ *K256++
add r9,r9,r2 @ h+=X[i]
str r2,[sp,#2*4]
eor r2,r7,r8
add r9,r9,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r6
add r9,r9,r12 @ h+=K256[i]
eor r2,r2,r8 @ Ch(e,f,g)
eor r0,r10,r10,ror#11
add r9,r9,r2 @ h+=Ch(e,f,g)
#if 18==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 18<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r10,r11 @ a^b, b^c in next round
#else
ldr r2,[sp,#4*4] @ from future BODY_16_xx
eor r12,r10,r11 @ a^b, b^c in next round
ldr r1,[sp,#1*4] @ from future BODY_16_xx
#endif
eor r0,r0,r10,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r5,r5,r9 @ d+=h
eor r3,r3,r11 @ Maj(a,b,c)
add r9,r9,r0,ror#2 @ h+=Sigma0(a)
@ add r9,r9,r3 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#4*4] @ 19
@ ldr r1,[sp,#1*4]
mov r0,r2,ror#7
add r9,r9,r3 @ h+=Maj(a,b,c) from the past
mov r3,r1,ror#17
eor r0,r0,r2,ror#18
eor r3,r3,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#3*4]
eor r3,r3,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#12*4]
add r3,r3,r0
eor r0,r5,r5,ror#5 @ from BODY_00_15
add r2,r2,r3
eor r0,r0,r5,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r3,[r14],#4 @ *K256++
add r8,r8,r2 @ h+=X[i]
str r2,[sp,#3*4]
eor r2,r6,r7
add r8,r8,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r5
add r8,r8,r3 @ h+=K256[i]
eor r2,r2,r7 @ Ch(e,f,g)
eor r0,r9,r9,ror#11
add r8,r8,r2 @ h+=Ch(e,f,g)
#if 19==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 19<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r9,r10 @ a^b, b^c in next round
#else
ldr r2,[sp,#5*4] @ from future BODY_16_xx
eor r3,r9,r10 @ a^b, b^c in next round
ldr r1,[sp,#2*4] @ from future BODY_16_xx
#endif
eor r0,r0,r9,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r4,r4,r8 @ d+=h
eor r12,r12,r10 @ Maj(a,b,c)
add r8,r8,r0,ror#2 @ h+=Sigma0(a)
@ add r8,r8,r12 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#5*4] @ 20
@ ldr r1,[sp,#2*4]
mov r0,r2,ror#7
add r8,r8,r12 @ h+=Maj(a,b,c) from the past
mov r12,r1,ror#17
eor r0,r0,r2,ror#18
eor r12,r12,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#4*4]
eor r12,r12,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#13*4]
add r12,r12,r0
eor r0,r4,r4,ror#5 @ from BODY_00_15
add r2,r2,r12
eor r0,r0,r4,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r12,[r14],#4 @ *K256++
add r7,r7,r2 @ h+=X[i]
str r2,[sp,#4*4]
eor r2,r5,r6
add r7,r7,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r4
add r7,r7,r12 @ h+=K256[i]
eor r2,r2,r6 @ Ch(e,f,g)
eor r0,r8,r8,ror#11
add r7,r7,r2 @ h+=Ch(e,f,g)
#if 20==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 20<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r8,r9 @ a^b, b^c in next round
#else
ldr r2,[sp,#6*4] @ from future BODY_16_xx
eor r12,r8,r9 @ a^b, b^c in next round
ldr r1,[sp,#3*4] @ from future BODY_16_xx
#endif
eor r0,r0,r8,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r11,r11,r7 @ d+=h
eor r3,r3,r9 @ Maj(a,b,c)
add r7,r7,r0,ror#2 @ h+=Sigma0(a)
@ add r7,r7,r3 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#6*4] @ 21
@ ldr r1,[sp,#3*4]
mov r0,r2,ror#7
add r7,r7,r3 @ h+=Maj(a,b,c) from the past
mov r3,r1,ror#17
eor r0,r0,r2,ror#18
eor r3,r3,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#5*4]
eor r3,r3,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#14*4]
add r3,r3,r0
eor r0,r11,r11,ror#5 @ from BODY_00_15
add r2,r2,r3
eor r0,r0,r11,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r3,[r14],#4 @ *K256++
add r6,r6,r2 @ h+=X[i]
str r2,[sp,#5*4]
eor r2,r4,r5
add r6,r6,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r11
add r6,r6,r3 @ h+=K256[i]
eor r2,r2,r5 @ Ch(e,f,g)
eor r0,r7,r7,ror#11
add r6,r6,r2 @ h+=Ch(e,f,g)
#if 21==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 21<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r7,r8 @ a^b, b^c in next round
#else
ldr r2,[sp,#7*4] @ from future BODY_16_xx
eor r3,r7,r8 @ a^b, b^c in next round
ldr r1,[sp,#4*4] @ from future BODY_16_xx
#endif
eor r0,r0,r7,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r10,r10,r6 @ d+=h
eor r12,r12,r8 @ Maj(a,b,c)
add r6,r6,r0,ror#2 @ h+=Sigma0(a)
@ add r6,r6,r12 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#7*4] @ 22
@ ldr r1,[sp,#4*4]
mov r0,r2,ror#7
add r6,r6,r12 @ h+=Maj(a,b,c) from the past
mov r12,r1,ror#17
eor r0,r0,r2,ror#18
eor r12,r12,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#6*4]
eor r12,r12,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#15*4]
add r12,r12,r0
eor r0,r10,r10,ror#5 @ from BODY_00_15
add r2,r2,r12
eor r0,r0,r10,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r12,[r14],#4 @ *K256++
add r5,r5,r2 @ h+=X[i]
str r2,[sp,#6*4]
eor r2,r11,r4
add r5,r5,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r10
add r5,r5,r12 @ h+=K256[i]
eor r2,r2,r4 @ Ch(e,f,g)
eor r0,r6,r6,ror#11
add r5,r5,r2 @ h+=Ch(e,f,g)
#if 22==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 22<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r6,r7 @ a^b, b^c in next round
#else
ldr r2,[sp,#8*4] @ from future BODY_16_xx
eor r12,r6,r7 @ a^b, b^c in next round
ldr r1,[sp,#5*4] @ from future BODY_16_xx
#endif
eor r0,r0,r6,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r9,r9,r5 @ d+=h
eor r3,r3,r7 @ Maj(a,b,c)
add r5,r5,r0,ror#2 @ h+=Sigma0(a)
@ add r5,r5,r3 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#8*4] @ 23
@ ldr r1,[sp,#5*4]
mov r0,r2,ror#7
add r5,r5,r3 @ h+=Maj(a,b,c) from the past
mov r3,r1,ror#17
eor r0,r0,r2,ror#18
eor r3,r3,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#7*4]
eor r3,r3,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#0*4]
add r3,r3,r0
eor r0,r9,r9,ror#5 @ from BODY_00_15
add r2,r2,r3
eor r0,r0,r9,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r3,[r14],#4 @ *K256++
add r4,r4,r2 @ h+=X[i]
str r2,[sp,#7*4]
eor r2,r10,r11
add r4,r4,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r9
add r4,r4,r3 @ h+=K256[i]
eor r2,r2,r11 @ Ch(e,f,g)
eor r0,r5,r5,ror#11
add r4,r4,r2 @ h+=Ch(e,f,g)
#if 23==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 23<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r5,r6 @ a^b, b^c in next round
#else
ldr r2,[sp,#9*4] @ from future BODY_16_xx
eor r3,r5,r6 @ a^b, b^c in next round
ldr r1,[sp,#6*4] @ from future BODY_16_xx
#endif
eor r0,r0,r5,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r8,r8,r4 @ d+=h
eor r12,r12,r6 @ Maj(a,b,c)
add r4,r4,r0,ror#2 @ h+=Sigma0(a)
@ add r4,r4,r12 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#9*4] @ 24
@ ldr r1,[sp,#6*4]
mov r0,r2,ror#7
add r4,r4,r12 @ h+=Maj(a,b,c) from the past
mov r12,r1,ror#17
eor r0,r0,r2,ror#18
eor r12,r12,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#8*4]
eor r12,r12,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#1*4]
add r12,r12,r0
eor r0,r8,r8,ror#5 @ from BODY_00_15
add r2,r2,r12
eor r0,r0,r8,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r12,[r14],#4 @ *K256++
add r11,r11,r2 @ h+=X[i]
str r2,[sp,#8*4]
eor r2,r9,r10
add r11,r11,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r8
add r11,r11,r12 @ h+=K256[i]
eor r2,r2,r10 @ Ch(e,f,g)
eor r0,r4,r4,ror#11
add r11,r11,r2 @ h+=Ch(e,f,g)
#if 24==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 24<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r4,r5 @ a^b, b^c in next round
#else
ldr r2,[sp,#10*4] @ from future BODY_16_xx
eor r12,r4,r5 @ a^b, b^c in next round
ldr r1,[sp,#7*4] @ from future BODY_16_xx
#endif
eor r0,r0,r4,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r7,r7,r11 @ d+=h
eor r3,r3,r5 @ Maj(a,b,c)
add r11,r11,r0,ror#2 @ h+=Sigma0(a)
@ add r11,r11,r3 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#10*4] @ 25
@ ldr r1,[sp,#7*4]
mov r0,r2,ror#7
add r11,r11,r3 @ h+=Maj(a,b,c) from the past
mov r3,r1,ror#17
eor r0,r0,r2,ror#18
eor r3,r3,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#9*4]
eor r3,r3,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#2*4]
add r3,r3,r0
eor r0,r7,r7,ror#5 @ from BODY_00_15
add r2,r2,r3
eor r0,r0,r7,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r3,[r14],#4 @ *K256++
add r10,r10,r2 @ h+=X[i]
str r2,[sp,#9*4]
eor r2,r8,r9
add r10,r10,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r7
add r10,r10,r3 @ h+=K256[i]
eor r2,r2,r9 @ Ch(e,f,g)
eor r0,r11,r11,ror#11
add r10,r10,r2 @ h+=Ch(e,f,g)
#if 25==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 25<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r11,r4 @ a^b, b^c in next round
#else
ldr r2,[sp,#11*4] @ from future BODY_16_xx
eor r3,r11,r4 @ a^b, b^c in next round
ldr r1,[sp,#8*4] @ from future BODY_16_xx
#endif
eor r0,r0,r11,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r6,r6,r10 @ d+=h
eor r12,r12,r4 @ Maj(a,b,c)
add r10,r10,r0,ror#2 @ h+=Sigma0(a)
@ add r10,r10,r12 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#11*4] @ 26
@ ldr r1,[sp,#8*4]
mov r0,r2,ror#7
add r10,r10,r12 @ h+=Maj(a,b,c) from the past
mov r12,r1,ror#17
eor r0,r0,r2,ror#18
eor r12,r12,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#10*4]
eor r12,r12,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#3*4]
add r12,r12,r0
eor r0,r6,r6,ror#5 @ from BODY_00_15
add r2,r2,r12
eor r0,r0,r6,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r12,[r14],#4 @ *K256++
add r9,r9,r2 @ h+=X[i]
str r2,[sp,#10*4]
eor r2,r7,r8
add r9,r9,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r6
add r9,r9,r12 @ h+=K256[i]
eor r2,r2,r8 @ Ch(e,f,g)
eor r0,r10,r10,ror#11
add r9,r9,r2 @ h+=Ch(e,f,g)
#if 26==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 26<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r10,r11 @ a^b, b^c in next round
#else
ldr r2,[sp,#12*4] @ from future BODY_16_xx
eor r12,r10,r11 @ a^b, b^c in next round
ldr r1,[sp,#9*4] @ from future BODY_16_xx
#endif
eor r0,r0,r10,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
add r5,r5,r9 @ d+=h
eor r3,r3,r11 @ Maj(a,b,c)
add r9,r9,r0,ror#2 @ h+=Sigma0(a)
@ add r9,r9,r3 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#12*4] @ 27
@ ldr r1,[sp,#9*4]
mov r0,r2,ror#7
add r9,r9,r3 @ h+=Maj(a,b,c) from the past
mov r3,r1,ror#17
eor r0,r0,r2,ror#18
eor r3,r3,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#11*4]
eor r3,r3,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#4*4]
add r3,r3,r0
eor r0,r5,r5,ror#5 @ from BODY_00_15
add r2,r2,r3
eor r0,r0,r5,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r3,[r14],#4 @ *K256++
add r8,r8,r2 @ h+=X[i]
str r2,[sp,#11*4]
eor r2,r6,r7
add r8,r8,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r5
add r8,r8,r3 @ h+=K256[i]
eor r2,r2,r7 @ Ch(e,f,g)
eor r0,r9,r9,ror#11
add r8,r8,r2 @ h+=Ch(e,f,g)
#if 27==31
and r3,r3,#0xff
cmp r3,#0xf2 @ done?
#endif
#if 27<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r9,r10 @ a^b, b^c in next round
#else
ldr r2,[sp,#13*4] @ from future BODY_16_xx
eor r3,r9,r10 @ a^b, b^c in next round
ldr r1,[sp,#10*4] @ from future BODY_16_xx
#endif
eor r0,r0,r9,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r4,r4,r8 @ d+=h
eor r12,r12,r10 @ Maj(a,b,c)
add r8,r8,r0,ror#2 @ h+=Sigma0(a)
@ add r8,r8,r12 @ h+=Maj(a,b,c)
@ ldr r2,[sp,#13*4] @ 28
@ ldr r1,[sp,#10*4]
mov r0,r2,ror#7
add r8,r8,r12 @ h+=Maj(a,b,c) from the past
mov r12,r1,ror#17
eor r0,r0,r2,ror#18
eor r12,r12,r1,ror#19
eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
ldr r2,[sp,#12*4]
eor r12,r12,r1,lsr#10 @ sigma1(X[i+14])
ldr r1,[sp,#5*4]
add r12,r12,r0
eor r0,r4,r4,ror#5 @ from BODY_00_15
add r2,r2,r12
eor r0,r0,r4,ror#19 @ Sigma1(e)
add r2,r2,r1 @ X[i]
ldr r12,[r14],#4 @ *K256++
add r7,r7,r2 @ h+=X[i]
str r2,[sp,#12*4]
eor r2,r5,r6
add r7,r7,r0,ror#6 @ h+=Sigma1(e)
and r2,r2,r4
add r7,r7,r12 @ h+=K256[i]
eor r2,r2,r6 @ Ch(e,f,g)
eor r0,r8,r8,ror#11
add r7,r7,r2 @ h+=Ch(e,f,g)
#if 28==31
and r12,r12,#0xff
cmp r12,#0xf2 @ done?
#endif
#if 28<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r12,r8,r9 @ a^b, b^c in next round
#else
ldr r2,[sp,#14*4] @ from future BODY_16_xx
eor r12,r8,r9 @ a^b, b^c in next round
ldr r1,[sp,#11*4] @ from future BODY_16_xx
#endif
eor r0,r0,r8,ror#20 @ Sigma0(a)
and r3,r3,r12 @ (b^c)&=(a^b)
  1659. add r11,r11,r7 @ d+=h
  1660. eor r3,r3,r9 @ Maj(a,b,c)
  1661. add r7,r7,r0,ror#2 @ h+=Sigma0(a)
  1662. @ add r7,r7,r3 @ h+=Maj(a,b,c)
  1663. @ ldr r2,[sp,#14*4] @ 29
  1664. @ ldr r1,[sp,#11*4]
  1665. mov r0,r2,ror#7
  1666. add r7,r7,r3 @ h+=Maj(a,b,c) from the past
  1667. mov r3,r1,ror#17
  1668. eor r0,r0,r2,ror#18
  1669. eor r3,r3,r1,ror#19
  1670. eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
  1671. ldr r2,[sp,#13*4]
  1672. eor r3,r3,r1,lsr#10 @ sigma1(X[i+14])
  1673. ldr r1,[sp,#6*4]
  1674. add r3,r3,r0
  1675. eor r0,r11,r11,ror#5 @ from BODY_00_15
  1676. add r2,r2,r3
  1677. eor r0,r0,r11,ror#19 @ Sigma1(e)
  1678. add r2,r2,r1 @ X[i]
  1679. ldr r3,[r14],#4 @ *K256++
  1680. add r6,r6,r2 @ h+=X[i]
  1681. str r2,[sp,#13*4]
  1682. eor r2,r4,r5
  1683. add r6,r6,r0,ror#6 @ h+=Sigma1(e)
  1684. and r2,r2,r11
  1685. add r6,r6,r3 @ h+=K256[i]
  1686. eor r2,r2,r5 @ Ch(e,f,g)
  1687. eor r0,r7,r7,ror#11
  1688. add r6,r6,r2 @ h+=Ch(e,f,g)
  1689. #if 29==31
  1690. and r3,r3,#0xff
  1691. cmp r3,#0xf2 @ done?
  1692. #endif
  1693. #if 29<15
  1694. # if __ARM_ARCH__>=7
  1695. ldr r2,[r1],#4 @ prefetch
  1696. # else
  1697. ldrb r2,[r1,#3]
  1698. # endif
  1699. eor r3,r7,r8 @ a^b, b^c in next round
  1700. #else
  1701. ldr r2,[sp,#15*4] @ from future BODY_16_xx
  1702. eor r3,r7,r8 @ a^b, b^c in next round
  1703. ldr r1,[sp,#12*4] @ from future BODY_16_xx
  1704. #endif
  1705. eor r0,r0,r7,ror#20 @ Sigma0(a)
  1706. and r12,r12,r3 @ (b^c)&=(a^b)
  1707. add r10,r10,r6 @ d+=h
  1708. eor r12,r12,r8 @ Maj(a,b,c)
  1709. add r6,r6,r0,ror#2 @ h+=Sigma0(a)
  1710. @ add r6,r6,r12 @ h+=Maj(a,b,c)
  1711. @ ldr r2,[sp,#15*4] @ 30
  1712. @ ldr r1,[sp,#12*4]
  1713. mov r0,r2,ror#7
  1714. add r6,r6,r12 @ h+=Maj(a,b,c) from the past
  1715. mov r12,r1,ror#17
  1716. eor r0,r0,r2,ror#18
  1717. eor r12,r12,r1,ror#19
  1718. eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
  1719. ldr r2,[sp,#14*4]
  1720. eor r12,r12,r1,lsr#10 @ sigma1(X[i+14])
  1721. ldr r1,[sp,#7*4]
  1722. add r12,r12,r0
  1723. eor r0,r10,r10,ror#5 @ from BODY_00_15
  1724. add r2,r2,r12
  1725. eor r0,r0,r10,ror#19 @ Sigma1(e)
  1726. add r2,r2,r1 @ X[i]
  1727. ldr r12,[r14],#4 @ *K256++
  1728. add r5,r5,r2 @ h+=X[i]
  1729. str r2,[sp,#14*4]
  1730. eor r2,r11,r4
  1731. add r5,r5,r0,ror#6 @ h+=Sigma1(e)
  1732. and r2,r2,r10
  1733. add r5,r5,r12 @ h+=K256[i]
  1734. eor r2,r2,r4 @ Ch(e,f,g)
  1735. eor r0,r6,r6,ror#11
  1736. add r5,r5,r2 @ h+=Ch(e,f,g)
  1737. #if 30==31
  1738. and r12,r12,#0xff
  1739. cmp r12,#0xf2 @ done?
  1740. #endif
  1741. #if 30<15
  1742. # if __ARM_ARCH__>=7
  1743. ldr r2,[r1],#4 @ prefetch
  1744. # else
  1745. ldrb r2,[r1,#3]
  1746. # endif
  1747. eor r12,r6,r7 @ a^b, b^c in next round
  1748. #else
  1749. ldr r2,[sp,#0*4] @ from future BODY_16_xx
  1750. eor r12,r6,r7 @ a^b, b^c in next round
  1751. ldr r1,[sp,#13*4] @ from future BODY_16_xx
  1752. #endif
  1753. eor r0,r0,r6,ror#20 @ Sigma0(a)
  1754. and r3,r3,r12 @ (b^c)&=(a^b)
  1755. add r9,r9,r5 @ d+=h
  1756. eor r3,r3,r7 @ Maj(a,b,c)
  1757. add r5,r5,r0,ror#2 @ h+=Sigma0(a)
  1758. @ add r5,r5,r3 @ h+=Maj(a,b,c)
  1759. @ ldr r2,[sp,#0*4] @ 31
  1760. @ ldr r1,[sp,#13*4]
  1761. mov r0,r2,ror#7
  1762. add r5,r5,r3 @ h+=Maj(a,b,c) from the past
  1763. mov r3,r1,ror#17
  1764. eor r0,r0,r2,ror#18
  1765. eor r3,r3,r1,ror#19
  1766. eor r0,r0,r2,lsr#3 @ sigma0(X[i+1])
  1767. ldr r2,[sp,#15*4]
  1768. eor r3,r3,r1,lsr#10 @ sigma1(X[i+14])
  1769. ldr r1,[sp,#8*4]
  1770. add r3,r3,r0
  1771. eor r0,r9,r9,ror#5 @ from BODY_00_15
  1772. add r2,r2,r3
  1773. eor r0,r0,r9,ror#19 @ Sigma1(e)
  1774. add r2,r2,r1 @ X[i]
  1775. ldr r3,[r14],#4 @ *K256++
  1776. add r4,r4,r2 @ h+=X[i]
  1777. str r2,[sp,#15*4]
  1778. eor r2,r10,r11
  1779. add r4,r4,r0,ror#6 @ h+=Sigma1(e)
  1780. and r2,r2,r9
  1781. add r4,r4,r3 @ h+=K256[i]
  1782. eor r2,r2,r11 @ Ch(e,f,g)
  1783. eor r0,r5,r5,ror#11
  1784. add r4,r4,r2 @ h+=Ch(e,f,g)
  1785. #if 31==31
  1786. and r3,r3,#0xff
  1787. cmp r3,#0xf2 @ done?
  1788. #endif
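@ K256 ends in 0xc67178f2, so a low byte of 0xf2 in the constant just
@ fetched marks the 64th and final round constant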
#if 31<15
# if __ARM_ARCH__>=7
ldr r2,[r1],#4 @ prefetch
# else
ldrb r2,[r1,#3]
# endif
eor r3,r5,r6 @ a^b, b^c in next round
#else
ldr r2,[sp,#1*4] @ from future BODY_16_xx
eor r3,r5,r6 @ a^b, b^c in next round
ldr r1,[sp,#14*4] @ from future BODY_16_xx
#endif
eor r0,r0,r5,ror#20 @ Sigma0(a)
and r12,r12,r3 @ (b^c)&=(a^b)
add r8,r8,r4 @ d+=h
eor r12,r12,r6 @ Maj(a,b,c)
add r4,r4,r0,ror#2 @ h+=Sigma0(a)
@ add r4,r4,r12 @ h+=Maj(a,b,c)
#if __ARM_ARCH__>=7
ite eq @ Thumb2 thing, sanity check in ARM
#endif
ldreq r3,[sp,#16*4] @ pull ctx
bne .Lrounds_16_xx
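@ all 64 rounds done: fold the working variables a-h (r4-r11) back into
@ the context pulled into r3, then loop while input remains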
add r4,r4,r12 @ h+=Maj(a,b,c) from the past
ldr r0,[r3,#0]
ldr r2,[r3,#4]
ldr r12,[r3,#8]
add r4,r4,r0
ldr r0,[r3,#12]
add r5,r5,r2
ldr r2,[r3,#16]
add r6,r6,r12
ldr r12,[r3,#20]
add r7,r7,r0
ldr r0,[r3,#24]
add r8,r8,r2
ldr r2,[r3,#28]
add r9,r9,r12
ldr r1,[sp,#17*4] @ pull inp
ldr r12,[sp,#18*4] @ pull inp+len
add r10,r10,r0
add r11,r11,r2
stmia r3,{r4,r5,r6,r7,r8,r9,r10,r11}
cmp r1,r12
sub r14,r14,#256 @ rewind Ktbl
bne .Loop
add sp,sp,#19*4 @ destroy frame
#if __ARM_ARCH__>=5
ldmia sp!,{r4-r11,pc}
#else
ldmia sp!,{r4-r11,lr}
tst lr,#1
moveq pc,lr @ be binary compatible with V4, yet
.word 0xe12fff1e @ interoperable with Thumb ISA:-)
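@ 0xe12fff1e is the ARM encoding of "bx lr", spelled as .word so that
@ pre-Thumb (ARMv4) assemblers still accept this file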
#endif
.size sha256_block_data_order,.-sha256_block_data_order
#if __ARM_MAX_ARCH__>=7
.arch armv7-a
.fpu neon
.global sha256_block_data_order_neon
.type sha256_block_data_order_neon,%function
.align 4
sha256_block_data_order_neon:
.LNEON:
stmdb sp!,{r4-r12,lr}
sub r11,sp,#16*4+16
adr r14,.Lsha256_block_data_order
sub r14,r14,#.Lsha256_block_data_order-K256
bic r11,r11,#15 @ align for 128-bit stores
mov r12,sp
mov sp,r11 @ alloca
add r2,r1,r2,lsl#6 @ len to point at the end of inp
vld1.8 {q0},[r1]!
vld1.8 {q1},[r1]!
vld1.8 {q2},[r1]!
vld1.8 {q3},[r1]!
vld1.32 {q8},[r14,:128]!
vld1.32 {q9},[r14,:128]!
vld1.32 {q10},[r14,:128]!
vld1.32 {q11},[r14,:128]!
vrev32.8 q0,q0 @ yes, even on
str r0,[sp,#64]
vrev32.8 q1,q1 @ big-endian
str r1,[sp,#68]
mov r1,sp
vrev32.8 q2,q2
str r2,[sp,#72]
vrev32.8 q3,q3
str r12,[sp,#76] @ save original sp
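@ frame layout: X[0..15]+K at sp+0..sp+63, ctx at sp+64, inp at sp+68,
@ inp+len at sp+72, caller's sp at sp+76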
vadd.i32 q8,q8,q0
vadd.i32 q9,q9,q1
vst1.32 {q8},[r1,:128]!
vadd.i32 q10,q10,q2
vst1.32 {q9},[r1,:128]!
vadd.i32 q11,q11,q3
vst1.32 {q10},[r1,:128]!
vst1.32 {q11},[r1,:128]!
ldmia r0,{r4-r11}
sub r1,r1,#64
ldr r2,[sp,#0]
eor r12,r12,r12
eor r3,r5,r6
b .L_00_48

.align 4
.L_00_48:
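@ rounds 0..47: scalar SHA-256 rounds interleaved with NEON message
@ scheduling (sigma0/sigma1 on four X[] words at a time, plus K256 add)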
vext.8 q8,q0,q1,#4
add r11,r11,r2
eor r2,r9,r10
eor r0,r8,r8,ror#5
vext.8 q9,q2,q3,#4
add r4,r4,r12
and r2,r2,r8
eor r12,r0,r8,ror#19
vshr.u32 q10,q8,#7
eor r0,r4,r4,ror#11
eor r2,r2,r10
vadd.i32 q0,q0,q9
add r11,r11,r12,ror#6
eor r12,r4,r5
vshr.u32 q9,q8,#3
eor r0,r0,r4,ror#20
add r11,r11,r2
vsli.32 q10,q8,#25
ldr r2,[sp,#4]
and r3,r3,r12
vshr.u32 q11,q8,#18
add r7,r7,r11
add r11,r11,r0,ror#2
eor r3,r3,r5
veor q9,q9,q10
add r10,r10,r2
vsli.32 q11,q8,#14
eor r2,r8,r9
eor r0,r7,r7,ror#5
vshr.u32 d24,d7,#17
add r11,r11,r3
and r2,r2,r7
veor q9,q9,q11
eor r3,r0,r7,ror#19
eor r0,r11,r11,ror#11
vsli.32 d24,d7,#15
eor r2,r2,r9
add r10,r10,r3,ror#6
vshr.u32 d25,d7,#10
eor r3,r11,r4
eor r0,r0,r11,ror#20
vadd.i32 q0,q0,q9
add r10,r10,r2
ldr r2,[sp,#8]
veor d25,d25,d24
and r12,r12,r3
add r6,r6,r10
vshr.u32 d24,d7,#19
add r10,r10,r0,ror#2
eor r12,r12,r4
vsli.32 d24,d7,#13
add r9,r9,r2
eor r2,r7,r8
veor d25,d25,d24
eor r0,r6,r6,ror#5
add r10,r10,r12
vadd.i32 d0,d0,d25
and r2,r2,r6
eor r12,r0,r6,ror#19
vshr.u32 d24,d0,#17
eor r0,r10,r10,ror#11
eor r2,r2,r8
vsli.32 d24,d0,#15
add r9,r9,r12,ror#6
eor r12,r10,r11
vshr.u32 d25,d0,#10
eor r0,r0,r10,ror#20
add r9,r9,r2
veor d25,d25,d24
ldr r2,[sp,#12]
and r3,r3,r12
vshr.u32 d24,d0,#19
add r5,r5,r9
add r9,r9,r0,ror#2
eor r3,r3,r11
vld1.32 {q8},[r14,:128]!
add r8,r8,r2
vsli.32 d24,d0,#13
eor r2,r6,r7
eor r0,r5,r5,ror#5
veor d25,d25,d24
add r9,r9,r3
and r2,r2,r5
vadd.i32 d1,d1,d25
eor r3,r0,r5,ror#19
eor r0,r9,r9,ror#11
vadd.i32 q8,q8,q0
eor r2,r2,r7
add r8,r8,r3,ror#6
eor r3,r9,r10
eor r0,r0,r9,ror#20
add r8,r8,r2
ldr r2,[sp,#16]
and r12,r12,r3
add r4,r4,r8
vst1.32 {q8},[r1,:128]!
add r8,r8,r0,ror#2
eor r12,r12,r10
vext.8 q8,q1,q2,#4
add r7,r7,r2
eor r2,r5,r6
eor r0,r4,r4,ror#5
vext.8 q9,q3,q0,#4
add r8,r8,r12
and r2,r2,r4
eor r12,r0,r4,ror#19
vshr.u32 q10,q8,#7
eor r0,r8,r8,ror#11
eor r2,r2,r6
vadd.i32 q1,q1,q9
add r7,r7,r12,ror#6
eor r12,r8,r9
vshr.u32 q9,q8,#3
eor r0,r0,r8,ror#20
add r7,r7,r2
vsli.32 q10,q8,#25
ldr r2,[sp,#20]
and r3,r3,r12
vshr.u32 q11,q8,#18
add r11,r11,r7
add r7,r7,r0,ror#2
eor r3,r3,r9
veor q9,q9,q10
add r6,r6,r2
vsli.32 q11,q8,#14
eor r2,r4,r5
eor r0,r11,r11,ror#5
vshr.u32 d24,d1,#17
add r7,r7,r3
and r2,r2,r11
veor q9,q9,q11
eor r3,r0,r11,ror#19
eor r0,r7,r7,ror#11
vsli.32 d24,d1,#15
eor r2,r2,r5
add r6,r6,r3,ror#6
vshr.u32 d25,d1,#10
eor r3,r7,r8
eor r0,r0,r7,ror#20
vadd.i32 q1,q1,q9
add r6,r6,r2
ldr r2,[sp,#24]
veor d25,d25,d24
and r12,r12,r3
add r10,r10,r6
vshr.u32 d24,d1,#19
add r6,r6,r0,ror#2
eor r12,r12,r8
vsli.32 d24,d1,#13
add r5,r5,r2
eor r2,r11,r4
veor d25,d25,d24
eor r0,r10,r10,ror#5
add r6,r6,r12
vadd.i32 d2,d2,d25
and r2,r2,r10
eor r12,r0,r10,ror#19
vshr.u32 d24,d2,#17
eor r0,r6,r6,ror#11
eor r2,r2,r4
vsli.32 d24,d2,#15
add r5,r5,r12,ror#6
eor r12,r6,r7
vshr.u32 d25,d2,#10
eor r0,r0,r6,ror#20
add r5,r5,r2
veor d25,d25,d24
ldr r2,[sp,#28]
and r3,r3,r12
vshr.u32 d24,d2,#19
add r9,r9,r5
add r5,r5,r0,ror#2
eor r3,r3,r7
vld1.32 {q8},[r14,:128]!
add r4,r4,r2
vsli.32 d24,d2,#13
eor r2,r10,r11
eor r0,r9,r9,ror#5
veor d25,d25,d24
add r5,r5,r3
and r2,r2,r9
vadd.i32 d3,d3,d25
eor r3,r0,r9,ror#19
eor r0,r5,r5,ror#11
vadd.i32 q8,q8,q1
eor r2,r2,r11
add r4,r4,r3,ror#6
eor r3,r5,r6
eor r0,r0,r5,ror#20
add r4,r4,r2
ldr r2,[sp,#32]
and r12,r12,r3
add r8,r8,r4
vst1.32 {q8},[r1,:128]!
add r4,r4,r0,ror#2
eor r12,r12,r6
vext.8 q8,q2,q3,#4
add r11,r11,r2
eor r2,r9,r10
eor r0,r8,r8,ror#5
vext.8 q9,q0,q1,#4
add r4,r4,r12
and r2,r2,r8
eor r12,r0,r8,ror#19
vshr.u32 q10,q8,#7
eor r0,r4,r4,ror#11
eor r2,r2,r10
vadd.i32 q2,q2,q9
add r11,r11,r12,ror#6
eor r12,r4,r5
vshr.u32 q9,q8,#3
eor r0,r0,r4,ror#20
add r11,r11,r2
vsli.32 q10,q8,#25
ldr r2,[sp,#36]
and r3,r3,r12
vshr.u32 q11,q8,#18
add r7,r7,r11
add r11,r11,r0,ror#2
eor r3,r3,r5
veor q9,q9,q10
add r10,r10,r2
vsli.32 q11,q8,#14
eor r2,r8,r9
eor r0,r7,r7,ror#5
vshr.u32 d24,d3,#17
add r11,r11,r3
and r2,r2,r7
veor q9,q9,q11
eor r3,r0,r7,ror#19
eor r0,r11,r11,ror#11
vsli.32 d24,d3,#15
eor r2,r2,r9
add r10,r10,r3,ror#6
vshr.u32 d25,d3,#10
eor r3,r11,r4
eor r0,r0,r11,ror#20
vadd.i32 q2,q2,q9
add r10,r10,r2
ldr r2,[sp,#40]
veor d25,d25,d24
and r12,r12,r3
add r6,r6,r10
vshr.u32 d24,d3,#19
add r10,r10,r0,ror#2
eor r12,r12,r4
vsli.32 d24,d3,#13
add r9,r9,r2
eor r2,r7,r8
veor d25,d25,d24
eor r0,r6,r6,ror#5
add r10,r10,r12
vadd.i32 d4,d4,d25
and r2,r2,r6
eor r12,r0,r6,ror#19
vshr.u32 d24,d4,#17
eor r0,r10,r10,ror#11
eor r2,r2,r8
vsli.32 d24,d4,#15
add r9,r9,r12,ror#6
eor r12,r10,r11
vshr.u32 d25,d4,#10
eor r0,r0,r10,ror#20
add r9,r9,r2
veor d25,d25,d24
ldr r2,[sp,#44]
and r3,r3,r12
vshr.u32 d24,d4,#19
add r5,r5,r9
add r9,r9,r0,ror#2
eor r3,r3,r11
vld1.32 {q8},[r14,:128]!
add r8,r8,r2
vsli.32 d24,d4,#13
eor r2,r6,r7
eor r0,r5,r5,ror#5
veor d25,d25,d24
add r9,r9,r3
and r2,r2,r5
vadd.i32 d5,d5,d25
eor r3,r0,r5,ror#19
eor r0,r9,r9,ror#11
vadd.i32 q8,q8,q2
eor r2,r2,r7
add r8,r8,r3,ror#6
eor r3,r9,r10
eor r0,r0,r9,ror#20
add r8,r8,r2
ldr r2,[sp,#48]
and r12,r12,r3
add r4,r4,r8
vst1.32 {q8},[r1,:128]!
add r8,r8,r0,ror#2
eor r12,r12,r10
vext.8 q8,q3,q0,#4
add r7,r7,r2
eor r2,r5,r6
eor r0,r4,r4,ror#5
vext.8 q9,q1,q2,#4
add r8,r8,r12
and r2,r2,r4
eor r12,r0,r4,ror#19
vshr.u32 q10,q8,#7
eor r0,r8,r8,ror#11
eor r2,r2,r6
vadd.i32 q3,q3,q9
add r7,r7,r12,ror#6
eor r12,r8,r9
vshr.u32 q9,q8,#3
eor r0,r0,r8,ror#20
add r7,r7,r2
vsli.32 q10,q8,#25
ldr r2,[sp,#52]
and r3,r3,r12
vshr.u32 q11,q8,#18
add r11,r11,r7
add r7,r7,r0,ror#2
eor r3,r3,r9
veor q9,q9,q10
add r6,r6,r2
vsli.32 q11,q8,#14
eor r2,r4,r5
eor r0,r11,r11,ror#5
vshr.u32 d24,d5,#17
add r7,r7,r3
and r2,r2,r11
veor q9,q9,q11
eor r3,r0,r11,ror#19
eor r0,r7,r7,ror#11
vsli.32 d24,d5,#15
eor r2,r2,r5
add r6,r6,r3,ror#6
vshr.u32 d25,d5,#10
eor r3,r7,r8
eor r0,r0,r7,ror#20
vadd.i32 q3,q3,q9
add r6,r6,r2
ldr r2,[sp,#56]
veor d25,d25,d24
and r12,r12,r3
add r10,r10,r6
vshr.u32 d24,d5,#19
add r6,r6,r0,ror#2
eor r12,r12,r8
vsli.32 d24,d5,#13
add r5,r5,r2
eor r2,r11,r4
veor d25,d25,d24
eor r0,r10,r10,ror#5
add r6,r6,r12
vadd.i32 d6,d6,d25
and r2,r2,r10
eor r12,r0,r10,ror#19
vshr.u32 d24,d6,#17
eor r0,r6,r6,ror#11
eor r2,r2,r4
vsli.32 d24,d6,#15
add r5,r5,r12,ror#6
eor r12,r6,r7
vshr.u32 d25,d6,#10
eor r0,r0,r6,ror#20
add r5,r5,r2
veor d25,d25,d24
ldr r2,[sp,#60]
and r3,r3,r12
vshr.u32 d24,d6,#19
add r9,r9,r5
add r5,r5,r0,ror#2
eor r3,r3,r7
vld1.32 {q8},[r14,:128]!
add r4,r4,r2
vsli.32 d24,d6,#13
eor r2,r10,r11
eor r0,r9,r9,ror#5
veor d25,d25,d24
add r5,r5,r3
and r2,r2,r9
vadd.i32 d7,d7,d25
eor r3,r0,r9,ror#19
eor r0,r5,r5,ror#11
vadd.i32 q8,q8,q3
eor r2,r2,r11
add r4,r4,r3,ror#6
eor r3,r5,r6
eor r0,r0,r5,ror#20
add r4,r4,r2
ldr r2,[r14]
and r12,r12,r3
add r8,r8,r4
vst1.32 {q8},[r1,:128]!
add r4,r4,r0,ror#2
eor r12,r12,r6
teq r2,#0 @ check for K256 terminator
ldr r2,[sp,#0]
sub r1,r1,#64
bne .L_00_48
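@ rounds 48..63: K256 is exhausted (terminator word reached) and the
@ schedule is fully stored at sp[0..63]; load and byte-swap the next
@ input block while finishing the scalar rounds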
ldr r1,[sp,#68]
ldr r0,[sp,#72]
sub r14,r14,#256 @ rewind r14
teq r1,r0
it eq
subeq r1,r1,#64 @ avoid SEGV
vld1.8 {q0},[r1]! @ load next input block
vld1.8 {q1},[r1]!
vld1.8 {q2},[r1]!
vld1.8 {q3},[r1]!
it ne
strne r1,[sp,#68]
mov r1,sp
add r11,r11,r2
eor r2,r9,r10
eor r0,r8,r8,ror#5
add r4,r4,r12
vld1.32 {q8},[r14,:128]!
and r2,r2,r8
eor r12,r0,r8,ror#19
eor r0,r4,r4,ror#11
eor r2,r2,r10
vrev32.8 q0,q0
add r11,r11,r12,ror#6
eor r12,r4,r5
eor r0,r0,r4,ror#20
add r11,r11,r2
vadd.i32 q8,q8,q0
ldr r2,[sp,#4]
and r3,r3,r12
add r7,r7,r11
add r11,r11,r0,ror#2
eor r3,r3,r5
add r10,r10,r2
eor r2,r8,r9
eor r0,r7,r7,ror#5
add r11,r11,r3
and r2,r2,r7
eor r3,r0,r7,ror#19
eor r0,r11,r11,ror#11
eor r2,r2,r9
add r10,r10,r3,ror#6
eor r3,r11,r4
eor r0,r0,r11,ror#20
add r10,r10,r2
ldr r2,[sp,#8]
and r12,r12,r3
add r6,r6,r10
add r10,r10,r0,ror#2
eor r12,r12,r4
add r9,r9,r2
eor r2,r7,r8
eor r0,r6,r6,ror#5
add r10,r10,r12
and r2,r2,r6
eor r12,r0,r6,ror#19
eor r0,r10,r10,ror#11
eor r2,r2,r8
add r9,r9,r12,ror#6
eor r12,r10,r11
eor r0,r0,r10,ror#20
add r9,r9,r2
ldr r2,[sp,#12]
and r3,r3,r12
add r5,r5,r9
add r9,r9,r0,ror#2
eor r3,r3,r11
add r8,r8,r2
eor r2,r6,r7
eor r0,r5,r5,ror#5
add r9,r9,r3
and r2,r2,r5
eor r3,r0,r5,ror#19
eor r0,r9,r9,ror#11
eor r2,r2,r7
add r8,r8,r3,ror#6
eor r3,r9,r10
eor r0,r0,r9,ror#20
add r8,r8,r2
ldr r2,[sp,#16]
and r12,r12,r3
add r4,r4,r8
add r8,r8,r0,ror#2
eor r12,r12,r10
vst1.32 {q8},[r1,:128]!
add r7,r7,r2
eor r2,r5,r6
eor r0,r4,r4,ror#5
add r8,r8,r12
vld1.32 {q8},[r14,:128]!
and r2,r2,r4
eor r12,r0,r4,ror#19
eor r0,r8,r8,ror#11
eor r2,r2,r6
vrev32.8 q1,q1
add r7,r7,r12,ror#6
eor r12,r8,r9
eor r0,r0,r8,ror#20
add r7,r7,r2
vadd.i32 q8,q8,q1
ldr r2,[sp,#20]
and r3,r3,r12
add r11,r11,r7
add r7,r7,r0,ror#2
eor r3,r3,r9
add r6,r6,r2
eor r2,r4,r5
eor r0,r11,r11,ror#5
add r7,r7,r3
and r2,r2,r11
eor r3,r0,r11,ror#19
eor r0,r7,r7,ror#11
eor r2,r2,r5
add r6,r6,r3,ror#6
eor r3,r7,r8
eor r0,r0,r7,ror#20
add r6,r6,r2
ldr r2,[sp,#24]
and r12,r12,r3
add r10,r10,r6
add r6,r6,r0,ror#2
eor r12,r12,r8
add r5,r5,r2
eor r2,r11,r4
eor r0,r10,r10,ror#5
add r6,r6,r12
and r2,r2,r10
eor r12,r0,r10,ror#19
eor r0,r6,r6,ror#11
eor r2,r2,r4
add r5,r5,r12,ror#6
eor r12,r6,r7
eor r0,r0,r6,ror#20
add r5,r5,r2
ldr r2,[sp,#28]
and r3,r3,r12
add r9,r9,r5
add r5,r5,r0,ror#2
eor r3,r3,r7
add r4,r4,r2
eor r2,r10,r11
eor r0,r9,r9,ror#5
add r5,r5,r3
and r2,r2,r9
eor r3,r0,r9,ror#19
eor r0,r5,r5,ror#11
eor r2,r2,r11
add r4,r4,r3,ror#6
eor r3,r5,r6
eor r0,r0,r5,ror#20
add r4,r4,r2
ldr r2,[sp,#32]
and r12,r12,r3
add r8,r8,r4
add r4,r4,r0,ror#2
eor r12,r12,r6
vst1.32 {q8},[r1,:128]!
add r11,r11,r2
eor r2,r9,r10
eor r0,r8,r8,ror#5
add r4,r4,r12
vld1.32 {q8},[r14,:128]!
and r2,r2,r8
eor r12,r0,r8,ror#19
eor r0,r4,r4,ror#11
eor r2,r2,r10
vrev32.8 q2,q2
add r11,r11,r12,ror#6
eor r12,r4,r5
eor r0,r0,r4,ror#20
add r11,r11,r2
vadd.i32 q8,q8,q2
ldr r2,[sp,#36]
and r3,r3,r12
add r7,r7,r11
add r11,r11,r0,ror#2
eor r3,r3,r5
add r10,r10,r2
eor r2,r8,r9
eor r0,r7,r7,ror#5
add r11,r11,r3
and r2,r2,r7
eor r3,r0,r7,ror#19
eor r0,r11,r11,ror#11
eor r2,r2,r9
add r10,r10,r3,ror#6
eor r3,r11,r4
eor r0,r0,r11,ror#20
add r10,r10,r2
ldr r2,[sp,#40]
and r12,r12,r3
add r6,r6,r10
add r10,r10,r0,ror#2
eor r12,r12,r4
add r9,r9,r2
eor r2,r7,r8
eor r0,r6,r6,ror#5
add r10,r10,r12
and r2,r2,r6
eor r12,r0,r6,ror#19
eor r0,r10,r10,ror#11
eor r2,r2,r8
add r9,r9,r12,ror#6
eor r12,r10,r11
eor r0,r0,r10,ror#20
add r9,r9,r2
ldr r2,[sp,#44]
and r3,r3,r12
add r5,r5,r9
add r9,r9,r0,ror#2
eor r3,r3,r11
add r8,r8,r2
eor r2,r6,r7
eor r0,r5,r5,ror#5
add r9,r9,r3
and r2,r2,r5
eor r3,r0,r5,ror#19
eor r0,r9,r9,ror#11
eor r2,r2,r7
add r8,r8,r3,ror#6
eor r3,r9,r10
eor r0,r0,r9,ror#20
add r8,r8,r2
ldr r2,[sp,#48]
and r12,r12,r3
add r4,r4,r8
add r8,r8,r0,ror#2
eor r12,r12,r10
vst1.32 {q8},[r1,:128]!
add r7,r7,r2
eor r2,r5,r6
eor r0,r4,r4,ror#5
add r8,r8,r12
vld1.32 {q8},[r14,:128]!
and r2,r2,r4
eor r12,r0,r4,ror#19
eor r0,r8,r8,ror#11
eor r2,r2,r6
vrev32.8 q3,q3
add r7,r7,r12,ror#6
eor r12,r8,r9
eor r0,r0,r8,ror#20
add r7,r7,r2
vadd.i32 q8,q8,q3
ldr r2,[sp,#52]
and r3,r3,r12
add r11,r11,r7
add r7,r7,r0,ror#2
eor r3,r3,r9
add r6,r6,r2
eor r2,r4,r5
eor r0,r11,r11,ror#5
add r7,r7,r3
and r2,r2,r11
eor r3,r0,r11,ror#19
eor r0,r7,r7,ror#11
eor r2,r2,r5
add r6,r6,r3,ror#6
eor r3,r7,r8
eor r0,r0,r7,ror#20
add r6,r6,r2
ldr r2,[sp,#56]
and r12,r12,r3
add r10,r10,r6
add r6,r6,r0,ror#2
eor r12,r12,r8
add r5,r5,r2
eor r2,r11,r4
eor r0,r10,r10,ror#5
add r6,r6,r12
and r2,r2,r10
eor r12,r0,r10,ror#19
eor r0,r6,r6,ror#11
eor r2,r2,r4
add r5,r5,r12,ror#6
eor r12,r6,r7
eor r0,r0,r6,ror#20
add r5,r5,r2
ldr r2,[sp,#60]
and r3,r3,r12
add r9,r9,r5
add r5,r5,r0,ror#2
eor r3,r3,r7
add r4,r4,r2
eor r2,r10,r11
eor r0,r9,r9,ror#5
add r5,r5,r3
and r2,r2,r9
eor r3,r0,r9,ror#19
eor r0,r5,r5,ror#11
eor r2,r2,r11
add r4,r4,r3,ror#6
eor r3,r5,r6
eor r0,r0,r5,ror#20
add r4,r4,r2
ldr r2,[sp,#64]
and r12,r12,r3
add r8,r8,r4
add r4,r4,r0,ror#2
eor r12,r12,r6
vst1.32 {q8},[r1,:128]!
ldr r0,[r2,#0]
add r4,r4,r12 @ h+=Maj(a,b,c) from the past
ldr r12,[r2,#4]
ldr r3,[r2,#8]
ldr r1,[r2,#12]
add r4,r4,r0 @ accumulate
ldr r0,[r2,#16]
add r5,r5,r12
ldr r12,[r2,#20]
add r6,r6,r3
ldr r3,[r2,#24]
add r7,r7,r1
ldr r1,[r2,#28]
add r8,r8,r0
str r4,[r2],#4
add r9,r9,r12
str r5,[r2],#4
add r10,r10,r3
str r6,[r2],#4
add r11,r11,r1
str r7,[r2],#4
stmia r2,{r8-r11}
ittte ne
movne r1,sp
ldrne r2,[sp,#0]
eorne r12,r12,r12
ldreq sp,[sp,#76] @ restore original sp
itt ne
eorne r3,r5,r6
bne .L_00_48
ldmia sp!,{r4-r12,pc}
.size sha256_block_data_order_neon,.-sha256_block_data_order_neon
#endif
#if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
# ifdef __thumb2__
# define INST(a,b,c,d) .byte c,d|0xc,a,b
# else
# define INST(a,b,c,d) .byte a,b,c,d
# endif
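@ INST() emits the ARMv8 crypto-extension opcodes as raw bytes so that
@ assemblers without ARMv8 support can still build this file; in Thumb2
@ mode the two halfwords of the 32-bit encoding are stored in the
@ opposite order (with the leading opcode byte adjusted), hence the
@ reordered byte list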
.type sha256_block_data_order_armv8,%function
.align 5
sha256_block_data_order_armv8:
.LARMv8:
vld1.32 {q0,q1},[r0]
# ifdef __thumb2__
adr r3,.LARMv8
sub r3,r3,#.LARMv8-K256
# else
adrl r3,K256
# endif
add r2,r1,r2,lsl#6 @ len to point at the end of inp
.Loop_v8:
vld1.8 {q8-q9},[r1]!
vld1.8 {q10-q11},[r1]!
vld1.32 {q12},[r3]!
vrev32.8 q8,q8
vrev32.8 q9,q9
vrev32.8 q10,q10
vrev32.8 q11,q11
vmov q14,q0 @ offload
vmov q15,q1
teq r1,r2
vld1.32 {q13},[r3]!
vadd.i32 q12,q12,q8
INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9
vmov q2,q0
INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11
vld1.32 {q12},[r3]!
vadd.i32 q13,q13,q9
INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10
vmov q2,q0
INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8
vld1.32 {q13},[r3]!
vadd.i32 q12,q12,q10
INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11
vmov q2,q0
INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9
vld1.32 {q12},[r3]!
vadd.i32 q13,q13,q11
INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8
vmov q2,q0
INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10
vld1.32 {q13},[r3]!
vadd.i32 q12,q12,q8
INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9
vmov q2,q0
INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11
vld1.32 {q12},[r3]!
vadd.i32 q13,q13,q9
INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10
vmov q2,q0
INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8
vld1.32 {q13},[r3]!
vadd.i32 q12,q12,q10
INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11
vmov q2,q0
INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9
vld1.32 {q12},[r3]!
vadd.i32 q13,q13,q11
INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8
vmov q2,q0
INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10
vld1.32 {q13},[r3]!
vadd.i32 q12,q12,q8
INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9
vmov q2,q0
INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11
vld1.32 {q12},[r3]!
vadd.i32 q13,q13,q9
INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10
vmov q2,q0
INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8
vld1.32 {q13},[r3]!
vadd.i32 q12,q12,q10
INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11
vmov q2,q0
INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9
vld1.32 {q12},[r3]!
vadd.i32 q13,q13,q11
INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8
vmov q2,q0
INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10
vld1.32 {q13},[r3]!
vadd.i32 q12,q12,q8
vmov q2,q0
INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
vld1.32 {q12},[r3]!
vadd.i32 q13,q13,q9
vmov q2,q0
INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
vld1.32 {q13},[r3]
vadd.i32 q12,q12,q10
sub r3,r3,#256-16 @ rewind
vmov q2,q0
INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
vadd.i32 q13,q13,q11
vmov q2,q0
INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
vadd.i32 q0,q0,q14
vadd.i32 q1,q1,q15
it ne
bne .Loop_v8
vst1.32 {q0,q1},[r0]
bx lr @ bx lr
.size sha256_block_data_order_armv8,.-sha256_block_data_order_armv8
#endif
.asciz "SHA256 block transform for ARMv4/NEON/ARMv8, CRYPTOGAMS by <appro@openssl.org>"
.align 2
#if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
.comm OPENSSL_armcap_P,4,4
#endif