# aes-x86_64.pl
# (extraction note: repository-listing text and the fused line-number
# gutter from the web capture were removed; no source content deleted)
#!/usr/bin/env perl
#
# ====================================================================
# Written by Andy Polyakov <appro@fy.chalmers.se> for the OpenSSL
# project. The module is, however, dual licensed under OpenSSL and
# CRYPTOGAMS licenses depending on where you obtain it. For further
# details see http://www.openssl.org/~appro/cryptogams/.
# ====================================================================
#
# Version 2.1.
#
# aes-*-cbc benchmarks are improved by >70% [compared to gcc 3.3.2 on
# Opteron 240 CPU] plus all the bells-n-whistles from 32-bit version
# [you'll notice a lot of resemblance], such as compressed S-boxes
# in little-endian byte order, prefetch of these tables in CBC mode,
# as well as avoiding L1 cache aliasing between stack frame and key
# schedule and already mentioned tables, compressed Td4...
#
# Performance in number of cycles per processed byte for 128-bit key:
#
#		ECB encrypt	ECB decrypt	CBC large chunk
# AMD64		33		43		13.0
# EM64T		38		56		18.6(*)
# Core 2	30		42		14.5(*)
# Atom		65		86		32.1(*)
#
# (*) with hyper-threading off
  28. $flavour = shift;
  29. $output = shift;
  30. if ($flavour =~ /\./) { $output = $flavour; undef $flavour; }
  31. $win64=0; $win64=1 if ($flavour =~ /[nm]asm|mingw64/ || $output =~ /\.asm$/);
  32. $0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
  33. ( $xlate="${dir}x86_64-xlate.pl" and -f $xlate ) or
  34. ( $xlate="${dir}../../perlasm/x86_64-xlate.pl" and -f $xlate) or
  35. die "can't locate x86_64-xlate.pl";
  36. open OUT,"| \"$^X\" \"$xlate\" $flavour \"$output\"";
  37. *STDOUT=*OUT;
  38. $verticalspin=1; # unlike 32-bit version $verticalspin performs
  39. # ~15% better on both AMD and Intel cores
  40. $speed_limit=512; # see aes-586.pl for details
  41. $code=".text\n";
  42. $s0="%eax";
  43. $s1="%ebx";
  44. $s2="%ecx";
  45. $s3="%edx";
  46. $acc0="%esi"; $mask80="%rsi";
  47. $acc1="%edi"; $maskfe="%rdi";
  48. $acc2="%ebp"; $mask1b="%rbp";
  49. $inp="%r8";
  50. $out="%r9";
  51. $t0="%r10d";
  52. $t1="%r11d";
  53. $t2="%r12d";
  54. $rnds="%r13d";
  55. $sbox="%r14";
  56. $key="%r15";
  57. sub hi() { my $r=shift; $r =~ s/%[er]([a-d])x/%\1h/; $r; }
  58. sub lo() { my $r=shift; $r =~ s/%[er]([a-d])x/%\1l/;
  59. $r =~ s/%[er]([sd]i)/%\1l/;
  60. $r =~ s/%(r[0-9]+)[d]?/%\1b/; $r; }
  61. sub LO() { my $r=shift; $r =~ s/%r([a-z]+)/%e\1/;
  62. $r =~ s/%r([0-9]+)/%r\1d/; $r; }
  63. sub _data_word()
  64. { my $i;
  65. while(defined($i=shift)) { $code.=sprintf".long\t0x%08x,0x%08x\n",$i,$i; }
  66. }
  67. sub data_word()
  68. { my $i;
  69. my $last=pop(@_);
  70. $code.=".long\t";
  71. while(defined($i=shift)) { $code.=sprintf"0x%08x,",$i; }
  72. $code.=sprintf"0x%08x\n",$last;
  73. }
  74. sub data_byte()
  75. { my $i;
  76. my $last=pop(@_);
  77. $code.=".byte\t";
  78. while(defined($i=shift)) { $code.=sprintf"0x%02x,",$i&0xff; }
  79. $code.=sprintf"0x%02x\n",$last&0xff;
  80. }
# One inner encryption round, "vertical spin" variant: all four output
# words are accumulated in parallel ($t0..$t3) from byte-indexed loads
# of the 8-byte-stride Te table, then xor-ed with the next round key.
# Advances $key by 16.  Uses %r8d as a fourth temporary, so the input
# pointer kept there is destroyed.
sub encvert()
{ my $t3="%r8d";	# zaps $inp!

$code.=<<___;
	# favor 3-way issue Opteron pipeline...
	movzb	`&lo("$s0")`,$acc0
	movzb	`&lo("$s1")`,$acc1
	movzb	`&lo("$s2")`,$acc2
	mov	0($sbox,$acc0,8),$t0
	mov	0($sbox,$acc1,8),$t1
	mov	0($sbox,$acc2,8),$t2
	movzb	`&hi("$s1")`,$acc0
	movzb	`&hi("$s2")`,$acc1
	movzb	`&lo("$s3")`,$acc2
	xor	3($sbox,$acc0,8),$t0
	xor	3($sbox,$acc1,8),$t1
	mov	0($sbox,$acc2,8),$t3
	movzb	`&hi("$s3")`,$acc0
	shr	\$16,$s2
	movzb	`&hi("$s0")`,$acc2
	xor	3($sbox,$acc0,8),$t2
	shr	\$16,$s3
	xor	3($sbox,$acc2,8),$t3
	shr	\$16,$s1
	lea	16($key),$key
	shr	\$16,$s0
	movzb	`&lo("$s2")`,$acc0
	movzb	`&lo("$s3")`,$acc1
	movzb	`&lo("$s0")`,$acc2
	xor	2($sbox,$acc0,8),$t0
	xor	2($sbox,$acc1,8),$t1
	xor	2($sbox,$acc2,8),$t2
	movzb	`&hi("$s3")`,$acc0
	movzb	`&hi("$s0")`,$acc1
	movzb	`&lo("$s1")`,$acc2
	xor	1($sbox,$acc0,8),$t0
	xor	1($sbox,$acc1,8),$t1
	xor	2($sbox,$acc2,8),$t3
	mov	12($key),$s3
	movzb	`&hi("$s1")`,$acc1
	movzb	`&hi("$s2")`,$acc2
	mov	0($key),$s0
	xor	1($sbox,$acc1,8),$t2
	xor	1($sbox,$acc2,8),$t3
	mov	4($key),$s1
	mov	8($key),$s2
	xor	$t0,$s0
	xor	$t1,$s1
	xor	$t2,$s2
	xor	$t3,$s3
___
}
# Final encryption round, "vertical spin" variant: SubBytes/ShiftRows
# only (no MixColumns), assembled byte-by-byte from the Te table with
# AND masks selecting the relevant byte lane, then xor-ed with the last
# round key at 16($key).  Uses %r8d as a fourth temporary ($inp is lost).
sub enclastvert()
{ my $t3="%r8d";	# zaps $inp!

$code.=<<___;
	movzb	`&lo("$s0")`,$acc0
	movzb	`&lo("$s1")`,$acc1
	movzb	`&lo("$s2")`,$acc2
	movzb	2($sbox,$acc0,8),$t0
	movzb	2($sbox,$acc1,8),$t1
	movzb	2($sbox,$acc2,8),$t2
	movzb	`&lo("$s3")`,$acc0
	movzb	`&hi("$s1")`,$acc1
	movzb	`&hi("$s2")`,$acc2
	movzb	2($sbox,$acc0,8),$t3
	mov	0($sbox,$acc1,8),$acc1	#$t0
	mov	0($sbox,$acc2,8),$acc2	#$t1
	and	\$0x0000ff00,$acc1
	and	\$0x0000ff00,$acc2
	xor	$acc1,$t0
	xor	$acc2,$t1
	shr	\$16,$s2
	movzb	`&hi("$s3")`,$acc0
	movzb	`&hi("$s0")`,$acc1
	shr	\$16,$s3
	mov	0($sbox,$acc0,8),$acc0	#$t2
	mov	0($sbox,$acc1,8),$acc1	#$t3
	and	\$0x0000ff00,$acc0
	and	\$0x0000ff00,$acc1
	shr	\$16,$s1
	xor	$acc0,$t2
	xor	$acc1,$t3
	shr	\$16,$s0
	movzb	`&lo("$s2")`,$acc0
	movzb	`&lo("$s3")`,$acc1
	movzb	`&lo("$s0")`,$acc2
	mov	0($sbox,$acc0,8),$acc0	#$t0
	mov	0($sbox,$acc1,8),$acc1	#$t1
	mov	0($sbox,$acc2,8),$acc2	#$t2
	and	\$0x00ff0000,$acc0
	and	\$0x00ff0000,$acc1
	and	\$0x00ff0000,$acc2
	xor	$acc0,$t0
	xor	$acc1,$t1
	xor	$acc2,$t2
	movzb	`&lo("$s1")`,$acc0
	movzb	`&hi("$s3")`,$acc1
	movzb	`&hi("$s0")`,$acc2
	mov	0($sbox,$acc0,8),$acc0	#$t3
	mov	2($sbox,$acc1,8),$acc1	#$t0
	mov	2($sbox,$acc2,8),$acc2	#$t1
	and	\$0x00ff0000,$acc0
	and	\$0xff000000,$acc1
	and	\$0xff000000,$acc2
	xor	$acc0,$t3
	xor	$acc1,$t0
	xor	$acc2,$t1
	movzb	`&hi("$s1")`,$acc0
	movzb	`&hi("$s2")`,$acc1
	mov	16+12($key),$s3
	mov	2($sbox,$acc0,8),$acc0	#$t2
	mov	2($sbox,$acc1,8),$acc1	#$t3
	mov	16+0($key),$s0
	and	\$0xff000000,$acc0
	and	\$0xff000000,$acc1
	xor	$acc0,$t2
	xor	$acc1,$t3
	mov	16+4($key),$s1
	mov	16+8($key),$s2
	xor	$t0,$s0
	xor	$t1,$s1
	xor	$t2,$s2
	xor	$t3,$s3
___
}
# One inner-round step, "horizontal" variant: emits code computing the
# single output word $i of the next state from the rotated state words
# @s, with the round-key xor folded in (4*$i($key)).  On $i==3 the
# finished words cached in $t0..$t2 are moved back into the state, and
# the remaining state words themselves serve as scratch registers.
sub encstep()
{ my ($i,@s) = @_;
  my $tmp0=$acc0;
  my $tmp1=$acc1;
  my $tmp2=$acc2;
  my $out=($t0,$t1,$t2,$s[0])[$i];	# destination for this word

	if ($i==3) {
		# last step: accumulators already hold t0..t2
		$tmp0=$s[1];
		$tmp1=$s[2];
		$tmp2=$s[3];
	}
	$code.="	movzb	".&lo($s[0]).",$out\n";
	$code.="	mov	$s[2],$tmp1\n"			if ($i!=3);
	$code.="	lea	16($key),$key\n"		if ($i==0);	# advance key once per round
	$code.="	movzb	".&hi($s[1]).",$tmp0\n";
	$code.="	mov	0($sbox,$out,8),$out\n";
	$code.="	shr	\$16,$tmp1\n";
	$code.="	mov	$s[3],$tmp2\n"			if ($i!=3);
	$code.="	xor	3($sbox,$tmp0,8),$out\n";
	$code.="	movzb	".&lo($tmp1).",$tmp1\n";
	$code.="	shr	\$24,$tmp2\n";
	$code.="	xor	4*$i($key),$out\n";
	$code.="	xor	2($sbox,$tmp1,8),$out\n";
	$code.="	xor	1($sbox,$tmp2,8),$out\n";
	$code.="	mov	$t0,$s[1]\n"			if ($i==3);
	$code.="	mov	$t1,$s[2]\n"			if ($i==3);
	$code.="	mov	$t2,$s[3]\n"			if ($i==3);
	$code.="\n";
}
# Final-round step, "horizontal" variant: like encstep but performs
# SubBytes/ShiftRows only, masking out the relevant byte lane of each
# Te-table load (no MixColumns, no key xor here — the caller emits the
# final key xor).  On $i==3 the cached words $t0..$t2 are restored into
# the state.
sub enclast()
{ my ($i,@s)=@_;
  my $tmp0=$acc0;
  my $tmp1=$acc1;
  my $tmp2=$acc2;
  my $out=($t0,$t1,$t2,$s[0])[$i];	# destination for this word

	if ($i==3) {
		# last step: remaining state words double as scratch
		$tmp0=$s[1];
		$tmp1=$s[2];
		$tmp2=$s[3];
	}
	$code.="	movzb	".&lo($s[0]).",$out\n";
	$code.="	mov	$s[2],$tmp1\n"			if ($i!=3);
	$code.="	mov	2($sbox,$out,8),$out\n";
	$code.="	shr	\$16,$tmp1\n";
	$code.="	mov	$s[3],$tmp2\n"			if ($i!=3);
	$code.="	and	\$0x000000ff,$out\n";
	$code.="	movzb	".&hi($s[1]).",$tmp0\n";
	$code.="	movzb	".&lo($tmp1).",$tmp1\n";
	$code.="	shr	\$24,$tmp2\n";
	$code.="	mov	0($sbox,$tmp0,8),$tmp0\n";
	$code.="	mov	0($sbox,$tmp1,8),$tmp1\n";
	$code.="	mov	2($sbox,$tmp2,8),$tmp2\n";
	$code.="	and	\$0x0000ff00,$tmp0\n";
	$code.="	and	\$0x00ff0000,$tmp1\n";
	$code.="	and	\$0xff000000,$tmp2\n";
	$code.="	xor	$tmp0,$out\n";
	$code.="	mov	$t0,$s[1]\n"			if ($i==3);
	$code.="	xor	$tmp1,$out\n";
	$code.="	mov	$t1,$s[2]\n"			if ($i==3);
	$code.="	xor	$tmp2,$out\n";
	$code.="	mov	$t2,$s[3]\n"			if ($i==3);
	$code.="\n";
}
# Emit _x86_64_AES_encrypt: full-table single-block encryption.
# In: $s0..$s3 = plaintext words, $key = expanded schedule, $sbox = Te.
# Out: $s0..$s3 = ciphertext words.  The vertical-spin path folds the
# final key xor into enclastvert; the horizontal path emits it here.
$code.=<<___;
.type	_x86_64_AES_encrypt,\@abi-omnipotent
.align	16
_x86_64_AES_encrypt:
	xor	0($key),$s0			# xor with key
	xor	4($key),$s1
	xor	8($key),$s2
	xor	12($key),$s3
	mov	240($key),$rnds			# load key->rounds
	sub	\$1,$rnds
	jmp	.Lenc_loop
.align	16
.Lenc_loop:
___
	if ($verticalspin) { &encvert(); }
	else {	&encstep(0,$s0,$s1,$s2,$s3);
		&encstep(1,$s1,$s2,$s3,$s0);
		&encstep(2,$s2,$s3,$s0,$s1);
		&encstep(3,$s3,$s0,$s1,$s2);
	}
$code.=<<___;
	sub	\$1,$rnds
	jnz	.Lenc_loop
___
	if ($verticalspin) { &enclastvert(); }
	else {	&enclast(0,$s0,$s1,$s2,$s3);
		&enclast(1,$s1,$s2,$s3,$s0);
		&enclast(2,$s2,$s3,$s0,$s1);
		&enclast(3,$s3,$s0,$s1,$s2);
		$code.=<<___;
	xor	16+0($key),$s0			# xor with key
	xor	16+4($key),$s1
	xor	16+8($key),$s2
	xor	16+12($key),$s3
___
	}
$code.=<<___;
	.byte	0xf3,0xc3			# rep ret
.size	_x86_64_AES_encrypt,.-_x86_64_AES_encrypt
___
# it's possible to implement this by shifting tN by 8, filling least
# significant byte with byte load and finally bswap-ing at the end,
# but such partial register load kills Core 2...

# One inner round of the "compact" encryption path: SubBytes/ShiftRows
# assembled from single-byte Te4 lookups (smaller table, reduces
# cache-timing leakage); MixColumns is done separately by enctransform.
# Temporarily borrows %r8d/%r9d/%r13d ($inp, $out and $rnds views), so
# those 32-bit values are clobbered here.
sub enccompactvert()
{ my ($t3,$t4,$t5)=("%r8d","%r9d","%r13d");

$code.=<<___;
	movzb	`&lo("$s0")`,$t0
	movzb	`&lo("$s1")`,$t1
	movzb	`&lo("$s2")`,$t2
	movzb	`&lo("$s3")`,$t3
	movzb	`&hi("$s1")`,$acc0
	movzb	`&hi("$s2")`,$acc1
	shr	\$16,$s2
	movzb	`&hi("$s3")`,$acc2
	movzb	($sbox,$t0,1),$t0
	movzb	($sbox,$t1,1),$t1
	movzb	($sbox,$t2,1),$t2
	movzb	($sbox,$t3,1),$t3
	movzb	($sbox,$acc0,1),$t4	#$t0
	movzb	`&hi("$s0")`,$acc0
	movzb	($sbox,$acc1,1),$t5	#$t1
	movzb	`&lo("$s2")`,$acc1
	movzb	($sbox,$acc2,1),$acc2	#$t2
	movzb	($sbox,$acc0,1),$acc0	#$t3
	shl	\$8,$t4
	shr	\$16,$s3
	shl	\$8,$t5
	xor	$t4,$t0
	shr	\$16,$s0
	movzb	`&lo("$s3")`,$t4
	shr	\$16,$s1
	xor	$t5,$t1
	shl	\$8,$acc2
	movzb	`&lo("$s0")`,$t5
	movzb	($sbox,$acc1,1),$acc1	#$t0
	xor	$acc2,$t2
	shl	\$8,$acc0
	movzb	`&lo("$s1")`,$acc2
	shl	\$16,$acc1
	xor	$acc0,$t3
	movzb	($sbox,$t4,1),$t4	#$t1
	movzb	`&hi("$s3")`,$acc0
	movzb	($sbox,$t5,1),$t5	#$t2
	xor	$acc1,$t0
	shr	\$8,$s2
	movzb	`&hi("$s0")`,$acc1
	shl	\$16,$t4
	shr	\$8,$s1
	shl	\$16,$t5
	xor	$t4,$t1
	movzb	($sbox,$acc2,1),$acc2	#$t3
	movzb	($sbox,$acc0,1),$acc0	#$t0
	movzb	($sbox,$acc1,1),$acc1	#$t1
	movzb	($sbox,$s2,1),$s3	#$t3
	movzb	($sbox,$s1,1),$s2	#$t2
	shl	\$16,$acc2
	xor	$t5,$t2
	shl	\$24,$acc0
	xor	$acc2,$t3
	shl	\$24,$acc1
	xor	$acc0,$t0
	shl	\$24,$s3
	xor	$acc1,$t1
	shl	\$24,$s2
	mov	$t0,$s0
	mov	$t1,$s1
	xor	$t2,$s2
	xor	$t3,$s3
___
}
# Reference (unscheduled) MixColumns transform for one state word $sn:
# computes xtime (GF(2^8) doubling with 0x1b reduction, SIMD-within-a-
# register via the 0x80808080/0xfefefefe masks) and combines rotations
# to produce the mixed column.  Kept for clarity; the interleaved
# enctransform below is the one actually used.
sub enctransform_ref()
{ my $sn = shift;
  my ($acc,$r2,$tmp)=("%r8d","%r9d","%r13d");

$code.=<<___;
	mov	$sn,$acc
	and	\$0x80808080,$acc
	mov	$acc,$tmp
	shr	\$7,$tmp
	lea	($sn,$sn),$r2
	sub	$tmp,$acc
	and	\$0xfefefefe,$r2
	and	\$0x1b1b1b1b,$acc
	mov	$sn,$tmp
	xor	$acc,$r2
	xor	$r2,$sn
	rol	\$24,$sn
	xor	$r2,$sn
	ror	\$16,$tmp
	xor	$tmp,$sn
	ror	\$8,$tmp
	xor	$tmp,$sn
___
}
# unlike decrypt case it does not pay off to parallelize enctransform

# MixColumns over all four state words, two words at a time with the
# xtime computations interleaved for pairing; also prefetches four Te4
# cache lines at the end for the next compact round.  Clobbers %r8d and
# %r9d in addition to the usual accumulators.
sub enctransform()
{ my ($t3,$r20,$r21)=($acc2,"%r8d","%r9d");

$code.=<<___;
	mov	\$0x80808080,$t0
	mov	\$0x80808080,$t1
	and	$s0,$t0
	and	$s1,$t1
	mov	$t0,$acc0
	mov	$t1,$acc1
	shr	\$7,$t0
	lea	($s0,$s0),$r20
	shr	\$7,$t1
	lea	($s1,$s1),$r21
	sub	$t0,$acc0
	sub	$t1,$acc1
	and	\$0xfefefefe,$r20
	and	\$0xfefefefe,$r21
	and	\$0x1b1b1b1b,$acc0
	and	\$0x1b1b1b1b,$acc1
	mov	$s0,$t0
	mov	$s1,$t1
	xor	$acc0,$r20
	xor	$acc1,$r21
	xor	$r20,$s0
	xor	$r21,$s1
	mov	\$0x80808080,$t2
	rol	\$24,$s0
	mov	\$0x80808080,$t3
	rol	\$24,$s1
	and	$s2,$t2
	and	$s3,$t3
	xor	$r20,$s0
	xor	$r21,$s1
	mov	$t2,$acc0
	ror	\$16,$t0
	mov	$t3,$acc1
	ror	\$16,$t1
	lea	($s2,$s2),$r20
	shr	\$7,$t2
	xor	$t0,$s0
	shr	\$7,$t3
	xor	$t1,$s1
	ror	\$8,$t0
	lea	($s3,$s3),$r21
	ror	\$8,$t1
	sub	$t2,$acc0
	sub	$t3,$acc1
	xor	$t0,$s0
	xor	$t1,$s1
	and	\$0xfefefefe,$r20
	and	\$0xfefefefe,$r21
	and	\$0x1b1b1b1b,$acc0
	and	\$0x1b1b1b1b,$acc1
	mov	$s2,$t2
	mov	$s3,$t3
	xor	$acc0,$r20
	xor	$acc1,$r21
	ror	\$16,$t2
	xor	$r20,$s2
	ror	\$16,$t3
	xor	$r21,$s3
	rol	\$24,$s2
	mov	0($sbox),$acc0			# prefetch Te4
	rol	\$24,$s3
	xor	$r20,$s2
	mov	64($sbox),$acc1
	xor	$r21,$s3
	mov	128($sbox),$r20
	xor	$t2,$s2
	ror	\$8,$t2
	xor	$t3,$s3
	ror	\$8,$t3
	xor	$t2,$s2
	mov	192($sbox),$r21
	xor	$t3,$s3
___
}
# Emit _x86_64_AES_encrypt_compact: single-block encryption using the
# 256-byte Te4 table only.  Expects 16(%rsp) to hold the end-of-schedule
# pointer (set up by the caller) so the loop knows when to stop.  The
# initial loads prefetch the whole Te4 table into L1.
$code.=<<___;
.type	_x86_64_AES_encrypt_compact,\@abi-omnipotent
.align	16
_x86_64_AES_encrypt_compact:
	lea	128($sbox),$inp			# size optimization
	mov	0-128($inp),$acc1		# prefetch Te4
	mov	32-128($inp),$acc2
	mov	64-128($inp),$t0
	mov	96-128($inp),$t1
	mov	128-128($inp),$acc1
	mov	160-128($inp),$acc2
	mov	192-128($inp),$t0
	mov	224-128($inp),$t1
	jmp	.Lenc_loop_compact
.align	16
.Lenc_loop_compact:
	xor	0($key),$s0			# xor with key
	xor	4($key),$s1
	xor	8($key),$s2
	xor	12($key),$s3
	lea	16($key),$key
___
	&enccompactvert();
$code.=<<___;
	cmp	16(%rsp),$key
	je	.Lenc_compact_done
___
	&enctransform();
$code.=<<___;
	jmp	.Lenc_loop_compact
.align	16
.Lenc_compact_done:
	xor	0($key),$s0
	xor	4($key),$s1
	xor	8($key),$s2
	xor	12($key),$s3
	.byte	0xf3,0xc3			# rep ret
.size	_x86_64_AES_encrypt_compact,.-_x86_64_AES_encrypt_compact
___
# void asm_AES_encrypt (const void *inp,void *out,const AES_KEY *key);
#
# Public entry point.  Saves the callee-saved registers, then places a
# 32-byte frame at a 64-byte-aligned address chosen (via the 0x3c0 mask
# arithmetic against the key pointer) so the frame cannot alias the key
# schedule in L1; likewise picks one of four Te4 copies that cannot
# alias frame or schedule, encrypts one block with the compact routine
# and unwinds through the saved stack pointer at 24(%rsp).
$code.=<<___;
.align	16
.globl	asm_AES_encrypt
.type	asm_AES_encrypt,\@function,3
.hidden	asm_AES_encrypt
asm_AES_encrypt:
	push	%rbx
	push	%rbp
	push	%r12
	push	%r13
	push	%r14
	push	%r15
	# allocate frame "above" key schedule
	mov	%rsp,%r10
	lea	-63(%rdx),%rcx	# %rdx is key argument
	and	\$-64,%rsp
	sub	%rsp,%rcx
	neg	%rcx
	and	\$0x3c0,%rcx
	sub	%rcx,%rsp
	sub	\$32,%rsp
	mov	%rsi,16(%rsp)	# save out
	mov	%r10,24(%rsp)	# save real stack pointer
.Lenc_prologue:
	mov	%rdx,$key
	mov	240($key),$rnds	# load rounds
	mov	0(%rdi),$s0	# load input vector
	mov	4(%rdi),$s1
	mov	8(%rdi),$s2
	mov	12(%rdi),$s3
	shl	\$4,$rnds
	lea	($key,$rnds),%rbp
	mov	$key,(%rsp)	# key schedule
	mov	%rbp,8(%rsp)	# end of key schedule
	# pick Te4 copy which can't "overlap" with stack frame or key schedule
	lea	.LAES_Te+2048(%rip),$sbox
	lea	768(%rsp),%rbp
	sub	$sbox,%rbp
	and	\$0x300,%rbp
	lea	($sbox,%rbp),$sbox
	call	_x86_64_AES_encrypt_compact
	mov	16(%rsp),$out	# restore out
	mov	24(%rsp),%rsi	# restore saved stack pointer
	mov	$s0,0($out)	# write output vector
	mov	$s1,4($out)
	mov	$s2,8($out)
	mov	$s3,12($out)
	mov	(%rsi),%r15
	mov	8(%rsi),%r14
	mov	16(%rsi),%r13
	mov	24(%rsi),%r12
	mov	32(%rsi),%rbp
	mov	40(%rsi),%rbx
	lea	48(%rsi),%rsp
.Lenc_epilogue:
	ret
.size	asm_AES_encrypt,.-asm_AES_encrypt
___
#------------------------------------------------------------------#
# One inner decryption round, "vertical spin" variant: mirror of
# encvert but indexing follows the inverse ShiftRows pattern and the
# table is Td.  Accumulates all four output words in $t0..$t3, xors in
# the next round key and advances $key by 16.  Uses %r8d as a fourth
# temporary ($inp is lost).
sub decvert()
{ my $t3="%r8d";	# zaps $inp!

$code.=<<___;
	# favor 3-way issue Opteron pipeline...
	movzb	`&lo("$s0")`,$acc0
	movzb	`&lo("$s1")`,$acc1
	movzb	`&lo("$s2")`,$acc2
	mov	0($sbox,$acc0,8),$t0
	mov	0($sbox,$acc1,8),$t1
	mov	0($sbox,$acc2,8),$t2
	movzb	`&hi("$s3")`,$acc0
	movzb	`&hi("$s0")`,$acc1
	movzb	`&lo("$s3")`,$acc2
	xor	3($sbox,$acc0,8),$t0
	xor	3($sbox,$acc1,8),$t1
	mov	0($sbox,$acc2,8),$t3
	movzb	`&hi("$s1")`,$acc0
	shr	\$16,$s0
	movzb	`&hi("$s2")`,$acc2
	xor	3($sbox,$acc0,8),$t2
	shr	\$16,$s3
	xor	3($sbox,$acc2,8),$t3
	shr	\$16,$s1
	lea	16($key),$key
	shr	\$16,$s2
	movzb	`&lo("$s2")`,$acc0
	movzb	`&lo("$s3")`,$acc1
	movzb	`&lo("$s0")`,$acc2
	xor	2($sbox,$acc0,8),$t0
	xor	2($sbox,$acc1,8),$t1
	xor	2($sbox,$acc2,8),$t2
	movzb	`&hi("$s1")`,$acc0
	movzb	`&hi("$s2")`,$acc1
	movzb	`&lo("$s1")`,$acc2
	xor	1($sbox,$acc0,8),$t0
	xor	1($sbox,$acc1,8),$t1
	xor	2($sbox,$acc2,8),$t3
	movzb	`&hi("$s3")`,$acc0
	mov	12($key),$s3
	movzb	`&hi("$s0")`,$acc2
	xor	1($sbox,$acc0,8),$t2
	mov	0($key),$s0
	xor	1($sbox,$acc2,8),$t3
	xor	$t0,$s0
	mov	4($key),$s1
	mov	8($key),$s2
	xor	$t2,$s2
	xor	$t1,$s1
	xor	$t3,$s3
___
}
# Final decryption round, "vertical spin" variant: InvSubBytes via the
# compressed one-byte Td4 table located at 2048($sbox) (the base is
# temporarily advanced and restored at the end), bytes shifted into
# position with shl, then xor-ed with the last round key at 16($key).
# Uses %r8d as a fourth temporary ($inp is lost).
sub declastvert()
{ my $t3="%r8d";	# zaps $inp!

$code.=<<___;
	lea	2048($sbox),$sbox	# size optimization
	movzb	`&lo("$s0")`,$acc0
	movzb	`&lo("$s1")`,$acc1
	movzb	`&lo("$s2")`,$acc2
	movzb	($sbox,$acc0,1),$t0
	movzb	($sbox,$acc1,1),$t1
	movzb	($sbox,$acc2,1),$t2
	movzb	`&lo("$s3")`,$acc0
	movzb	`&hi("$s3")`,$acc1
	movzb	`&hi("$s0")`,$acc2
	movzb	($sbox,$acc0,1),$t3
	movzb	($sbox,$acc1,1),$acc1	#$t0
	movzb	($sbox,$acc2,1),$acc2	#$t1
	shl	\$8,$acc1
	shl	\$8,$acc2
	xor	$acc1,$t0
	xor	$acc2,$t1
	shr	\$16,$s3
	movzb	`&hi("$s1")`,$acc0
	movzb	`&hi("$s2")`,$acc1
	shr	\$16,$s0
	movzb	($sbox,$acc0,1),$acc0	#$t2
	movzb	($sbox,$acc1,1),$acc1	#$t3
	shl	\$8,$acc0
	shl	\$8,$acc1
	shr	\$16,$s1
	xor	$acc0,$t2
	xor	$acc1,$t3
	shr	\$16,$s2
	movzb	`&lo("$s2")`,$acc0
	movzb	`&lo("$s3")`,$acc1
	movzb	`&lo("$s0")`,$acc2
	movzb	($sbox,$acc0,1),$acc0	#$t0
	movzb	($sbox,$acc1,1),$acc1	#$t1
	movzb	($sbox,$acc2,1),$acc2	#$t2
	shl	\$16,$acc0
	shl	\$16,$acc1
	shl	\$16,$acc2
	xor	$acc0,$t0
	xor	$acc1,$t1
	xor	$acc2,$t2
	movzb	`&lo("$s1")`,$acc0
	movzb	`&hi("$s1")`,$acc1
	movzb	`&hi("$s2")`,$acc2
	movzb	($sbox,$acc0,1),$acc0	#$t3
	movzb	($sbox,$acc1,1),$acc1	#$t0
	movzb	($sbox,$acc2,1),$acc2	#$t1
	shl	\$16,$acc0
	shl	\$24,$acc1
	shl	\$24,$acc2
	xor	$acc0,$t3
	xor	$acc1,$t0
	xor	$acc2,$t1
	movzb	`&hi("$s3")`,$acc0
	movzb	`&hi("$s0")`,$acc1
	mov	16+12($key),$s3
	movzb	($sbox,$acc0,1),$acc0	#$t2
	movzb	($sbox,$acc1,1),$acc1	#$t3
	mov	16+0($key),$s0
	shl	\$24,$acc0
	shl	\$24,$acc1
	xor	$acc0,$t2
	xor	$acc1,$t3
	mov	16+4($key),$s1
	mov	16+8($key),$s2
	lea	-2048($sbox),$sbox	# undo size optimization
	xor	$t0,$s0
	xor	$t1,$s1
	xor	$t2,$s2
	xor	$t3,$s3
___
}
# One inner-round decryption step, "horizontal" variant: computes output
# word $i from the (inverse-rotated) state words @s via Td-table loads.
# Unlike encstep it does not fold in the round key — the caller emits
# the key advance and xor after all four steps.  On $i==3 the cached
# words are restored into the state (note the reversed t2/t1/t0 order).
sub decstep()
{ my ($i,@s) = @_;
  my $tmp0=$acc0;
  my $tmp1=$acc1;
  my $tmp2=$acc2;
  my $out=($t0,$t1,$t2,$s[0])[$i];	# destination for this word

	$code.="	mov	$s[0],$out\n"		if ($i!=3);
			$tmp1=$s[2]			if ($i==3);	# state words double as scratch
	$code.="	mov	$s[2],$tmp1\n"		if ($i!=3);
	$code.="	and	\$0xFF,$out\n";
	$code.="	mov	0($sbox,$out,8),$out\n";
	$code.="	shr	\$16,$tmp1\n";
			$tmp2=$s[3]			if ($i==3);
	$code.="	mov	$s[3],$tmp2\n"		if ($i!=3);
			$tmp0=$s[1]			if ($i==3);
	$code.="	movzb	".&hi($s[1]).",$tmp0\n";
	$code.="	and	\$0xFF,$tmp1\n";
	$code.="	shr	\$24,$tmp2\n";
	$code.="	xor	3($sbox,$tmp0,8),$out\n";
	$code.="	xor	2($sbox,$tmp1,8),$out\n";
	$code.="	xor	1($sbox,$tmp2,8),$out\n";
	$code.="	mov	$t2,$s[1]\n"		if ($i==3);
	$code.="	mov	$t1,$s[2]\n"		if ($i==3);
	$code.="	mov	$t0,$s[3]\n"		if ($i==3);
	$code.="\n";
}
# Final-round decryption step, "horizontal" variant: InvSubBytes via the
# compressed one-byte Td4 table at offset 2048 from $sbox, with each
# byte shifted into its lane (no InvMixColumns, no key xor here).  On
# $i==3 the cached words are restored into the state in reversed order.
sub declast()
{ my ($i,@s)=@_;
  my $tmp0=$acc0;
  my $tmp1=$acc1;
  my $tmp2=$acc2;
  my $out=($t0,$t1,$t2,$s[0])[$i];	# destination for this word

	$code.="	mov	$s[0],$out\n"		if ($i!=3);
			$tmp1=$s[2]			if ($i==3);	# state words double as scratch
	$code.="	mov	$s[2],$tmp1\n"		if ($i!=3);
	$code.="	and	\$0xFF,$out\n";
	$code.="	movzb	2048($sbox,$out,1),$out\n";
	$code.="	shr	\$16,$tmp1\n";
			$tmp2=$s[3]			if ($i==3);
	$code.="	mov	$s[3],$tmp2\n"		if ($i!=3);
			$tmp0=$s[1]			if ($i==3);
	$code.="	movzb	".&hi($s[1]).",$tmp0\n";
	$code.="	and	\$0xFF,$tmp1\n";
	$code.="	shr	\$24,$tmp2\n";
	$code.="	movzb	2048($sbox,$tmp0,1),$tmp0\n";
	$code.="	movzb	2048($sbox,$tmp1,1),$tmp1\n";
	$code.="	movzb	2048($sbox,$tmp2,1),$tmp2\n";
	$code.="	shl	\$8,$tmp0\n";
	$code.="	shl	\$16,$tmp1\n";
	$code.="	shl	\$24,$tmp2\n";
	$code.="	xor	$tmp0,$out\n";
	$code.="	mov	$t2,$s[1]\n"		if ($i==3);
	$code.="	xor	$tmp1,$out\n";
	$code.="	mov	$t1,$s[2]\n"		if ($i==3);
	$code.="	xor	$tmp2,$out\n";
	$code.="	mov	$t0,$s[3]\n"		if ($i==3);
	$code.="\n";
}
# Generate _x86_64_AES_decrypt: the full table-driven decrypt primitive.
# Inputs: state in s0..s3, key schedule pointer in key, Td tables at sbox.
# Whitens with round key 0, runs rounds-1 inner rounds, then the final
# Td4-based round; result is left in s0..s3.
$code.=<<___;
.type	_x86_64_AES_decrypt,\@abi-omnipotent
.align	16
_x86_64_AES_decrypt:
	xor	0($key),$s0			# xor with key
	xor	4($key),$s1
	xor	8($key),$s2
	xor	12($key),$s3

	mov	240($key),$rnds			# load key->rounds
	sub	\$1,$rnds			# last round is handled separately below
	jmp	.Ldec_loop
.align	16
.Ldec_loop:
___
	if ($verticalspin) { &decvert(); }
	else {	&decstep(0,$s0,$s3,$s2,$s1);	# note s3,s2,s1 order: inverse ShiftRows
		&decstep(1,$s1,$s0,$s3,$s2);
		&decstep(2,$s2,$s1,$s0,$s3);
		&decstep(3,$s3,$s2,$s1,$s0);
		$code.=<<___;
	lea	16($key),$key
	xor	0($key),$s0			# xor with key
	xor	4($key),$s1
	xor	8($key),$s2
	xor	12($key),$s3
___
	}
$code.=<<___;
	sub	\$1,$rnds
	jnz	.Ldec_loop
___
	if ($verticalspin) { &declastvert(); }
	else {	&declast(0,$s0,$s3,$s2,$s1);	# final round: Td4 lookups, no column mix
		&declast(1,$s1,$s0,$s3,$s2);
		&declast(2,$s2,$s1,$s0,$s3);
		&declast(3,$s3,$s2,$s1,$s0);
		$code.=<<___;
	xor	16+0($key),$s0			# xor with key
	xor	16+4($key),$s1
	xor	16+8($key),$s2
	xor	16+12($key),$s3
___
	}
$code.=<<___;
	.byte	0xf3,0xc3			# rep ret
.size	_x86_64_AES_decrypt,.-_x86_64_AES_decrypt
___
# deccompactvert()
#
# Emit one "compact" decryption round operating on all four state words
# at once ("vertically").  All sixteen state bytes are pushed through the
# 256-byte inverse S-box at ($sbox), with the byte selection pattern
# folding in the inverse row rotation; substituted bytes are shifted back
# into position and recombined into s0..s3.  The narrow table keeps the
# cache footprint small (timing-attack mitigation) at the cost of speed;
# column mixing is done separately by dectransform().
sub deccompactvert()
{ my ($t3,$t4,$t5)=("%r8d","%r9d","%r13d");	# extra scratch beyond t0..t2

$code.=<<___;
	movzb	`&lo("$s0")`,$t0		# byte 0 of each output word
	movzb	`&lo("$s1")`,$t1
	movzb	`&lo("$s2")`,$t2
	movzb	`&lo("$s3")`,$t3
	movzb	`&hi("$s3")`,$acc0
	movzb	`&hi("$s0")`,$acc1
	shr	\$16,$s3			# expose bytes 2..3
	movzb	`&hi("$s1")`,$acc2
	movzb	($sbox,$t0,1),$t0
	movzb	($sbox,$t1,1),$t1
	movzb	($sbox,$t2,1),$t2
	movzb	($sbox,$t3,1),$t3
	movzb	($sbox,$acc0,1),$t4	#$t0
	movzb	`&hi("$s2")`,$acc0
	movzb	($sbox,$acc1,1),$t5	#$t1
	movzb	($sbox,$acc2,1),$acc2	#$t2
	movzb	($sbox,$acc0,1),$acc0	#$t3

	shr	\$16,$s2
	shl	\$8,$t5				# byte-1 contributions
	shl	\$8,$t4
	movzb	`&lo("$s2")`,$acc1
	shr	\$16,$s0
	xor	$t4,$t0
	shr	\$16,$s1
	movzb	`&lo("$s3")`,$t4

	shl	\$8,$acc2
	xor	$t5,$t1
	shl	\$8,$acc0
	movzb	`&lo("$s0")`,$t5
	movzb	($sbox,$acc1,1),$acc1	#$t0
	xor	$acc2,$t2
	movzb	`&lo("$s1")`,$acc2

	shl	\$16,$acc1			# byte-2 contributions
	xor	$acc0,$t3
	movzb	($sbox,$t4,1),$t4	#$t1
	movzb	`&hi("$s1")`,$acc0
	movzb	($sbox,$acc2,1),$acc2	#$t3
	xor	$acc1,$t0
	movzb	($sbox,$t5,1),$t5	#$t2
	movzb	`&hi("$s2")`,$acc1

	shl	\$16,$acc2
	shl	\$16,$t4
	shl	\$16,$t5
	xor	$acc2,$t3
	movzb	`&hi("$s3")`,$acc2
	xor	$t4,$t1
	shr	\$8,$s0				# expose top byte of s0
	xor	$t5,$t2

	movzb	($sbox,$acc0,1),$acc0	#$t0
	movzb	($sbox,$acc1,1),$s1	#$t1
	movzb	($sbox,$acc2,1),$s2	#$t2
	movzb	($sbox,$s0,1),$s3	#$t3

	mov	$t0,$s0				# byte-3 contributions, results into s0..s3
	shl	\$24,$acc0
	shl	\$24,$s1
	shl	\$24,$s2
	xor	$acc0,$s0
	shl	\$24,$s3
	xor	$t1,$s1
	xor	$t2,$s2
	xor	$t3,$s3
___
}
# Parallelized version: input is a pair of 64-bit values, %rax=s1.s0 and
# %rcx=s3.s2 (two columns packed per register); output is four 32-bit
# values in %eax=s0, %ebx=s1, %ecx=s2 and %edx=s3.
#
# dectransform(prefetch)
#
# Emit the column-mixing transform used by the compact decrypt path,
# processing both 64-bit lanes in lock-step.  It builds tp2, tp4 and tp8
# from tp1 by repeated SIMD-within-a-register doubling: the mask80/
# maskfe/mask1b constants isolate the per-byte high bits, propagate the
# carry, and fold in the reduction (cf. the 0x80808080 / 0xfefefefe /
# 0x1b1b1b1b constants in deckey_ref), then combines the partial
# products with byte rotations.  If $prefetch is set, table cachelines
# are touched via mask80/maskfe/mask1b/tp80/tp88 loads interleaved with
# the final rotations.
sub dectransform()
{ my ($tp10,$tp20,$tp40,$tp80,$acc0)=("%rax","%r8", "%r9", "%r10","%rbx");
  my ($tp18,$tp28,$tp48,$tp88,$acc8)=("%rcx","%r11","%r12","%r13","%rdx");
  my $prefetch = shift;

$code.=<<___;
	mov	$mask80,$tp40
	mov	$mask80,$tp48
	and	$tp10,$tp40			# isolate high bit of every byte
	and	$tp18,$tp48
	mov	$tp40,$acc0
	mov	$tp48,$acc8
	shr	\$7,$tp40
	lea	($tp10,$tp10),$tp20		# tp2 = tp1<<1 (per-byte, fixed below)
	shr	\$7,$tp48
	lea	($tp18,$tp18),$tp28
	sub	$tp40,$acc0			# 0x80->0x7f per byte that overflowed
	sub	$tp48,$acc8
	and	$maskfe,$tp20			# clear bits shifted across byte lanes
	and	$maskfe,$tp28
	and	$mask1b,$acc0			# reduction polynomial where overflowed
	and	$mask1b,$acc8
	xor	$acc0,$tp20
	xor	$acc8,$tp28

	mov	$mask80,$tp80
	mov	$mask80,$tp88
	and	$tp20,$tp80
	and	$tp28,$tp88
	mov	$tp80,$acc0
	mov	$tp88,$acc8
	shr	\$7,$tp80
	lea	($tp20,$tp20),$tp40		# tp4 = xtime(tp2)
	shr	\$7,$tp88
	lea	($tp28,$tp28),$tp48
	sub	$tp80,$acc0
	sub	$tp88,$acc8
	and	$maskfe,$tp40
	and	$maskfe,$tp48
	and	$mask1b,$acc0
	and	$mask1b,$acc8
	xor	$acc0,$tp40
	xor	$acc8,$tp48

	mov	$mask80,$tp80
	mov	$mask80,$tp88
	and	$tp40,$tp80
	and	$tp48,$tp88
	mov	$tp80,$acc0
	mov	$tp88,$acc8
	shr	\$7,$tp80
	xor	$tp10,$tp20		# tp2^=tp1
	shr	\$7,$tp88
	xor	$tp18,$tp28		# tp2^=tp1
	sub	$tp80,$acc0
	sub	$tp88,$acc8
	lea	($tp40,$tp40),$tp80		# tp8 = xtime(tp4)
	lea	($tp48,$tp48),$tp88
	xor	$tp10,$tp40		# tp4^=tp1
	xor	$tp18,$tp48		# tp4^=tp1
	and	$maskfe,$tp80
	and	$maskfe,$tp88
	and	$mask1b,$acc0
	and	$mask1b,$acc8
	xor	$acc0,$tp80
	xor	$acc8,$tp88

	xor	$tp80,$tp10		# tp1^=tp8
	xor	$tp88,$tp18		# tp1^=tp8
	xor	$tp80,$tp20		# tp2^tp1^=tp8
	xor	$tp88,$tp28		# tp2^tp1^=tp8
	mov	$tp10,$acc0
	mov	$tp18,$acc8
	xor	$tp80,$tp40		# tp4^tp1^=tp8
	shr	\$32,$acc0			# upper column of the lane
	xor	$tp88,$tp48		# tp4^tp1^=tp8
	shr	\$32,$acc8
	xor	$tp20,$tp80		# tp8^=tp8^tp2^tp1=tp2^tp1
	rol	\$8,`&LO("$tp10")`		# ROTATE(tp1^tp8,8)
	xor	$tp28,$tp88		# tp8^=tp8^tp2^tp1=tp2^tp1
	rol	\$8,`&LO("$tp18")`		# ROTATE(tp1^tp8,8)
	xor	$tp40,$tp80		# tp2^tp1^=tp8^tp4^tp1=tp8^tp4^tp2
	rol	\$8,`&LO("$acc0")`		# ROTATE(tp1^tp8,8)
	xor	$tp48,$tp88		# tp2^tp1^=tp8^tp4^tp1=tp8^tp4^tp2
	rol	\$8,`&LO("$acc8")`		# ROTATE(tp1^tp8,8)
	xor	`&LO("$tp80")`,`&LO("$tp10")`
	shr	\$32,$tp80
	xor	`&LO("$tp88")`,`&LO("$tp18")`
	shr	\$32,$tp88
	xor	`&LO("$tp80")`,`&LO("$acc0")`
	xor	`&LO("$tp88")`,`&LO("$acc8")`

	mov	$tp20,$tp80
	rol	\$24,`&LO("$tp20")`		# ROTATE(tp2^tp1^tp8,24)
	mov	$tp28,$tp88
	rol	\$24,`&LO("$tp28")`		# ROTATE(tp2^tp1^tp8,24)
	shr	\$32,$tp80
	xor	`&LO("$tp20")`,`&LO("$tp10")`
	shr	\$32,$tp88
	xor	`&LO("$tp28")`,`&LO("$tp18")`
	rol	\$24,`&LO("$tp80")`		# ROTATE(tp2^tp1^tp8,24)
	mov	$tp40,$tp20
	rol	\$24,`&LO("$tp88")`		# ROTATE(tp2^tp1^tp8,24)
	mov	$tp48,$tp28
	shr	\$32,$tp20
	xor	`&LO("$tp80")`,`&LO("$acc0")`
	shr	\$32,$tp28
	xor	`&LO("$tp88")`,`&LO("$acc8")`

	`"mov	0($sbox),$mask80"	if ($prefetch)`
	rol	\$16,`&LO("$tp40")`		# ROTATE(tp4^tp1^tp8,16)
	`"mov	64($sbox),$maskfe"	if ($prefetch)`
	rol	\$16,`&LO("$tp48")`		# ROTATE(tp4^tp1^tp8,16)
	`"mov	128($sbox),$mask1b"	if ($prefetch)`
	rol	\$16,`&LO("$tp20")`		# ROTATE(tp4^tp1^tp8,16)
	`"mov	192($sbox),$tp80"	if ($prefetch)`
	xor	`&LO("$tp40")`,`&LO("$tp10")`
	rol	\$16,`&LO("$tp28")`		# ROTATE(tp4^tp1^tp8,16)
	xor	`&LO("$tp48")`,`&LO("$tp18")`
	`"mov	256($sbox),$tp88"	if ($prefetch)`
	xor	`&LO("$tp20")`,`&LO("$acc0")`
	xor	`&LO("$tp28")`,`&LO("$acc8")`
___
}
# Generate _x86_64_AES_decrypt_compact: cache-timing-hardened decrypt
# primitive using only the 256-byte Td4 S-box (prefetched below) plus
# the dectransform() column mix.  The caller leaves the end-of-schedule
# pointer in its 8(%rsp) slot, which is 16(%rsp) here after the call.
$code.=<<___;
.type	_x86_64_AES_decrypt_compact,\@abi-omnipotent
.align	16
_x86_64_AES_decrypt_compact:
	lea	128($sbox),$inp			# size optimization
	mov	0-128($inp),$acc1		# prefetch Td4
	mov	32-128($inp),$acc2
	mov	64-128($inp),$t0
	mov	96-128($inp),$t1
	mov	128-128($inp),$acc1
	mov	160-128($inp),$acc2
	mov	192-128($inp),$t0
	mov	224-128($inp),$t1
	jmp	.Ldec_loop_compact

.align	16
.Ldec_loop_compact:
		xor	0($key),$s0		# xor with key
		xor	4($key),$s1
		xor	8($key),$s2
		xor	12($key),$s3
		lea	16($key),$key
___
		&deccompactvert();
$code.=<<___;
		cmp	16(%rsp),$key		# reached end of key schedule?
		je	.Ldec_compact_done

		mov	256+0($sbox),$mask80	# constants stored right after Td4
		shl	\$32,%rbx
		shl	\$32,%rdx
		mov	256+8($sbox),$maskfe
		or	%rbx,%rax		# repack state into rax=s1.s0
		or	%rdx,%rcx		# and rcx=s3.s2 for dectransform
		mov	256+16($sbox),$mask1b
___
		&dectransform(1);
$code.=<<___;
	jmp	.Ldec_loop_compact
.align	16
.Ldec_compact_done:
	xor	0($key),$s0
	xor	4($key),$s1
	xor	8($key),$s2
	xor	12($key),$s3
	.byte	0xf3,0xc3			# rep ret
.size	_x86_64_AES_decrypt_compact,.-_x86_64_AES_decrypt_compact
___
# void asm_AES_decrypt (const void *inp,void *out,const AES_KEY *key);
#
# Public single-block decrypt entry point.  Saves callee-saved registers,
# builds an aligned stack frame positioned relative to the key schedule
# (so the two cannot collide in cache sets), selects a Td4 copy that does
# not alias the frame or schedule modulo cache geometry, and dispatches
# to the compact decrypt primitive.
$code.=<<___;
.align	16
.globl	asm_AES_decrypt
.type	asm_AES_decrypt,\@function,3
.hidden	asm_AES_decrypt
asm_AES_decrypt:
	push	%rbx
	push	%rbp
	push	%r12
	push	%r13
	push	%r14
	push	%r15

	# allocate frame "above" key schedule
	mov	%rsp,%r10
	lea	-63(%rdx),%rcx	# %rdx is key argument
	and	\$-64,%rsp
	sub	%rsp,%rcx
	neg	%rcx
	and	\$0x3c0,%rcx
	sub	%rcx,%rsp
	sub	\$32,%rsp

	mov	%rsi,16(%rsp)	# save out
	mov	%r10,24(%rsp)	# save real stack pointer
.Ldec_prologue:

	mov	%rdx,$key
	mov	240($key),$rnds	# load rounds

	mov	0(%rdi),$s0	# load input vector
	mov	4(%rdi),$s1
	mov	8(%rdi),$s2
	mov	12(%rdi),$s3

	shl	\$4,$rnds
	lea	($key,$rnds),%rbp
	mov	$key,(%rsp)	# key schedule
	mov	%rbp,8(%rsp)	# end of key schedule

	# pick Td4 copy which can't "overlap" with stack frame or key schedule
	lea	.LAES_Td+2048(%rip),$sbox
	lea	768(%rsp),%rbp
	sub	$sbox,%rbp
	and	\$0x300,%rbp
	lea	($sbox,%rbp),$sbox
	shr	\$3,%rbp	# recall "magic" constants!
	add	%rbp,$sbox

	call	_x86_64_AES_decrypt_compact

	mov	16(%rsp),$out	# restore out
	mov	24(%rsp),%rsi	# restore saved stack pointer
	mov	$s0,0($out)	# write output vector
	mov	$s1,4($out)
	mov	$s2,8($out)
	mov	$s3,12($out)

	# pop callee-saved registers from the saved (pre-alignment) frame
	mov	(%rsi),%r15
	mov	8(%rsi),%r14
	mov	16(%rsi),%r13
	mov	24(%rsi),%r12
	mov	32(%rsi),%rbp
	mov	40(%rsi),%rbx
	lea	48(%rsi),%rsp
.Ldec_epilogue:
	ret
.size	asm_AES_decrypt,.-asm_AES_decrypt
___
  1103. #------------------------------------------------------------------#
# enckey()
#
# Emit the core of one key-expansion step: computes
#     eax ^= SubWord(RotWord(edx)) ^ rcon[ecx]
# using the 256-byte Te4 copy addressed at -128(%rbp) and the rcon
# table at 1024-128(%rbp).  The RotWord rotation is folded into the
# shifts: source byte 0 lands in bits 24-31, byte 1 in bits 0-7,
# byte 2 in bits 8-15 and byte 3 in bits 16-23.
# In: eax = rk[0], edx = previous round key word, ecx = round counter.
# Clobbers: ebx, esi, edx.
sub enckey()
{
$code.=<<___;
	movz	%dl,%esi		# rk[i]>>0
	movzb	-128(%rbp,%rsi),%ebx
	movz	%dh,%esi		# rk[i]>>8
	shl	\$24,%ebx
	xor	%ebx,%eax

	movzb	-128(%rbp,%rsi),%ebx
	shr	\$16,%edx
	movz	%dl,%esi		# rk[i]>>16
	xor	%ebx,%eax

	movzb	-128(%rbp,%rsi),%ebx
	movz	%dh,%esi		# rk[i]>>24
	shl	\$8,%ebx
	xor	%ebx,%eax

	movzb	-128(%rbp,%rsi),%ebx
	shl	\$16,%ebx
	xor	%ebx,%eax

	xor	1024-128(%rbp,%rcx,4),%eax		# rcon
___
}
# int asm_AES_set_encrypt_key(const unsigned char *userKey, const int bits, AES_KEY *key)
#
# Public entry saves registers and delegates to the internal routine,
# which expands a 128/192/256-bit user key into the AES_KEY schedule.
# Returns 0 on success, -1 on NULL pointer, -2 on invalid bit count.
$code.=<<___;
.align	16
.globl	asm_AES_set_encrypt_key
.type	asm_AES_set_encrypt_key,\@function,3
asm_AES_set_encrypt_key:
	push	%rbx
	push	%rbp
	push	%r12			# redundant, but allows to share
	push	%r13			# exception handler...
	push	%r14
	push	%r15
	sub	\$8,%rsp
.Lenc_key_prologue:

	call	_x86_64_AES_set_encrypt_key

	mov	40(%rsp),%rbp
	mov	48(%rsp),%rbx
	add	\$56,%rsp
.Lenc_key_epilogue:
	ret
.size	asm_AES_set_encrypt_key,.-asm_AES_set_encrypt_key

.type	_x86_64_AES_set_encrypt_key,\@abi-omnipotent
.align	16
_x86_64_AES_set_encrypt_key:
	mov	%esi,%ecx			# %ecx=bits
	mov	%rdi,%rsi			# %rsi=userKey
	mov	%rdx,%rdi			# %rdi=key
	test	\$-1,%rsi			# NULL check (sets ZF iff %rsi==0)
	jz	.Lbadpointer
	test	\$-1,%rdi
	jz	.Lbadpointer

	lea	.LAES_Te(%rip),%rbp
	lea	2048+128(%rbp),%rbp		# %rbp-128 -> 256-byte Te4 copy

	# prefetch Te4
	mov	0-128(%rbp),%eax
	mov	32-128(%rbp),%ebx
	mov	64-128(%rbp),%r8d
	mov	96-128(%rbp),%edx
	mov	128-128(%rbp),%eax
	mov	160-128(%rbp),%ebx
	mov	192-128(%rbp),%r8d
	mov	224-128(%rbp),%edx

	cmp	\$128,%ecx
	je	.L10rounds
	cmp	\$192,%ecx
	je	.L12rounds
	cmp	\$256,%ecx
	je	.L14rounds
	mov	\$-2,%rax			# invalid number of bits
	jmp	.Lexit

.L10rounds:
	mov	0(%rsi),%rax			# copy first 4 dwords
	mov	8(%rsi),%rdx
	mov	%rax,0(%rdi)
	mov	%rdx,8(%rdi)

	shr	\$32,%rdx			# %edx = rk[3]
	xor	%ecx,%ecx			# round counter for rcon
	jmp	.L10shortcut
.align	4
.L10loop:
		mov	0(%rdi),%eax			# rk[0]
		mov	12(%rdi),%edx			# rk[3]
.L10shortcut:
___
		&enckey	();
$code.=<<___;
		mov	%eax,16(%rdi)			# rk[4]
		xor	4(%rdi),%eax
		mov	%eax,20(%rdi)			# rk[5]
		xor	8(%rdi),%eax
		mov	%eax,24(%rdi)			# rk[6]
		xor	12(%rdi),%eax
		mov	%eax,28(%rdi)			# rk[7]
		add	\$1,%ecx
		lea	16(%rdi),%rdi
		cmp	\$10,%ecx
	jl	.L10loop

	movl	\$10,80(%rdi)			# setup number of rounds
	xor	%rax,%rax
	jmp	.Lexit

.L12rounds:
	mov	0(%rsi),%rax			# copy first 6 dwords
	mov	8(%rsi),%rbx
	mov	16(%rsi),%rdx
	mov	%rax,0(%rdi)
	mov	%rbx,8(%rdi)
	mov	%rdx,16(%rdi)

	shr	\$32,%rdx			# %edx = rk[5]
	xor	%ecx,%ecx
	jmp	.L12shortcut
.align	4
.L12loop:
		mov	0(%rdi),%eax			# rk[0]
		mov	20(%rdi),%edx			# rk[5]
.L12shortcut:
___
		&enckey	();
$code.=<<___;
		mov	%eax,24(%rdi)			# rk[6]
		xor	4(%rdi),%eax
		mov	%eax,28(%rdi)			# rk[7]
		xor	8(%rdi),%eax
		mov	%eax,32(%rdi)			# rk[8]
		xor	12(%rdi),%eax
		mov	%eax,36(%rdi)			# rk[9]

		cmp	\$7,%ecx			# last (8th) iteration is shorter
		je	.L12break
		add	\$1,%ecx

		xor	16(%rdi),%eax
		mov	%eax,40(%rdi)			# rk[10]
		xor	20(%rdi),%eax
		mov	%eax,44(%rdi)			# rk[11]

		lea	24(%rdi),%rdi
	jmp	.L12loop
.L12break:
	movl	\$12,72(%rdi)		# setup number of rounds
	xor	%rax,%rax
	jmp	.Lexit

.L14rounds:
	mov	0(%rsi),%rax			# copy first 8 dwords
	mov	8(%rsi),%rbx
	mov	16(%rsi),%rcx
	mov	24(%rsi),%rdx
	mov	%rax,0(%rdi)
	mov	%rbx,8(%rdi)
	mov	%rcx,16(%rdi)
	mov	%rdx,24(%rdi)

	shr	\$32,%rdx			# %edx = rk[7]
	xor	%ecx,%ecx
	jmp	.L14shortcut
.align	4
.L14loop:
		mov	0(%rdi),%eax			# rk[0]
		mov	28(%rdi),%edx			# rk[4]
.L14shortcut:
___
		&enckey	();
$code.=<<___;
		mov	%eax,32(%rdi)			# rk[8]
		xor	4(%rdi),%eax
		mov	%eax,36(%rdi)			# rk[9]
		xor	8(%rdi),%eax
		mov	%eax,40(%rdi)			# rk[10]
		xor	12(%rdi),%eax
		mov	%eax,44(%rdi)			# rk[11]

		cmp	\$6,%ecx			# last (7th) iteration is shorter
		je	.L14break
		add	\$1,%ecx

		# AES-256 extra step: SubWord (no rotation) on rk[11]
		mov	%eax,%edx
		mov	16(%rdi),%eax			# rk[4]
		movz	%dl,%esi			# rk[11]>>0
		movzb	-128(%rbp,%rsi),%ebx
		movz	%dh,%esi			# rk[11]>>8
		xor	%ebx,%eax

		movzb	-128(%rbp,%rsi),%ebx
		shr	\$16,%edx
		shl	\$8,%ebx
		movz	%dl,%esi			# rk[11]>>16
		xor	%ebx,%eax

		movzb	-128(%rbp,%rsi),%ebx
		movz	%dh,%esi			# rk[11]>>24
		shl	\$16,%ebx
		xor	%ebx,%eax

		movzb	-128(%rbp,%rsi),%ebx
		shl	\$24,%ebx
		xor	%ebx,%eax

		mov	%eax,48(%rdi)			# rk[12]
		xor	20(%rdi),%eax
		mov	%eax,52(%rdi)			# rk[13]
		xor	24(%rdi),%eax
		mov	%eax,56(%rdi)			# rk[14]
		xor	28(%rdi),%eax
		mov	%eax,60(%rdi)			# rk[15]
		lea	32(%rdi),%rdi
	jmp	.L14loop
.L14break:
	movl	\$14,48(%rdi)		# setup number of rounds
	xor	%rax,%rax
	jmp	.Lexit

.Lbadpointer:
	mov	\$-1,%rax
.Lexit:
	.byte	0xf3,0xc3			# rep ret
.size	_x86_64_AES_set_encrypt_key,.-_x86_64_AES_set_encrypt_key
___
# deckey_ref(i, ptr, te, td)
#
# Emit the scalar (single 32-bit word) reference transform applied
# in-place to the key-schedule word at offset $i($ptr): the same
# xtime-chain construction as dectransform(), one lane only.
# Builds tp2=xtime(tp1), tp4=xtime(tp2), tp8=xtime(tp4) with the
# 0x80808080/0xfefefefe/0x1b1b1b1b per-byte carry/reduction masks,
# then combines tp8^tp4^tp2 variants with byte rotations.
# Note: the $te and $td parameters are accepted but unused here.
# Clobbers: eax, ebx, edi, edx, r8d.
sub deckey_ref()
{ my ($i,$ptr,$te,$td) = @_;
  my ($tp1,$tp2,$tp4,$tp8,$acc)=("%eax","%ebx","%edi","%edx","%r8d");
$code.=<<___;
	mov	$i($ptr),$tp1
	mov	$tp1,$acc
	and	\$0x80808080,$acc
	mov	$acc,$tp4
	shr	\$7,$tp4
	lea	0($tp1,$tp1),$tp2		# tp2 = xtime(tp1) ...
	sub	$tp4,$acc
	and	\$0xfefefefe,$tp2
	and	\$0x1b1b1b1b,$acc
	xor	$tp2,$acc
	mov	$acc,$tp2

	and	\$0x80808080,$acc
	mov	$acc,$tp8
	shr	\$7,$tp8
	lea	0($tp2,$tp2),$tp4		# tp4 = xtime(tp2)
	sub	$tp8,$acc
	and	\$0xfefefefe,$tp4
	and	\$0x1b1b1b1b,$acc
	xor	$tp1,$tp2		# tp2^tp1
	xor	$tp4,$acc
	mov	$acc,$tp4

	and	\$0x80808080,$acc
	mov	$acc,$tp8
	shr	\$7,$tp8
	sub	$tp8,$acc
	lea	0($tp4,$tp4),$tp8		# tp8 = xtime(tp4)
	xor	$tp1,$tp4		# tp4^tp1
	and	\$0xfefefefe,$tp8
	and	\$0x1b1b1b1b,$acc
	xor	$acc,$tp8

	xor	$tp8,$tp1		# tp1^tp8
	rol	\$8,$tp1		# ROTATE(tp1^tp8,8)
	xor	$tp8,$tp2		# tp2^tp1^tp8
	xor	$tp8,$tp4		# tp4^tp1^tp8
	xor	$tp2,$tp8
	xor	$tp4,$tp8		# tp8^(tp8^tp4^tp1)^(tp8^tp2^tp1)=tp8^tp4^tp2
	xor	$tp8,$tp1
	rol	\$24,$tp2		# ROTATE(tp2^tp1^tp8,24)
	xor	$tp2,$tp1
	rol	\$16,$tp4		# ROTATE(tp4^tp1^tp8,16)
	xor	$tp4,$tp1

	mov	$tp1,$i($ptr)
___
}
# int asm_AES_set_decrypt_key(const unsigned char *userKey, const int bits, AES_KEY *key)
#
# Builds the decryption key schedule: first runs the encrypt expansion,
# then reverses the order of the 16-byte round keys (.Linvert) and
# applies the inverse column transform (dectransform, two words per
# 64-bit lane) to every round key except the first and last (.Lpermute).
# Returns the _x86_64_AES_set_encrypt_key status unchanged on failure.
$code.=<<___;
.align	16
.globl	asm_AES_set_decrypt_key
.type	asm_AES_set_decrypt_key,\@function,3
asm_AES_set_decrypt_key:
	push	%rbx
	push	%rbp
	push	%r12
	push	%r13
	push	%r14
	push	%r15
	push	%rdx				# save key schedule
.Ldec_key_prologue:

	call	_x86_64_AES_set_encrypt_key
	mov	(%rsp),%r8			# restore key schedule
	cmp	\$0,%eax
	jne	.Labort

	mov	240(%r8),%r14d			# pull number of rounds
	xor	%rdi,%rdi
	lea	(%rdi,%r14d,4),%rcx		# %rcx = rounds*4 (words)
	mov	%r8,%rsi
	lea	(%r8,%rcx,4),%rdi		# pointer to last chunk
.align	4
.Linvert:
		# swap 16-byte round keys front<->back
		mov	0(%rsi),%rax
		mov	8(%rsi),%rbx
		mov	0(%rdi),%rcx
		mov	8(%rdi),%rdx
		mov	%rax,0(%rdi)
		mov	%rbx,8(%rdi)
		mov	%rcx,0(%rsi)
		mov	%rdx,8(%rsi)
		lea	16(%rsi),%rsi
		lea	-16(%rdi),%rdi
		cmp	%rsi,%rdi
	jne	.Linvert

	lea	.LAES_Te+2048+1024(%rip),%rax	# rcon
	mov	40(%rax),$mask80		# load dectransform constants
	mov	48(%rax),$maskfe
	mov	56(%rax),$mask1b
	mov	%r8,$key
	sub	\$1,%r14d			# first/last round keys stay untransformed
.align	4
.Lpermute:
		lea	16($key),$key
		mov	0($key),%rax		# rax = s1.s0
		mov	8($key),%rcx		# rcx = s3.s2
___
		&dectransform ();
$code.=<<___;
		mov	%eax,0($key)		# dectransform output: eax,ebx,ecx,edx
		mov	%ebx,4($key)
		mov	%ecx,8($key)
		mov	%edx,12($key)
		sub	\$1,%r14d
	jnz	.Lpermute

	xor	%rax,%rax			# success
.Labort:
	mov	8(%rsp),%r15
	mov	16(%rsp),%r14
	mov	24(%rsp),%r13
	mov	32(%rsp),%r12
	mov	40(%rsp),%rbp
	mov	48(%rsp),%rbx
	add	\$56,%rsp
.Ldec_key_epilogue:
	ret
.size	asm_AES_set_decrypt_key,.-asm_AES_set_decrypt_key
___
# void asm_AES_cbc_encrypt (const unsigned char *inp, unsigned char *out,
#			size_t length, const AES_KEY *key,
#			unsigned char *ivp,const int enc);
#
# CBC mode with two internal paths: a "fast" path using the big T-tables
# (taken for long, 16-byte-multiple inputs when bit 28 of
# OPENSSL_ia32cap_P is clear) and a "slow" compact-table path that also
# handles ragged tails.  Both build a cache-aware stack frame described
# by the slot names below.
{
# stack frame layout
# -8(%rsp)		return address
my $keyp="0(%rsp)";		# one to pass as $key
my $keyend="8(%rsp)";		# &(keyp->rd_key[4*keyp->rounds])
my $_rsp="16(%rsp)";		# saved %rsp
my $_inp="24(%rsp)";		# copy of 1st parameter, inp
my $_out="32(%rsp)";		# copy of 2nd parameter, out
my $_len="40(%rsp)";		# copy of 3rd parameter, length
my $_key="48(%rsp)";		# copy of 4th parameter, key
my $_ivp="56(%rsp)";		# copy of 5th parameter, ivp
my $ivec="64(%rsp)";		# ivec[16]
my $aes_key="80(%rsp)";		# copy of aes_key
my $mark="80+240(%rsp)";	# copy of aes_key->rounds

$code.=<<___;
.align	16
.globl	asm_AES_cbc_encrypt
.type	asm_AES_cbc_encrypt,\@function,6
.extern	OPENSSL_ia32cap_P
.hidden	asm_AES_cbc_encrypt
asm_AES_cbc_encrypt:
	cmp	\$0,%rdx	# check length
	je	.Lcbc_epilogue
	pushfq
	push	%rbx
	push	%rbp
	push	%r12
	push	%r13
	push	%r14
	push	%r15
.Lcbc_prologue:

	cld
	mov	%r9d,%r9d	# clear upper half of enc

	lea	.LAES_Te(%rip),$sbox
	cmp	\$0,%r9
	jne	.Lcbc_picked_te
	lea	.LAES_Td(%rip),$sbox
.Lcbc_picked_te:

	mov	OPENSSL_ia32cap_P(%rip),%r10d
	cmp	\$$speed_limit,%rdx
	jb	.Lcbc_slow_prologue
	test	\$15,%rdx
	jnz	.Lcbc_slow_prologue
	bt	\$28,%r10d
	jc	.Lcbc_slow_prologue

	# allocate aligned stack frame...
	lea	-88-248(%rsp),$key
	and	\$-64,$key

	# ... and make sure it doesn't alias with AES_T[ed] modulo 4096
	mov	$sbox,%r10
	lea	2304($sbox),%r11
	mov	$key,%r12
	and	\$0xFFF,%r10	# s = $sbox&0xfff
	and	\$0xFFF,%r11	# e = ($sbox+2048)&0xfff
	and	\$0xFFF,%r12	# p = %rsp&0xfff

	cmp	%r11,%r12	# if (p=>e) %rsp =- (p-e);
	jb	.Lcbc_te_break_out
	sub	%r11,%r12
	sub	%r12,$key
	jmp	.Lcbc_te_ok
.Lcbc_te_break_out:		# else %rsp -= (p-s)&0xfff + framesz
	sub	%r10,%r12
	and	\$0xFFF,%r12
	add	\$320,%r12
	sub	%r12,$key
.align	4
.Lcbc_te_ok:

	xchg	%rsp,$key
	#add	\$8,%rsp	# reserve for return address!
	mov	$key,$_rsp	# save %rsp
.Lcbc_fast_body:
	mov	%rdi,$_inp	# save copy of inp
	mov	%rsi,$_out	# save copy of out
	mov	%rdx,$_len	# save copy of len
	mov	%rcx,$_key	# save copy of key
	mov	%r8,$_ivp	# save copy of ivp
	movl	\$0,$mark	# copy of aes_key->rounds = 0;
	mov	%r8,%rbp	# rearrange input arguments
	mov	%r9,%rbx
	mov	%rsi,$out
	mov	%rdi,$inp
	mov	%rcx,$key

	mov	240($key),%eax		# key->rounds
	# do we copy key schedule to stack?
	mov	$key,%r10
	sub	$sbox,%r10
	and	\$0xfff,%r10
	cmp	\$2304,%r10
	jb	.Lcbc_do_ecopy
	cmp	\$4096-248,%r10
	jb	.Lcbc_skip_ecopy
.align	4
.Lcbc_do_ecopy:
		# key schedule would alias the tables in cache: copy it to frame
		mov	$key,%rsi
		lea	$aes_key,%rdi
		lea	$aes_key,$key
		mov	\$240/8,%ecx
		.long	0x90A548F3	# rep movsq
		mov	%eax,(%rdi)	# copy aes_key->rounds
.Lcbc_skip_ecopy:
	mov	$key,$keyp	# save key pointer

	mov	\$18,%ecx	# 18*128 = 2304 bytes of tables
.align	4
.Lcbc_prefetch_te:
		mov	0($sbox),%r10
		mov	32($sbox),%r11
		mov	64($sbox),%r12
		mov	96($sbox),%r13
		lea	128($sbox),$sbox
		sub	\$1,%ecx
	jnz	.Lcbc_prefetch_te
	lea	-2304($sbox),$sbox

	cmp	\$0,%rbx
	je	.LFAST_DECRYPT

#----------------------------- ENCRYPT -----------------------------#
	mov	0(%rbp),$s0		# load iv
	mov	4(%rbp),$s1
	mov	8(%rbp),$s2
	mov	12(%rbp),$s3

.align	4
.Lcbc_fast_enc_loop:
		xor	0($inp),$s0
		xor	4($inp),$s1
		xor	8($inp),$s2
		xor	12($inp),$s3
		mov	$keyp,$key	# restore key
		mov	$inp,$_inp	# if ($verticalspin) save inp

		call	_x86_64_AES_encrypt

		mov	$_inp,$inp	# if ($verticalspin) restore inp
		mov	$_len,%r10
		mov	$s0,0($out)
		mov	$s1,4($out)
		mov	$s2,8($out)
		mov	$s3,12($out)

		lea	16($inp),$inp
		lea	16($out),$out
		sub	\$16,%r10
		test	\$-16,%r10
		mov	%r10,$_len
	jnz	.Lcbc_fast_enc_loop
	mov	$_ivp,%rbp	# restore ivp
	mov	$s0,0(%rbp)	# save ivec
	mov	$s1,4(%rbp)
	mov	$s2,8(%rbp)
	mov	$s3,12(%rbp)

	jmp	.Lcbc_fast_cleanup

#----------------------------- DECRYPT -----------------------------#
.align	16
.LFAST_DECRYPT:
	cmp	$inp,$out
	je	.Lcbc_fast_dec_in_place
	mov	%rbp,$ivec
.align	4
.Lcbc_fast_dec_loop:
		mov	0($inp),$s0	# read input
		mov	4($inp),$s1
		mov	8($inp),$s2
		mov	12($inp),$s3
		mov	$keyp,$key	# restore key
		mov	$inp,$_inp	# if ($verticalspin) save inp

		call	_x86_64_AES_decrypt

		mov	$ivec,%rbp	# load ivp
		mov	$_inp,$inp	# if ($verticalspin) restore inp
		mov	$_len,%r10	# load len
		xor	0(%rbp),$s0	# xor iv
		xor	4(%rbp),$s1
		xor	8(%rbp),$s2
		xor	12(%rbp),$s3
		mov	$inp,%rbp	# current input, next iv

		sub	\$16,%r10
		mov	%r10,$_len	# update len
		mov	%rbp,$ivec	# update ivp
		mov	$s0,0($out)	# write output
		mov	$s1,4($out)
		mov	$s2,8($out)
		mov	$s3,12($out)
		lea	16($inp),$inp
		lea	16($out),$out
	jnz	.Lcbc_fast_dec_loop
	mov	$_ivp,%r12		# load user ivp
	mov	0(%rbp),%r10		# load iv
	mov	8(%rbp),%r11
	mov	%r10,0(%r12)		# copy back to user
	mov	%r11,8(%r12)
	jmp	.Lcbc_fast_cleanup

.align	16
.Lcbc_fast_dec_in_place:
	mov	0(%rbp),%r10		# copy iv to stack
	mov	8(%rbp),%r11
	mov	%r10,0+$ivec
	mov	%r11,8+$ivec
.align	4
.Lcbc_fast_dec_in_place_loop:
		mov	0($inp),$s0	# load input
		mov	4($inp),$s1
		mov	8($inp),$s2
		mov	12($inp),$s3
		mov	$keyp,$key	# restore key
		mov	$inp,$_inp	# if ($verticalspin) save inp

		call	_x86_64_AES_decrypt

		mov	$_inp,$inp	# if ($verticalspin) restore inp
		mov	$_len,%r10
		xor	0+$ivec,$s0
		xor	4+$ivec,$s1
		xor	8+$ivec,$s2
		xor	12+$ivec,$s3
		mov	0($inp),%r11	# load input
		mov	8($inp),%r12
		sub	\$16,%r10
		jz	.Lcbc_fast_dec_in_place_done

		mov	%r11,0+$ivec	# copy input to iv
		mov	%r12,8+$ivec

		mov	$s0,0($out)	# save output [zaps input]
		mov	$s1,4($out)
		mov	$s2,8($out)
		mov	$s3,12($out)

		lea	16($inp),$inp
		lea	16($out),$out
		mov	%r10,$_len
	jmp	.Lcbc_fast_dec_in_place_loop
.Lcbc_fast_dec_in_place_done:
	mov	$_ivp,%rdi
	mov	%r11,0(%rdi)	# copy iv back to user
	mov	%r12,8(%rdi)

	mov	$s0,0($out)	# save output [zaps input]
	mov	$s1,4($out)
	mov	$s2,8($out)
	mov	$s3,12($out)

.align	4
.Lcbc_fast_cleanup:
	cmpl	\$0,$mark	# was the key schedule copied?
	lea	$aes_key,%rdi
	je	.Lcbc_exit
		# wipe on-stack key schedule copy
		mov	\$240/8,%ecx
		xor	%rax,%rax
		.long	0x90AB48F3	# rep stosq

	jmp	.Lcbc_exit

#--------------------------- SLOW ROUTINE ---------------------------#
.align	16
.Lcbc_slow_prologue:
	# allocate aligned stack frame...
	lea	-88(%rsp),%rbp
	and	\$-64,%rbp
	# ... just "above" key schedule
	lea	-88-63(%rcx),%r10
	sub	%rbp,%r10
	neg	%r10
	and	\$0x3c0,%r10
	sub	%r10,%rbp

	xchg	%rsp,%rbp
	#add	\$8,%rsp	# reserve for return address!
	mov	%rbp,$_rsp	# save %rsp
.Lcbc_slow_body:
	#mov	%rdi,$_inp	# save copy of inp
	#mov	%rsi,$_out	# save copy of out
	#mov	%rdx,$_len	# save copy of len
	#mov	%rcx,$_key	# save copy of key
	mov	%r8,$_ivp	# save copy of ivp
	mov	%r8,%rbp	# rearrange input arguments
	mov	%r9,%rbx
	mov	%rsi,$out
	mov	%rdi,$inp
	mov	%rcx,$key
	mov	%rdx,%r10

	mov	240($key),%eax
	mov	$key,$keyp	# save key pointer
	shl	\$4,%eax
	lea	($key,%rax),%rax
	mov	%rax,$keyend

	# pick Te4 copy which can't "overlap" with stack frame or key schedule
	lea	2048($sbox),$sbox
	lea	768-8(%rsp),%rax
	sub	$sbox,%rax
	and	\$0x300,%rax
	lea	($sbox,%rax),$sbox

	cmp	\$0,%rbx
	je	.LSLOW_DECRYPT

#--------------------------- SLOW ENCRYPT ---------------------------#
	test	\$-16,%r10		# check upon length
	mov	0(%rbp),$s0		# load iv
	mov	4(%rbp),$s1
	mov	8(%rbp),$s2
	mov	12(%rbp),$s3
	jz	.Lcbc_slow_enc_tail	# short input...

.align	4
.Lcbc_slow_enc_loop:
		xor	0($inp),$s0
		xor	4($inp),$s1
		xor	8($inp),$s2
		xor	12($inp),$s3
		mov	$keyp,$key	# restore key
		mov	$inp,$_inp	# save inp
		mov	$out,$_out	# save out
		mov	%r10,$_len	# save len

		call	_x86_64_AES_encrypt_compact

		mov	$_inp,$inp	# restore inp
		mov	$_out,$out	# restore out
		mov	$_len,%r10	# restore len
		mov	$s0,0($out)
		mov	$s1,4($out)
		mov	$s2,8($out)
		mov	$s3,12($out)

		lea	16($inp),$inp
		lea	16($out),$out
		sub	\$16,%r10
		test	\$-16,%r10
	jnz	.Lcbc_slow_enc_loop
	test	\$15,%r10
	jnz	.Lcbc_slow_enc_tail
	mov	$_ivp,%rbp	# restore ivp
	mov	$s0,0(%rbp)	# save ivec
	mov	$s1,4(%rbp)
	mov	$s2,8(%rbp)
	mov	$s3,12(%rbp)

	jmp	.Lcbc_exit

.align	4
.Lcbc_slow_enc_tail:
	# ragged tail: copy remainder to out, zero-pad to 16 bytes,
	# then loop once more over the padded block in place
	mov	%rax,%r11
	mov	%rcx,%r12
	mov	%r10,%rcx
	mov	$inp,%rsi
	mov	$out,%rdi
	.long	0x9066A4F3		# rep movsb
	mov	\$16,%rcx		# zero tail
	sub	%r10,%rcx
	xor	%rax,%rax
	.long	0x9066AAF3		# rep stosb
	mov	$out,$inp		# this is not a mistake!
	mov	\$16,%r10		# len=16
	mov	%r11,%rax
	mov	%r12,%rcx
	jmp	.Lcbc_slow_enc_loop	# one more spin...
#--------------------------- SLOW DECRYPT ---------------------------#
.align	16
.LSLOW_DECRYPT:
	shr	\$3,%rax
	add	%rax,$sbox		# recall "magic" constants!
	mov	0(%rbp),%r11		# copy iv to stack
	mov	8(%rbp),%r12
	mov	%r11,0+$ivec
	mov	%r12,8+$ivec

.align	4
.Lcbc_slow_dec_loop:
		mov	0($inp),$s0	# load input
		mov	4($inp),$s1
		mov	8($inp),$s2
		mov	12($inp),$s3
		mov	$keyp,$key	# restore key
		mov	$inp,$_inp	# save inp
		mov	$out,$_out	# save out
		mov	%r10,$_len	# save len

		call	_x86_64_AES_decrypt_compact

		mov	$_inp,$inp	# restore inp
		mov	$_out,$out	# restore out
		mov	$_len,%r10
		xor	0+$ivec,$s0
		xor	4+$ivec,$s1
		xor	8+$ivec,$s2
		xor	12+$ivec,$s3
		mov	0($inp),%r11	# load input
		mov	8($inp),%r12
		sub	\$16,%r10
		jc	.Lcbc_slow_dec_partial
		jz	.Lcbc_slow_dec_done

		mov	%r11,0+$ivec	# copy input to iv
		mov	%r12,8+$ivec

		mov	$s0,0($out)	# save output [can zap input]
		mov	$s1,4($out)
		mov	$s2,8($out)
		mov	$s3,12($out)

		lea	16($inp),$inp
		lea	16($out),$out
	jmp	.Lcbc_slow_dec_loop
.Lcbc_slow_dec_done:
	mov	$_ivp,%rdi
	mov	%r11,0(%rdi)		# copy iv back to user
	mov	%r12,8(%rdi)

	mov	$s0,0($out)		# save output [can zap input]
	mov	$s1,4($out)
	mov	$s2,8($out)
	mov	$s3,12($out)

	jmp	.Lcbc_exit

.align	4
.Lcbc_slow_dec_partial:
	mov	$_ivp,%rdi
	mov	%r11,0(%rdi)		# copy iv back to user
	mov	%r12,8(%rdi)

	mov	$s0,0+$ivec		# save output to stack
	mov	$s1,4+$ivec
	mov	$s2,8+$ivec
	mov	$s3,12+$ivec

	mov	$out,%rdi
	lea	$ivec,%rsi
	lea	16(%r10),%rcx		# remaining bytes (r10 went negative)
	.long	0x9066A4F3		# rep movsb
	jmp	.Lcbc_exit

.align	16
.Lcbc_exit:
	mov	$_rsp,%rsi		# pop saved registers from original frame
	mov	(%rsi),%r15
	mov	8(%rsi),%r14
	mov	16(%rsi),%r13
	mov	24(%rsi),%r12
	mov	32(%rsi),%rbp
	mov	40(%rsi),%rbx
	lea	48(%rsi),%rsp
.Lcbc_popfq:
	popfq
.Lcbc_epilogue:
	ret
.size	asm_AES_cbc_encrypt,.-asm_AES_cbc_encrypt
___
}
$code.=<<___;
.align 64
.LAES_Te:
___
# Te0: AES encryption lookup table, 256 entries of 32 bits each (four
# entries per row below, 64 rows).  NOTE(review): the Te1..Te3 views are
# presumably obtained by the code via byte rotations of these entries
# rather than stored separately — confirm against the encrypt routines.
&_data_word(0xa56363c6, 0x847c7cf8, 0x997777ee, 0x8d7b7bf6);
&_data_word(0x0df2f2ff, 0xbd6b6bd6, 0xb16f6fde, 0x54c5c591);
&_data_word(0x50303060, 0x03010102, 0xa96767ce, 0x7d2b2b56);
&_data_word(0x19fefee7, 0x62d7d7b5, 0xe6abab4d, 0x9a7676ec);
&_data_word(0x45caca8f, 0x9d82821f, 0x40c9c989, 0x877d7dfa);
&_data_word(0x15fafaef, 0xeb5959b2, 0xc947478e, 0x0bf0f0fb);
&_data_word(0xecadad41, 0x67d4d4b3, 0xfda2a25f, 0xeaafaf45);
&_data_word(0xbf9c9c23, 0xf7a4a453, 0x967272e4, 0x5bc0c09b);
&_data_word(0xc2b7b775, 0x1cfdfde1, 0xae93933d, 0x6a26264c);
&_data_word(0x5a36366c, 0x413f3f7e, 0x02f7f7f5, 0x4fcccc83);
&_data_word(0x5c343468, 0xf4a5a551, 0x34e5e5d1, 0x08f1f1f9);
&_data_word(0x937171e2, 0x73d8d8ab, 0x53313162, 0x3f15152a);
&_data_word(0x0c040408, 0x52c7c795, 0x65232346, 0x5ec3c39d);
&_data_word(0x28181830, 0xa1969637, 0x0f05050a, 0xb59a9a2f);
&_data_word(0x0907070e, 0x36121224, 0x9b80801b, 0x3de2e2df);
&_data_word(0x26ebebcd, 0x6927274e, 0xcdb2b27f, 0x9f7575ea);
&_data_word(0x1b090912, 0x9e83831d, 0x742c2c58, 0x2e1a1a34);
&_data_word(0x2d1b1b36, 0xb26e6edc, 0xee5a5ab4, 0xfba0a05b);
&_data_word(0xf65252a4, 0x4d3b3b76, 0x61d6d6b7, 0xceb3b37d);
&_data_word(0x7b292952, 0x3ee3e3dd, 0x712f2f5e, 0x97848413);
&_data_word(0xf55353a6, 0x68d1d1b9, 0x00000000, 0x2cededc1);
&_data_word(0x60202040, 0x1ffcfce3, 0xc8b1b179, 0xed5b5bb6);
&_data_word(0xbe6a6ad4, 0x46cbcb8d, 0xd9bebe67, 0x4b393972);
&_data_word(0xde4a4a94, 0xd44c4c98, 0xe85858b0, 0x4acfcf85);
&_data_word(0x6bd0d0bb, 0x2aefefc5, 0xe5aaaa4f, 0x16fbfbed);
&_data_word(0xc5434386, 0xd74d4d9a, 0x55333366, 0x94858511);
&_data_word(0xcf45458a, 0x10f9f9e9, 0x06020204, 0x817f7ffe);
&_data_word(0xf05050a0, 0x443c3c78, 0xba9f9f25, 0xe3a8a84b);
&_data_word(0xf35151a2, 0xfea3a35d, 0xc0404080, 0x8a8f8f05);
&_data_word(0xad92923f, 0xbc9d9d21, 0x48383870, 0x04f5f5f1);
&_data_word(0xdfbcbc63, 0xc1b6b677, 0x75dadaaf, 0x63212142);
&_data_word(0x30101020, 0x1affffe5, 0x0ef3f3fd, 0x6dd2d2bf);
&_data_word(0x4ccdcd81, 0x140c0c18, 0x35131326, 0x2fececc3);
&_data_word(0xe15f5fbe, 0xa2979735, 0xcc444488, 0x3917172e);
&_data_word(0x57c4c493, 0xf2a7a755, 0x827e7efc, 0x473d3d7a);
&_data_word(0xac6464c8, 0xe75d5dba, 0x2b191932, 0x957373e6);
&_data_word(0xa06060c0, 0x98818119, 0xd14f4f9e, 0x7fdcdca3);
&_data_word(0x66222244, 0x7e2a2a54, 0xab90903b, 0x8388880b);
&_data_word(0xca46468c, 0x29eeeec7, 0xd3b8b86b, 0x3c141428);
&_data_word(0x79dedea7, 0xe25e5ebc, 0x1d0b0b16, 0x76dbdbad);
&_data_word(0x3be0e0db, 0x56323264, 0x4e3a3a74, 0x1e0a0a14);
&_data_word(0xdb494992, 0x0a06060c, 0x6c242448, 0xe45c5cb8);
&_data_word(0x5dc2c29f, 0x6ed3d3bd, 0xefacac43, 0xa66262c4);
&_data_word(0xa8919139, 0xa4959531, 0x37e4e4d3, 0x8b7979f2);
&_data_word(0x32e7e7d5, 0x43c8c88b, 0x5937376e, 0xb76d6dda);
&_data_word(0x8c8d8d01, 0x64d5d5b1, 0xd24e4e9c, 0xe0a9a949);
&_data_word(0xb46c6cd8, 0xfa5656ac, 0x07f4f4f3, 0x25eaeacf);
&_data_word(0xaf6565ca, 0x8e7a7af4, 0xe9aeae47, 0x18080810);
&_data_word(0xd5baba6f, 0x887878f0, 0x6f25254a, 0x722e2e5c);
&_data_word(0x241c1c38, 0xf1a6a657, 0xc7b4b473, 0x51c6c697);
&_data_word(0x23e8e8cb, 0x7cdddda1, 0x9c7474e8, 0x211f1f3e);
&_data_word(0xdd4b4b96, 0xdcbdbd61, 0x868b8b0d, 0x858a8a0f);
&_data_word(0x907070e0, 0x423e3e7c, 0xc4b5b571, 0xaa6666cc);
&_data_word(0xd8484890, 0x05030306, 0x01f6f6f7, 0x120e0e1c);
&_data_word(0xa36161c2, 0x5f35356a, 0xf95757ae, 0xd0b9b969);
&_data_word(0x91868617, 0x58c1c199, 0x271d1d3a, 0xb99e9e27);
&_data_word(0x38e1e1d9, 0x13f8f8eb, 0xb398982b, 0x33111122);
&_data_word(0xbb6969d2, 0x70d9d9a9, 0x898e8e07, 0xa7949433);
&_data_word(0xb69b9b2d, 0x221e1e3c, 0x92878715, 0x20e9e9c9);
&_data_word(0x49cece87, 0xff5555aa, 0x78282850, 0x7adfdfa5);
&_data_word(0x8f8c8c03, 0xf8a1a159, 0x80898909, 0x170d0d1a);
&_data_word(0xdabfbf65, 0x31e6e6d7, 0xc6424284, 0xb86868d0);
&_data_word(0xc3414182, 0xb0999929, 0x772d2d5a, 0x110f0f1e);
&_data_word(0xcbb0b07b, 0xfc5454a8, 0xd6bbbb6d, 0x3a16162c);
# Te4: the 256-byte S-box, emitted four times back-to-back.  The run-time
# code selects one of the four identical copies so that table lookups are
# spread over distinct L1 cache sets (see original comment below).
#Te4 # four copies of Te4 to choose from to avoid L1 aliasing
# --- Te4 copy 1 of 4 ---
&data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
&data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
&data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
&data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
&data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
&data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
&data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
&data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
&data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
&data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
&data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
&data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
&data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
&data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
&data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
&data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
&data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
&data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
&data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
&data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
&data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
&data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
&data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
&data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
&data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
&data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
&data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
&data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
&data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
&data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
&data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
&data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
# --- Te4 copy 2 of 4 (identical bytes) ---
&data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
&data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
&data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
&data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
&data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
&data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
&data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
&data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
&data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
&data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
&data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
&data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
&data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
&data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
&data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
&data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
&data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
&data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
&data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
&data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
&data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
&data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
&data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
&data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
&data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
&data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
&data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
&data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
&data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
&data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
&data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
&data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
# --- Te4 copy 3 of 4 (identical bytes) ---
&data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
&data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
&data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
&data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
&data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
&data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
&data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
&data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
&data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
&data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
&data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
&data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
&data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
&data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
&data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
&data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
&data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
&data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
&data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
&data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
&data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
&data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
&data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
&data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
&data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
&data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
&data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
&data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
&data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
&data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
&data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
&data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
# --- Te4 copy 4 of 4 (identical bytes) ---
&data_byte(0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5);
&data_byte(0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76);
&data_byte(0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0);
&data_byte(0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0);
&data_byte(0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc);
&data_byte(0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15);
&data_byte(0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a);
&data_byte(0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75);
&data_byte(0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0);
&data_byte(0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84);
&data_byte(0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b);
&data_byte(0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf);
&data_byte(0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85);
&data_byte(0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8);
&data_byte(0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5);
&data_byte(0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2);
&data_byte(0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17);
&data_byte(0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73);
&data_byte(0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88);
&data_byte(0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb);
&data_byte(0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c);
&data_byte(0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79);
&data_byte(0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9);
&data_byte(0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08);
&data_byte(0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6);
&data_byte(0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a);
&data_byte(0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e);
&data_byte(0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e);
&data_byte(0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94);
&data_byte(0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf);
&data_byte(0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68);
&data_byte(0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16);
# rcon: key-schedule round constants (0x01,0x02,... then 0x1b,0x36),
# followed by 0x80/0xfe/0x1b byte-mask words.  NOTE(review): the masks are
# presumably used by the compact code paths for GF(2^8) doubling — confirm
# against the _compact routines.
#rcon:
$code.=<<___;
.long 0x00000001, 0x00000002, 0x00000004, 0x00000008
.long 0x00000010, 0x00000020, 0x00000040, 0x00000080
.long 0x0000001b, 0x00000036, 0x80808080, 0x80808080
.long 0xfefefefe, 0xfefefefe, 0x1b1b1b1b, 0x1b1b1b1b
___
$code.=<<___;
.align 64
.LAES_Td:
___
# Td0: AES decryption lookup table, 256 entries of 32 bits each (four
# entries per row below, 64 rows), mirroring the .LAES_Te layout above.
&_data_word(0x50a7f451, 0x5365417e, 0xc3a4171a, 0x965e273a);
&_data_word(0xcb6bab3b, 0xf1459d1f, 0xab58faac, 0x9303e34b);
&_data_word(0x55fa3020, 0xf66d76ad, 0x9176cc88, 0x254c02f5);
&_data_word(0xfcd7e54f, 0xd7cb2ac5, 0x80443526, 0x8fa362b5);
&_data_word(0x495ab1de, 0x671bba25, 0x980eea45, 0xe1c0fe5d);
&_data_word(0x02752fc3, 0x12f04c81, 0xa397468d, 0xc6f9d36b);
&_data_word(0xe75f8f03, 0x959c9215, 0xeb7a6dbf, 0xda595295);
&_data_word(0x2d83bed4, 0xd3217458, 0x2969e049, 0x44c8c98e);
&_data_word(0x6a89c275, 0x78798ef4, 0x6b3e5899, 0xdd71b927);
&_data_word(0xb64fe1be, 0x17ad88f0, 0x66ac20c9, 0xb43ace7d);
&_data_word(0x184adf63, 0x82311ae5, 0x60335197, 0x457f5362);
&_data_word(0xe07764b1, 0x84ae6bbb, 0x1ca081fe, 0x942b08f9);
&_data_word(0x58684870, 0x19fd458f, 0x876cde94, 0xb7f87b52);
&_data_word(0x23d373ab, 0xe2024b72, 0x578f1fe3, 0x2aab5566);
&_data_word(0x0728ebb2, 0x03c2b52f, 0x9a7bc586, 0xa50837d3);
&_data_word(0xf2872830, 0xb2a5bf23, 0xba6a0302, 0x5c8216ed);
&_data_word(0x2b1ccf8a, 0x92b479a7, 0xf0f207f3, 0xa1e2694e);
&_data_word(0xcdf4da65, 0xd5be0506, 0x1f6234d1, 0x8afea6c4);
&_data_word(0x9d532e34, 0xa055f3a2, 0x32e18a05, 0x75ebf6a4);
&_data_word(0x39ec830b, 0xaaef6040, 0x069f715e, 0x51106ebd);
&_data_word(0xf98a213e, 0x3d06dd96, 0xae053edd, 0x46bde64d);
&_data_word(0xb58d5491, 0x055dc471, 0x6fd40604, 0xff155060);
&_data_word(0x24fb9819, 0x97e9bdd6, 0xcc434089, 0x779ed967);
&_data_word(0xbd42e8b0, 0x888b8907, 0x385b19e7, 0xdbeec879);
&_data_word(0x470a7ca1, 0xe90f427c, 0xc91e84f8, 0x00000000);
&_data_word(0x83868009, 0x48ed2b32, 0xac70111e, 0x4e725a6c);
&_data_word(0xfbff0efd, 0x5638850f, 0x1ed5ae3d, 0x27392d36);
&_data_word(0x64d90f0a, 0x21a65c68, 0xd1545b9b, 0x3a2e3624);
&_data_word(0xb1670a0c, 0x0fe75793, 0xd296eeb4, 0x9e919b1b);
&_data_word(0x4fc5c080, 0xa220dc61, 0x694b775a, 0x161a121c);
&_data_word(0x0aba93e2, 0xe52aa0c0, 0x43e0223c, 0x1d171b12);
&_data_word(0x0b0d090e, 0xadc78bf2, 0xb9a8b62d, 0xc8a91e14);
&_data_word(0x8519f157, 0x4c0775af, 0xbbdd99ee, 0xfd607fa3);
&_data_word(0x9f2601f7, 0xbcf5725c, 0xc53b6644, 0x347efb5b);
&_data_word(0x7629438b, 0xdcc623cb, 0x68fcedb6, 0x63f1e4b8);
&_data_word(0xcadc31d7, 0x10856342, 0x40229713, 0x2011c684);
&_data_word(0x7d244a85, 0xf83dbbd2, 0x1132f9ae, 0x6da129c7);
&_data_word(0x4b2f9e1d, 0xf330b2dc, 0xec52860d, 0xd0e3c177);
&_data_word(0x6c16b32b, 0x99b970a9, 0xfa489411, 0x2264e947);
&_data_word(0xc48cfca8, 0x1a3ff0a0, 0xd82c7d56, 0xef903322);
&_data_word(0xc74e4987, 0xc1d138d9, 0xfea2ca8c, 0x360bd498);
&_data_word(0xcf81f5a6, 0x28de7aa5, 0x268eb7da, 0xa4bfad3f);
&_data_word(0xe49d3a2c, 0x0d927850, 0x9bcc5f6a, 0x62467e54);
&_data_word(0xc2138df6, 0xe8b8d890, 0x5ef7392e, 0xf5afc382);
&_data_word(0xbe805d9f, 0x7c93d069, 0xa92dd56f, 0xb31225cf);
&_data_word(0x3b99acc8, 0xa77d1810, 0x6e639ce8, 0x7bbb3bdb);
&_data_word(0x097826cd, 0xf418596e, 0x01b79aec, 0xa89a4f83);
&_data_word(0x656e95e6, 0x7ee6ffaa, 0x08cfbc21, 0xe6e815ef);
&_data_word(0xd99be7ba, 0xce366f4a, 0xd4099fea, 0xd67cb029);
&_data_word(0xafb2a431, 0x31233f2a, 0x3094a5c6, 0xc066a235);
&_data_word(0x37bc4e74, 0xa6ca82fc, 0xb0d090e0, 0x15d8a733);
&_data_word(0x4a9804f1, 0xf7daec41, 0x0e50cd7f, 0x2ff69117);
&_data_word(0x8dd64d76, 0x4db0ef43, 0x544daacc, 0xdf0496e4);
&_data_word(0xe3b5d19e, 0x1b886a4c, 0xb81f2cc1, 0x7f516546);
&_data_word(0x04ea5e9d, 0x5d358c01, 0x737487fa, 0x2e410bfb);
&_data_word(0x5a1d67b3, 0x52d2db92, 0x335610e9, 0x1347d66d);
&_data_word(0x8c61d79a, 0x7a0ca137, 0x8e14f859, 0x893c13eb);
&_data_word(0xee27a9ce, 0x35c961b7, 0xede51ce1, 0x3cb1477a);
&_data_word(0x59dfd29c, 0x3f73f255, 0x79ce1418, 0xbf37c773);
&_data_word(0xeacdf753, 0x5baafd5f, 0x146f3ddf, 0x86db4478);
&_data_word(0x81f3afca, 0x3ec468b9, 0x2c342438, 0x5f40a3c2);
&_data_word(0x72c31d16, 0x0c25e2bc, 0x8b493c28, 0x41950dff);
&_data_word(0x7101a839, 0xdeb30c08, 0x9ce4b4d8, 0x90c15664);
&_data_word(0x6184cb7b, 0x70b632d5, 0x745c6c48, 0x4257b8d0);
# Td4: the 256-byte inverse S-box, emitted four times.  Unlike Te4 above,
# each copy is immediately followed by a heredoc holding the 0x80/0xfe/0x1b
# mask words (padded with two zero words), so mask data is reachable at a
# fixed offset from whichever Td4 copy the code selects.
#Td4: # four copies of Td4 to choose from to avoid L1 aliasing
# --- Td4 copy 1 of 4 ---
&data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
&data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
&data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
&data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
&data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
&data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
&data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
&data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
&data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
&data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
&data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
&data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
&data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
&data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
&data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
&data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
&data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
&data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
&data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
&data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
&data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
&data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
&data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
&data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
&data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
&data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
&data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
&data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
&data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
&data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
&data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
&data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
$code.=<<___;
.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
___
# --- Td4 copy 2 of 4 (identical bytes), with its own trailing masks ---
&data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
&data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
&data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
&data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
&data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
&data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
&data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
&data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
&data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
&data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
&data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
&data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
&data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
&data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
&data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
&data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
&data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
&data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
&data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
&data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
&data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
&data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
&data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
&data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
&data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
&data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
&data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
&data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
&data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
&data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
&data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
&data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
$code.=<<___;
.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
___
# --- Td4 copy 3 of 4 (identical bytes), with its own trailing masks ---
&data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
&data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
&data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
&data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
&data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
&data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
&data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
&data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
&data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
&data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
&data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
&data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
&data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
&data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
&data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
&data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
&data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
&data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
&data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
&data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
&data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
&data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
&data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
&data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
&data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
&data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
&data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
&data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
&data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
&data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
&data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
&data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
$code.=<<___;
.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
___
# --- Td4 copy 4 of 4 (identical bytes); final masks plus module tag below ---
&data_byte(0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38);
&data_byte(0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb);
&data_byte(0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87);
&data_byte(0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb);
&data_byte(0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d);
&data_byte(0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e);
&data_byte(0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2);
&data_byte(0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25);
&data_byte(0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16);
&data_byte(0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92);
&data_byte(0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda);
&data_byte(0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84);
&data_byte(0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a);
&data_byte(0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06);
&data_byte(0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02);
&data_byte(0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b);
&data_byte(0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea);
&data_byte(0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73);
&data_byte(0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85);
&data_byte(0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e);
&data_byte(0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89);
&data_byte(0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b);
&data_byte(0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20);
&data_byte(0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4);
&data_byte(0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31);
&data_byte(0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f);
&data_byte(0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d);
&data_byte(0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef);
&data_byte(0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0);
&data_byte(0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61);
&data_byte(0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26);
&data_byte(0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d);
$code.=<<___;
.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
.asciz "AES for x86_64, CRYPTOGAMS by <appro\@openssl.org>"
.align 64
___
  2264. # EXCEPTION_DISPOSITION handler (EXCEPTION_RECORD *rec,ULONG64 frame,
  2265. # CONTEXT *context,DISPATCHER_CONTEXT *disp)
  2266. if ($win64) {
  2267. $rec="%rcx";
  2268. $frame="%rdx";
  2269. $context="%r8";
  2270. $disp="%r9";
  2271. $code.=<<___;
  2272. .extern __imp_RtlVirtualUnwind
# Windows x64 SEH handler for the block routines.  The system unwinder
# calls it with rec (rcx), frame (rdx), context (r8), disp (r9); it decides
# from context->Rip whether the guarded function had a live frame and, if
# so, restores the callee-saved registers into the CONTEXT record.
.type block_se_handler,\@abi-omnipotent
.align 16
block_se_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp # scratch; 9 pushes + 64 keeps rsp 16-byte aligned
mov 120($context),%rax # pull context->Rax
mov 248($context),%rbx # pull context->Rip
mov 8($disp),%rsi # disp->ImageBase
mov 56($disp),%r11 # disp->HandlerData
mov 0(%r11),%r10d # HandlerData[0]
lea (%rsi,%r10),%r10 # prologue label
cmp %r10,%rbx # context->Rip<prologue label
jb .Lin_block_prologue
mov 152($context),%rax # pull context->Rsp
mov 4(%r11),%r10d # HandlerData[1]
lea (%rsi,%r10),%r10 # epilogue label
cmp %r10,%rbx # context->Rip>=epilogue label
jae .Lin_block_prologue
# Rip is inside the body: the frame is live, so recover the registers the
# prologue saved and publish them back into the CONTEXT record.
mov 24(%rax),%rax # pull saved real stack pointer
lea 48(%rax),%rax # adjust...
mov -8(%rax),%rbx
mov -16(%rax),%rbp
mov -24(%rax),%r12
mov -32(%rax),%r13
mov -40(%rax),%r14
mov -48(%rax),%r15
mov %rbx,144($context) # restore context->Rbx
mov %rbp,160($context) # restore context->Rbp
mov %r12,216($context) # restore context->R12
mov %r13,224($context) # restore context->R13
mov %r14,232($context) # restore context->R14
mov %r15,240($context) # restore context->R15
.Lin_block_prologue:
# NOTE(review): 8/16(%rax) appear to hold the caller's rdi/rsi images
# saved above the frame - confirm against the block routines' prologues.
mov 8(%rax),%rdi
mov 16(%rax),%rsi
mov %rax,152($context) # restore context->Rsp
mov %rsi,168($context) # restore context->Rsi
mov %rdi,176($context) # restore context->Rdi
jmp .Lcommon_seh_exit # shared tail, defined elsewhere in this file
.size block_se_handler,.-block_se_handler
# Windows x64 SEH handler for the key-schedule routines.  Same structure as
# block_se_handler above, except the saved registers sit directly above
# context->Rsp (no separate "real stack pointer" slot in the frame).
.type key_se_handler,\@abi-omnipotent
.align 16
key_se_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp # scratch; 9 pushes + 64 keeps rsp 16-byte aligned
mov 120($context),%rax # pull context->Rax
mov 248($context),%rbx # pull context->Rip
mov 8($disp),%rsi # disp->ImageBase
mov 56($disp),%r11 # disp->HandlerData
mov 0(%r11),%r10d # HandlerData[0]
lea (%rsi,%r10),%r10 # prologue label
cmp %r10,%rbx # context->Rip<prologue label
jb .Lin_key_prologue
mov 152($context),%rax # pull context->Rsp
mov 4(%r11),%r10d # HandlerData[1]
lea (%rsi,%r10),%r10 # epilogue label
cmp %r10,%rbx # context->Rip>=epilogue label
jae .Lin_key_prologue
# Rip is inside the body: registers were pushed right below the frame top.
lea 56(%rax),%rax
mov -8(%rax),%rbx
mov -16(%rax),%rbp
mov -24(%rax),%r12
mov -32(%rax),%r13
mov -40(%rax),%r14
mov -48(%rax),%r15
mov %rbx,144($context) # restore context->Rbx
mov %rbp,160($context) # restore context->Rbp
mov %r12,216($context) # restore context->R12
mov %r13,224($context) # restore context->R13
mov %r14,232($context) # restore context->R14
mov %r15,240($context) # restore context->R15
.Lin_key_prologue:
# NOTE(review): 8/16(%rax) appear to hold the caller's rdi/rsi images
# saved above the frame - confirm against the key-schedule prologues.
mov 8(%rax),%rdi
mov 16(%rax),%rsi
mov %rax,152($context) # restore context->Rsp
mov %rsi,168($context) # restore context->Rsi
mov %rdi,176($context) # restore context->Rdi
jmp .Lcommon_seh_exit # shared tail, defined elsewhere in this file
.size key_se_handler,.-key_se_handler
# cbc_se_handler: Win64 structured-exception handler for
# asm_AES_cbc_encrypt.  Unlike key_se_handler it cannot be
# table-driven: the CBC routine has two separate regions (fast and
# slow paths), so the faulting RIP is classified against hard-coded
# labels via RIP-relative lea instead of HandlerData RVAs.  It also
# hosts .Lcommon_seh_exit, the unwind tail shared by all three
# handlers in this file.
2368. .type cbc_se_handler,\@abi-omnipotent
2369. .align 16
2370. cbc_se_handler:
# Save all non-volatile registers plus flags and reserve 64 bytes of
# scratch for the RtlVirtualUnwind argument block.
2371. push %rsi
2372. push %rdi
2373. push %rbx
2374. push %rbp
2375. push %r12
2376. push %r13
2377. push %r14
2378. push %r15
2379. pushfq
2380. sub \$64,%rsp
2381. mov 120($context),%rax # pull context->Rax
2382. mov 248($context),%rbx # pull context->Rip
# Region classification, in ascending address order:
#   [start, cbc_prologue)              -> nothing saved yet
#   [cbc_prologue, cbc_fast_body)      -> fast-path frame setup
#   [cbc_fast_body, cbc_slow_prologue) -> fast-path body
#   [cbc_slow_prologue, cbc_slow_body) -> slow-path frame setup
#   [cbc_slow_body, ...)               -> slow-path body
2383. lea .Lcbc_prologue(%rip),%r10
2384. cmp %r10,%rbx # context->Rip<.Lcbc_prologue
2385. jb .Lin_cbc_prologue
2386. lea .Lcbc_fast_body(%rip),%r10
2387. cmp %r10,%rbx # context->Rip<.Lcbc_fast_body
2388. jb .Lin_cbc_frame_setup
2389. lea .Lcbc_slow_prologue(%rip),%r10
2390. cmp %r10,%rbx # context->Rip<.Lcbc_slow_prologue
2391. jb .Lin_cbc_body
2392. lea .Lcbc_slow_body(%rip),%r10
2393. cmp %r10,%rbx # context->Rip<.Lcbc_slow_body
2394. jb .Lin_cbc_frame_setup
2395. .Lin_cbc_body:
# Fault inside a body region: recover the original stack pointer,
# accounting for the extra pushfq still on the stack before
# .Lcbc_popfq has executed.
2396. mov 152($context),%rax # pull context->Rsp
2397. lea .Lcbc_epilogue(%rip),%r10
2398. cmp %r10,%rbx # context->Rip>=.Lcbc_epilogue
2399. jae .Lin_cbc_prologue
2400. lea 8(%rax),%rax
2401. lea .Lcbc_popfq(%rip),%r10
2402. cmp %r10,%rbx # context->Rip>=.Lcbc_popfq
2403. jae .Lin_cbc_prologue
2404. mov `16-8`(%rax),%rax # biased $_rsp
2405. lea 56(%rax),%rax
2406. .Lin_cbc_frame_setup:
# Recover the six callee-saved registers from the frame and write
# them back into the CONTEXT record.
2407. mov -16(%rax),%rbx
2408. mov -24(%rax),%rbp
2409. mov -32(%rax),%r12
2410. mov -40(%rax),%r13
2411. mov -48(%rax),%r14
2412. mov -56(%rax),%r15
2413. mov %rbx,144($context) # restore context->Rbx
2414. mov %rbp,160($context) # restore context->Rbp
2415. mov %r12,216($context) # restore context->R12
2416. mov %r13,224($context) # restore context->R13
2417. mov %r14,232($context) # restore context->R14
2418. mov %r15,240($context) # restore context->R15
2419. .Lin_cbc_prologue:
2420. mov 8(%rax),%rdi
2421. mov 16(%rax),%rsi
2422. mov %rax,152($context) # restore context->Rsp
2423. mov %rsi,168($context) # restore context->Rsi
2424. mov %rdi,176($context) # restore context->Rdi
# Shared unwind tail: copy the (patched) CONTEXT record over
# disp->ContextRecord, then let RtlVirtualUnwind continue the unwind
# from there and report ExceptionContinueSearch.
2425. .Lcommon_seh_exit:
2426. mov 40($disp),%rdi # disp->ContextRecord
2427. mov $context,%rsi # context
2428. mov \$`1232/8`,%ecx # sizeof(CONTEXT)
2429. .long 0xa548f3fc # cld; rep movsq
2430. mov $disp,%rsi
# Build the 8-argument RtlVirtualUnwind call: first four in
# rcx/rdx/r8/r9, the rest in the stack slots above the 32-byte
# shadow space reserved by the sub 64 above.
2431. xor %rcx,%rcx # arg1, UNW_FLAG_NHANDLER
2432. mov 8(%rsi),%rdx # arg2, disp->ImageBase
2433. mov 0(%rsi),%r8 # arg3, disp->ControlPc
2434. mov 16(%rsi),%r9 # arg4, disp->FunctionEntry
2435. mov 40(%rsi),%r10 # disp->ContextRecord
2436. lea 56(%rsi),%r11 # &disp->HandlerData
2437. lea 24(%rsi),%r12 # &disp->EstablisherFrame
2438. mov %r10,32(%rsp) # arg5
2439. mov %r11,40(%rsp) # arg6
2440. mov %r12,48(%rsp) # arg7
2441. mov %rcx,56(%rsp) # arg8, (NULL)
2442. call *__imp_RtlVirtualUnwind(%rip)
2443. mov \$1,%eax # ExceptionContinueSearch
# Unwind our own prologue (mirror of the pushes at entry) and return.
2444. add \$64,%rsp
2445. popfq
2446. pop %r15
2447. pop %r14
2448. pop %r13
2449. pop %r12
2450. pop %rbp
2451. pop %rbx
2452. pop %rdi
2453. pop %rsi
2454. ret
2455. .size cbc_se_handler,.-cbc_se_handler
# Win64 exception directory (.pdata): one RUNTIME_FUNCTION descriptor
# -- begin RVA, end RVA, unwind-info RVA -- per public AES entry
# point, so the OS can locate the unwind info and handlers above.
2456. .section .pdata
2457. .align 4
2458. .rva .LSEH_begin_asm_AES_encrypt
2459. .rva .LSEH_end_asm_AES_encrypt
2460. .rva .LSEH_info_asm_AES_encrypt
2461. .rva .LSEH_begin_asm_AES_decrypt
2462. .rva .LSEH_end_asm_AES_decrypt
2463. .rva .LSEH_info_asm_AES_decrypt
2464. .rva .LSEH_begin_asm_AES_set_encrypt_key
2465. .rva .LSEH_end_asm_AES_set_encrypt_key
2466. .rva .LSEH_info_asm_AES_set_encrypt_key
2467. .rva .LSEH_begin_asm_AES_set_decrypt_key
2468. .rva .LSEH_end_asm_AES_set_decrypt_key
2469. .rva .LSEH_info_asm_AES_set_decrypt_key
2470. .rva .LSEH_begin_asm_AES_cbc_encrypt
2471. .rva .LSEH_end_asm_AES_cbc_encrypt
2472. .rva .LSEH_info_asm_AES_cbc_encrypt
# Win64 unwind information (.xdata) referenced from .pdata above.
# Header bytes 9,0,0,0: version 1 with UNW_FLAG_EHANDLER set, zero
# prologue size, zero unwind codes -- all unwinding is delegated to
# the language-specific handler whose RVA follows.  The trailing RVAs
# are that handler's HandlerData[] (prologue/epilogue labels).
2473. .section .xdata
2474. .align 8
2475. .LSEH_info_asm_AES_encrypt:
2476. .byte 9,0,0,0
2477. .rva block_se_handler
2478. .rva .Lenc_prologue,.Lenc_epilogue # HandlerData[]
2479. .LSEH_info_asm_AES_decrypt:
2480. .byte 9,0,0,0
2481. .rva block_se_handler
2482. .rva .Ldec_prologue,.Ldec_epilogue # HandlerData[]
2483. .LSEH_info_asm_AES_set_encrypt_key:
2484. .byte 9,0,0,0
2485. .rva key_se_handler
2486. .rva .Lenc_key_prologue,.Lenc_key_epilogue # HandlerData[]
2487. .LSEH_info_asm_AES_set_decrypt_key:
2488. .byte 9,0,0,0
2489. .rva key_se_handler
2490. .rva .Ldec_key_prologue,.Ldec_key_epilogue # HandlerData[]
# No HandlerData for the CBC entry: cbc_se_handler hard-codes its
# region labels instead of reading them from here.
2491. .LSEH_info_asm_AES_cbc_encrypt:
2492. .byte 9,0,0,0
2493. .rva cbc_se_handler
  2494. ___
  2495. }
  2496. $code =~ s/\`([^\`]*)\`/eval($1)/gem;
  2497. print $code;
  2498. close STDOUT;