/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */
#include <stdlib.h>  /* for abort(), used by the unreachable stub functions */
#include <string.h>

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/cpu.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/modes.h>
#include <openssl/obj.h>
#include <openssl/rand.h>
#include <openssl/sha.h>

#include "internal.h"
#include "../internal.h"
#include "../modes/internal.h"

#if defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
#include "../arm_arch.h"
#endif
typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;
  block128_f block;
  union {
    cbc128_f cbc;
    ctr128_f ctr;
  } stream;
} EVP_AES_KEY;

typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;        /* AES key schedule to use */
  int key_set; /* Set if key initialised */
  int iv_set;  /* Set if an iv is set */
  GCM128_CONTEXT gcm;
  uint8_t *iv; /* Temporary IV store */
  int ivlen;   /* IV length */
  int taglen;
  int iv_gen;  /* It is OK to generate IVs */
  ctr128_f ctr;
} EVP_AES_GCM_CTX;
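/* Illustrative usage sketch (not part of this file): these GCM ciphers are
 * normally driven through the public EVP interface. Assuming caller-supplied
 * |key| (16 bytes), |iv| (12 bytes, the default length), |plaintext| and
 * |ciphertext| buffers, encryption looks roughly like:
 *
 *   EVP_CIPHER_CTX ctx;
 *   int len;
 *   uint8_t tag[16];
 *
 *   EVP_CIPHER_CTX_init(&ctx);
 *   EVP_EncryptInit_ex(&ctx, EVP_aes_128_gcm(), NULL, key, iv);
 *   EVP_EncryptUpdate(&ctx, ciphertext, &len, plaintext, plaintext_len);
 *   EVP_EncryptFinal_ex(&ctx, ciphertext + len, &len);
 *   EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_GCM_GET_TAG, sizeof(tag), tag);
 *   EVP_CIPHER_CTX_cleanup(&ctx);
 */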
#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
#define VPAES
static char vpaes_capable(void) {
  return (OPENSSL_ia32cap_P[1] & (1 << (41 - 32))) != 0;
}

#if defined(OPENSSL_X86_64)
#define BSAES
static char bsaes_capable(void) {
  return vpaes_capable();
}
#endif

#elif !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64))
#include "../arm_arch.h"

#if defined(OPENSSL_ARM) && __ARM_MAX_ARCH__ >= 7
#define BSAES
static char bsaes_capable(void) {
  return CRYPTO_is_NEON_capable();
}
#endif

#define HWAES
static char hwaes_capable(void) {
  return (OPENSSL_armcap_P & ARMV8_AES) != 0;
}

int aes_v8_set_encrypt_key(const uint8_t *user_key, const int bits,
                           AES_KEY *key);
int aes_v8_set_decrypt_key(const uint8_t *user_key, const int bits,
                           AES_KEY *key);
void aes_v8_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aes_v8_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aes_v8_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                        const AES_KEY *key, uint8_t *ivec, const int enc);
void aes_v8_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                 const AES_KEY *key, const uint8_t ivec[16]);

#endif /* OPENSSL_ARM || OPENSSL_AARCH64 */
#if defined(BSAES)
/* On platforms where BSAES gets defined (just above), these functions are
 * provided by asm. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]);
#else
static char bsaes_capable(void) {
  return 0;
}

/* On other platforms, bsaes_capable() will always return false and so the
 * following will never be called. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc) {
  abort();
}

void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]) {
  abort();
}
#endif
#if defined(VPAES)
/* On platforms where VPAES gets defined (just above), these functions are
 * provided by asm. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);
#else
static char vpaes_capable(void) {
  return 0;
}

/* On other platforms, vpaes_capable() will always return false and so the
 * following will never be called. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}

int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}

void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}
#endif
#if !defined(HWAES)
/* If HWAES isn't defined then we provide dummy functions for each of the hwaes
 * functions. */
static char hwaes_capable(void) {
  return 0;
}

int aes_v8_set_encrypt_key(const uint8_t *user_key, int bits, AES_KEY *key) {
  abort();
}

int aes_v8_set_decrypt_key(const uint8_t *user_key, int bits, AES_KEY *key) {
  abort();
}

void aes_v8_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

void aes_v8_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

void aes_v8_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                        const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}

void aes_v8_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                 const AES_KEY *key, const uint8_t ivec[16]) {
  abort();
}
#endif
#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int aesni_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aesni_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void aesni_ecb_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
                                const void *key, const uint8_t *ivec);

#if defined(OPENSSL_X86_64)
size_t aesni_gcm_encrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const void *key, uint8_t ivec[16], uint64_t *Xi);
#define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const void *key, uint8_t ivec[16], uint64_t *Xi);
#define AES_gcm_decrypt aesni_gcm_decrypt

void gcm_ghash_avx(uint64_t Xi[2], const u128 Htable[16], const uint8_t *in,
                   size_t len);
#define AES_GCM_ASM(gctx) \
  (gctx->ctr == aesni_ctr32_encrypt_blocks && gctx->gcm.ghash == gcm_ghash_avx)
#endif /* OPENSSL_X86_64 */

#else

/* On other platforms, aesni_capable() will always return false and so the
 * following will never be called. */
void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}

void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
                                const void *key, const uint8_t *ivec) {
  abort();
}
#endif
static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc)
    OPENSSL_SUPPRESS_UNREACHABLE_CODE_WARNINGS {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (hwaes_capable()) {
      ret = aes_v8_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)aes_v8_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = (cbc128_f)aes_v8_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)vpaes_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
    } else {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
    }
  } else if (hwaes_capable()) {
    ret = aes_v8_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)aes_v8_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aes_v8_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aes_v8_ctr32_encrypt_blocks;
    }
  } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)vpaes_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
  } else {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}
static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.cbc) {
    (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
  } else if (ctx->encrypt) {
    CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  } else {
    CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  }

  return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  size_t bl = ctx->cipher->block_size;
  size_t i;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (len < bl) {
    return 1;
  }

  for (i = 0, len -= bl; i <= len; i += bl) {
    (*dat->block)(in + i, out + i, &dat->ks);
  }

  return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  unsigned int num = ctx->num;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
                                dat->stream.ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
                          dat->block);
  }
  ctx->num = (size_t)num;
  return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num, dat->block);
  return 1;
}

static char aesni_capable(void);
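/* aes_ctr_set_key initialises |*aes_key| using the fastest AES implementation
 * available at runtime, initialises |*gcm_ctx| (if non-NULL) with that key,
 * writes the raw block function to |*out_block| (if non-NULL), and returns an
 * optimised counter-mode stream function, or NULL if callers should fall back
 * to the plain block function. */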
static ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
                                block128_f *out_block, const uint8_t *key,
                                size_t key_len)
    OPENSSL_SUPPRESS_UNREACHABLE_CODE_WARNINGS {
  if (aesni_capable()) {
    aesni_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aesni_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)aesni_encrypt;
    }
    return (ctr128_f)aesni_ctr32_encrypt_blocks;
  }

  if (hwaes_capable()) {
    aes_v8_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aes_v8_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)aes_v8_encrypt;
    }
    return (ctr128_f)aes_v8_ctr32_encrypt_blocks;
  }

  if (bsaes_capable()) {
    AES_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)AES_encrypt;
    }
    return (ctr128_f)bsaes_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_len * 8, aes_key);
    if (out_block) {
      *out_block = (block128_f)vpaes_encrypt;
    }
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt);
    }
    return NULL;
  }

  AES_set_encrypt_key(key, key_len * 8, aes_key);
  if (gcm_ctx != NULL) {
    CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
  }
  if (out_block) {
    *out_block = (block128_f)AES_encrypt;
  }
  return NULL;
}
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  if (!iv && !key) {
    return 1;
  }

  if (key) {
    gctx->ctr =
        aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm, NULL, key, ctx->key_len);
    /* If we have an IV, we can set it directly; otherwise use the saved IV. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* If the key is already set, use the IV now; otherwise save it for later. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    } else {
      memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}
static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
}

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(uint8_t *counter) {
  int n = 8;
  uint8_t c;

  do {
    --n;
    c = counter[n];
    ++c;
    counter[n] = c;
    if (c) {
      return;
    }
  } while (n);
}
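/* For example, applying ctr64_inc to a counter whose trailing bytes are
 * 01 ff produces 02 00: the final byte wraps to zero and the carry stops at
 * the first byte that does not overflow. */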
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;

  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      /* Allocate memory for IV if needed */
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_GCM_SET_TAG:
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_GCM_GET_TAG:
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
      /* Special case: -1 length restores whole IV */
      if (arg == -1) {
        memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }

      /* Fixed field must be at least 4 bytes and invocation field
       * at least 8. */
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }

      if (arg) {
        memcpy(gctx->iv, ptr, arg);
      }
      if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      /* Invocation field will be at least 8 bytes in size and
       * so no need to check wrap around or increment more than
       * last 8 bytes. */
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = out->cipher_data;

      if (gctx->gcm.key) {
        if (gctx->gcm.key != &gctx->ks) {
          return 0;
        }
        gctx_out->gcm.key = &gctx_out->ks;
      }

      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
        memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
      }
      return 1;
    }

    default:
      return -1;
  }
}
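/* The EVP_CTRL_GCM_SET_IV_FIXED / EVP_CTRL_GCM_IV_GEN pair above implements a
 * counter-style nonce construction of the kind used by TLS record layers: the
 * caller first fixes the leading bytes of the IV, and each subsequent
 * EVP_CTRL_GCM_IV_GEN call hands back the current IV and increments its
 * trailing 64-bit invocation field, so nonces never repeat under one key. */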
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  /* If not set up, return error */
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (in) {
    if (out == NULL) {
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        size_t bulk = 0;
#if defined(AES_GCM_ASM)
        if (len >= 32 && AES_GCM_ASM(gctx)) {
          size_t res = (16 - gctx->gcm.mres) % 16;

          if (!CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) {
            return -1;
          }

          bulk = AES_gcm_encrypt(in + res, out + res, len - res, gctx->gcm.key,
                                 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
          gctx->gcm.len.u[1] += bulk;
          bulk += res;
        }
#endif
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, in + bulk, out + bulk,
                                         len - bulk, gctx->ctr)) {
          return -1;
        }
      } else {
        size_t bulk = 0;
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, in + bulk, out + bulk,
                                   len - bulk)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        size_t bulk = 0;
#if defined(AES_GCM_ASM)
        if (len >= 16 && AES_GCM_ASM(gctx)) {
          size_t res = (16 - gctx->gcm.mres) % 16;

          if (!CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) {
            return -1;
          }

          bulk = AES_gcm_decrypt(in + res, out + res, len - res, gctx->gcm.key,
                                 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
          gctx->gcm.len.u[1] += bulk;
          bulk += res;
        }
#endif
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, in + bulk, out + bulk,
                                         len - bulk, gctx->ctr)) {
          return -1;
        }
      } else {
        size_t bulk = 0;
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, in + bulk, out + bulk,
                                   len - bulk)) {
          return -1;
        }
      }
    }
    return len;
  } else {
    if (!ctx->encrypt) {
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
  }
}
static const EVP_CIPHER aes_128_cbc = {
    NID_aes_128_cbc, 16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key, aes_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_ctr = {
    NID_aes_128_ctr, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_ecb = {
    NID_aes_128_ecb, 16 /* block_size */, 16 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key, aes_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_ofb = {
    NID_aes_128_ofb128, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aes_init_key, aes_ofb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

static const EVP_CIPHER aes_192_cbc = {
    NID_aes_192_cbc, 16 /* block_size */, 24 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key, aes_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_192_ctr = {
    NID_aes_192_ctr, 1 /* block_size */, 24 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_192_ecb = {
    NID_aes_192_ecb, 16 /* block_size */, 24 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key, aes_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_192_gcm = {
    NID_aes_192_gcm, 1 /* block_size */, 24 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

static const EVP_CIPHER aes_256_cbc = {
    NID_aes_256_cbc, 16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key, aes_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_ctr = {
    NID_aes_256_ctr, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_ecb = {
    NID_aes_256_ecb, 16 /* block_size */, 32 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key, aes_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_ofb = {
    NID_aes_256_ofb128, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aes_init_key, aes_ofb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};
#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))

/* AES-NI section. */

static char aesni_capable(void) {
  return (OPENSSL_ia32cap_P[1] & (1 << (57 - 32))) != 0;
}

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                          const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_decrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
  } else {
    ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_encrypt;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    } else {
      dat->stream.cbc = NULL;
    }
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                            const uint8_t *in, size_t len) {
  aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);
  return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                            const uint8_t *in, size_t len) {
  size_t bl = ctx->cipher->block_size;

  if (len < bl) {
    return 1;
  }

  aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
  return 1;
}

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                              const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  if (!iv && !key) {
    return 1;
  }

  if (key) {
    aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
    CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt);
    gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    /* If we have an IV, we can set it directly; otherwise use the saved IV. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* If the key is already set, use the IV now; otherwise save it for later. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    } else {
      memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}
static const EVP_CIPHER aesni_128_cbc = {
    NID_aes_128_cbc, 16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key, aesni_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ctr = {
    NID_aes_128_ctr, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ecb = {
    NID_aes_128_ecb, 16 /* block_size */, 16 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key, aesni_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ofb = {
    NID_aes_128_ofb128, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aesni_init_key, aes_ofb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

static const EVP_CIPHER aesni_192_cbc = {
    NID_aes_192_cbc, 16 /* block_size */, 24 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key, aesni_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_192_ctr = {
    NID_aes_192_ctr, 1 /* block_size */, 24 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_192_ecb = {
    NID_aes_192_ecb, 16 /* block_size */, 24 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key, aesni_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_192_gcm = {
    NID_aes_192_gcm, 1 /* block_size */, 24 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

static const EVP_CIPHER aesni_256_cbc = {
    NID_aes_256_cbc, 16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key, aesni_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ctr = {
    NID_aes_256_ctr, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ecb = {
    NID_aes_256_ecb, 16 /* block_size */, 32 /* key_size */,
    0 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key, aesni_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ofb = {
    NID_aes_256_ofb128, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aesni_init_key, aes_ofb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};
#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    if (aesni_capable()) {                             \
      return &aesni_##keybits##_##mode;                \
    } else {                                           \
      return &aes_##keybits##_##mode;                  \
    }                                                  \
  }

#else /* ^^^ OPENSSL_X86_64 || OPENSSL_X86 */

static char aesni_capable(void) {
  return 0;
}

#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    return &aes_##keybits##_##mode;                    \
  }

#endif

EVP_CIPHER_FUNCTION(128, cbc)
EVP_CIPHER_FUNCTION(128, ctr)
EVP_CIPHER_FUNCTION(128, ecb)
EVP_CIPHER_FUNCTION(128, ofb)
EVP_CIPHER_FUNCTION(128, gcm)

EVP_CIPHER_FUNCTION(192, cbc)
EVP_CIPHER_FUNCTION(192, ctr)
EVP_CIPHER_FUNCTION(192, ecb)
EVP_CIPHER_FUNCTION(192, gcm)

EVP_CIPHER_FUNCTION(256, cbc)
EVP_CIPHER_FUNCTION(256, ctr)
EVP_CIPHER_FUNCTION(256, ecb)
EVP_CIPHER_FUNCTION(256, ofb)
EVP_CIPHER_FUNCTION(256, gcm)
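/* Each EVP_aes_* getter above therefore dispatches at runtime, e.g.:
 *
 *   const EVP_CIPHER *cipher = EVP_aes_256_gcm();
 *
 * returns &aesni_256_gcm on x86/x86-64 builds with assembly enabled and
 * AES-NI present, and &aes_256_gcm everywhere else. */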
#define EVP_AEAD_AES_GCM_TAG_LEN 16

struct aead_aes_gcm_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  GCM128_CONTEXT gcm;
  ctr128_f ctr;
  uint8_t tag_len;
};

static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t tag_len) {
  struct aead_aes_gcm_ctx *gcm_ctx;
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
  if (gcm_ctx == NULL) {
    return 0;
  }

  gcm_ctx->ctr =
      aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm, NULL, key, key_len);
  gcm_ctx->tag_len = tag_len;
  ctx->aead_state = gcm_ctx;

  return 1;
}

static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  OPENSSL_cleanse(gcm_ctx, sizeof(struct aead_aes_gcm_ctx));
  OPENSSL_free(gcm_ctx);
}

static int aead_aes_gcm_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                             size_t *out_len, size_t max_out_len,
                             const uint8_t *nonce, size_t nonce_len,
                             const uint8_t *in, size_t in_len,
                             const uint8_t *ad, size_t ad_len) {
  size_t bulk = 0;
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  GCM128_CONTEXT gcm;

  if (in_len + gcm_ctx->tag_len < in_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + gcm_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);

  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, in + bulk, out + bulk, in_len - bulk,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, in + bulk, out + bulk, in_len - bulk)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, out + in_len, gcm_ctx->tag_len);
  *out_len = in_len + gcm_ctx->tag_len;
  return 1;
}

static int aead_aes_gcm_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                             size_t *out_len, size_t max_out_len,
                             const uint8_t *nonce, size_t nonce_len,
                             const uint8_t *in, size_t in_len,
                             const uint8_t *ad, size_t ad_len) {
  size_t bulk = 0;
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];
  size_t plaintext_len;
  GCM128_CONTEXT gcm;

  if (in_len < gcm_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  plaintext_len = in_len - gcm_ctx->tag_len;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);

  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, in + bulk, out + bulk,
                                     in_len - bulk - gcm_ctx->tag_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, in + bulk, out + bulk,
                               in_len - bulk - gcm_ctx->tag_len)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, tag, gcm_ctx->tag_len);
  if (CRYPTO_memcmp(tag, in + plaintext_len, gcm_ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  *out_len = plaintext_len;
  return 1;
}

static const EVP_AEAD aead_aes_128_gcm = {
    16,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    NULL, /* init_with_direction */
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
    NULL, /* get_rc4_state */
};

static const EVP_AEAD aead_aes_256_gcm = {
    32,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    NULL, /* init_with_direction */
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
    NULL, /* get_rc4_state */
};

const EVP_AEAD *EVP_aead_aes_128_gcm(void) { return &aead_aes_128_gcm; }

const EVP_AEAD *EVP_aead_aes_256_gcm(void) { return &aead_aes_256_gcm; }
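/* Illustrative usage sketch (not part of this file): the one-shot EVP_AEAD
 * interface above is used roughly as follows, with caller-supplied |key|,
 * |nonce|, |ad| and |plaintext| buffers:
 *
 *   EVP_AEAD_CTX aead;
 *   uint8_t sealed[PLAINTEXT_LEN + EVP_AEAD_AES_GCM_TAG_LEN];
 *   size_t sealed_len;
 *
 *   EVP_AEAD_CTX_init(&aead, EVP_aead_aes_128_gcm(), key, 16,
 *                     EVP_AEAD_DEFAULT_TAG_LENGTH, NULL);
 *   EVP_AEAD_CTX_seal(&aead, sealed, &sealed_len, sizeof(sealed),
 *                     nonce, 12, plaintext, PLAINTEXT_LEN, ad, ad_len);
 *   EVP_AEAD_CTX_cleanup(&aead);
 *
 * EVP_AEAD_CTX_open performs the reverse operation and fails if the tag does
 * not verify. */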
/* AES Key Wrap is specified in
 * http://csrc.nist.gov/groups/ST/toolkit/documents/kms/key-wrap.pdf
 * or https://tools.ietf.org/html/rfc3394 */

struct aead_aes_key_wrap_ctx {
  uint8_t key[32];
  unsigned key_bits;
};

static int aead_aes_key_wrap_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                  size_t key_len, size_t tag_len) {
  struct aead_aes_key_wrap_ctx *kw_ctx;
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = 8;
  }

  if (tag_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_TAG_SIZE);
    return 0;
  }

  kw_ctx = OPENSSL_malloc(sizeof(struct aead_aes_key_wrap_ctx));
  if (kw_ctx == NULL) {
    OPENSSL_PUT_ERROR(CIPHER, ERR_R_MALLOC_FAILURE);
    return 0;
  }

  memcpy(kw_ctx->key, key, key_len);
  kw_ctx->key_bits = key_bits;
  ctx->aead_state = kw_ctx;

  return 1;
}

static void aead_aes_key_wrap_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  OPENSSL_cleanse(kw_ctx, sizeof(struct aead_aes_key_wrap_ctx));
  OPENSSL_free(kw_ctx);
}

/* kDefaultAESKeyWrapNonce is the default nonce value given in section 2.2.3.1
 * of RFC 3394. */
static const uint8_t kDefaultAESKeyWrapNonce[8] = {0xa6, 0xa6, 0xa6, 0xa6,
                                                   0xa6, 0xa6, 0xa6, 0xa6};

static int aead_aes_key_wrap_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                  size_t *out_len, size_t max_out_len,
                                  const uint8_t *nonce, size_t nonce_len,
                                  const uint8_t *in, size_t in_len,
                                  const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  union {
    double align;
    AES_KEY ks;
  } ks;
  /* Variables in this function match up with the variables in the second half
   * of section 2.2.1 of RFC 3394. */
  unsigned i, j, n;
  uint8_t A[AES_BLOCK_SIZE];

  if (ad_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_AD_SIZE);
    return 0;
  }

  if (nonce_len == 0) {
    nonce = kDefaultAESKeyWrapNonce;
    nonce_len = sizeof(kDefaultAESKeyWrapNonce);
  }

  if (nonce_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  if (in_len % 8 != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  /* The code below only handles a 32-bit |t| thus 6*|n| must be less than
   * 2^32, where |n| is |in_len| / 8. So in_len < 4/3 * 2^32 and we
   * conservatively cap it to 2^32-16 to stop 32-bit platforms complaining that
   * a comparison is always true. */
  if (in_len > 0xfffffff0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  n = in_len / 8;

  if (n < 2) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  if (in_len + 8 < in_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (AES_set_encrypt_key(kw_ctx->key, kw_ctx->key_bits, &ks.ks) < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  memmove(out + 8, in, in_len);
  memcpy(A, nonce, 8);

  for (j = 0; j < 6; j++) {
    for (i = 1; i <= n; i++) {
      uint32_t t;

      memcpy(A + 8, out + 8 * i, 8);
      AES_encrypt(A, A, &ks.ks);
      t = n * j + i;
      A[7] ^= t & 0xff;
      A[6] ^= (t >> 8) & 0xff;
      A[5] ^= (t >> 16) & 0xff;
      A[4] ^= (t >> 24) & 0xff;
      memcpy(out + 8 * i, A + 8, 8);
    }
  }

  memcpy(out, A, 8);
  *out_len = in_len + 8;
  return 1;
}

static int aead_aes_key_wrap_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                  size_t *out_len, size_t max_out_len,
                                  const uint8_t *nonce, size_t nonce_len,
                                  const uint8_t *in, size_t in_len,
                                  const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  union {
    double align;
    AES_KEY ks;
  } ks;
  /* Variables in this function match up with the variables in the second half
   * of section 2.2.1 of RFC 3394. */
  unsigned i, j, n;
  uint8_t A[AES_BLOCK_SIZE];

  if (ad_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_AD_SIZE);
    return 0;
  }

  if (nonce_len == 0) {
    nonce = kDefaultAESKeyWrapNonce;
    nonce_len = sizeof(kDefaultAESKeyWrapNonce);
  }

  if (nonce_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  if (in_len % 8 != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  /* The code below only handles a 32-bit |t| thus 6*|n| must be less than
   * 2^32, where |n| is |in_len| / 8. So in_len < 4/3 * 2^32 and we
   * conservatively cap it to 2^32-8 to stop 32-bit platforms complaining that
   * a comparison is always true. */
  if (in_len > 0xfffffff8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (in_len < 24) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  n = (in_len / 8) - 1;

  if (max_out_len < in_len - 8) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (AES_set_decrypt_key(kw_ctx->key, kw_ctx->key_bits, &ks.ks) < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  memcpy(A, in, 8);
  memmove(out, in + 8, in_len - 8);

  /* |j| counts down from five to zero; since it is unsigned, the loop ends
   * when decrementing past zero wraps it around to a value >= 6. */
  for (j = 5; j < 6; j--) {
    for (i = n; i > 0; i--) {
      uint32_t t;

      t = n * j + i;
      A[7] ^= t & 0xff;
      A[6] ^= (t >> 8) & 0xff;
      A[5] ^= (t >> 16) & 0xff;
      A[4] ^= (t >> 24) & 0xff;
      memcpy(A + 8, out + 8 * (i - 1), 8);
      AES_decrypt(A, A, &ks.ks);
      memcpy(out + 8 * (i - 1), A + 8, 8);
    }
  }

  if (CRYPTO_memcmp(A, nonce, 8) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  *out_len = in_len - 8;
  return 1;
}

static const EVP_AEAD aead_aes_128_key_wrap = {
    16, /* key len */
    8,  /* nonce len */
    8,  /* overhead */
    8,  /* max tag length */
    aead_aes_key_wrap_init,
    NULL, /* init_with_direction */
    aead_aes_key_wrap_cleanup,
    aead_aes_key_wrap_seal,
    aead_aes_key_wrap_open,
    NULL, /* get_rc4_state */
};

static const EVP_AEAD aead_aes_256_key_wrap = {
    32, /* key len */
    8,  /* nonce len */
    8,  /* overhead */
    8,  /* max tag length */
    aead_aes_key_wrap_init,
    NULL, /* init_with_direction */
    aead_aes_key_wrap_cleanup,
    aead_aes_key_wrap_seal,
    aead_aes_key_wrap_open,
    NULL, /* get_rc4_state */
};

const EVP_AEAD *EVP_aead_aes_128_key_wrap(void) {
  return &aead_aes_128_key_wrap;
}

const EVP_AEAD *EVP_aead_aes_256_key_wrap(void) {
  return &aead_aes_256_key_wrap;
}
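/* Wrapping always adds exactly one 8-byte block of overhead: an n-block
 * (8n-byte) input wraps to n+1 blocks. For example, wrapping a 16-byte AES
 * key produces 24 bytes of output, and unwrapping those 24 bytes recovers the
 * original 16 (or fails if the integrity check on |A| does not match). */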
#define EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN SHA256_DIGEST_LENGTH
#define EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN 12

struct aead_aes_ctr_hmac_sha256_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  ctr128_f ctr;
  block128_f block;
  SHA256_CTX inner_init_state;
  SHA256_CTX outer_init_state;
  uint8_t tag_len;
};
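/* hmac_init computes the partial SHA-256 states that result from hashing the
 * standard HMAC inner (key XOR 0x36 repeated) and outer (key XOR 0x5c
 * repeated) key blocks. Precomputing these states means each per-message HMAC
 * can resume from them instead of rehashing the key blocks every time. */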
  1286. static void hmac_init(SHA256_CTX *out_inner, SHA256_CTX *out_outer,
  1287. const uint8_t hmac_key[32]) {
  1288. static const size_t hmac_key_len = 32;
  1289. uint8_t block[SHA256_CBLOCK];
  1290. memcpy(block, hmac_key, hmac_key_len);
  1291. memset(block + hmac_key_len, 0x36, sizeof(block) - hmac_key_len);
  1292. unsigned i;
  1293. for (i = 0; i < hmac_key_len; i++) {
  1294. block[i] ^= 0x36;
  1295. }
  1296. SHA256_Init(out_inner);
  1297. SHA256_Update(out_inner, block, sizeof(block));
  1298. memset(block + hmac_key_len, 0x5c, sizeof(block) - hmac_key_len);
  1299. for (i = 0; i < hmac_key_len; i++) {
  1300. block[i] ^= (0x36 ^ 0x5c);
  1301. }
  1302. SHA256_Init(out_outer);
  1303. SHA256_Update(out_outer, block, sizeof(block));
  1304. }
static int aead_aes_ctr_hmac_sha256_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                         size_t key_len, size_t tag_len) {
  struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx;
  static const size_t hmac_key_len = 32;

  if (key_len < hmac_key_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  const size_t aes_key_len = key_len - hmac_key_len;
  if (aes_key_len != 16 && aes_key_len != 32) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  aes_ctx = OPENSSL_malloc(sizeof(struct aead_aes_ctr_hmac_sha256_ctx));
  if (aes_ctx == NULL) {
    OPENSSL_PUT_ERROR(CIPHER, ERR_R_MALLOC_FAILURE);
    return 0;
  }

  aes_ctx->ctr =
      aes_ctr_set_key(&aes_ctx->ks.ks, NULL, &aes_ctx->block, key, aes_key_len);
  aes_ctx->tag_len = tag_len;
  hmac_init(&aes_ctx->inner_init_state, &aes_ctx->outer_init_state,
            key + aes_key_len);

  ctx->aead_state = aes_ctx;

  return 1;
}
static void aead_aes_ctr_hmac_sha256_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  OPENSSL_cleanse(aes_ctx, sizeof(struct aead_aes_ctr_hmac_sha256_ctx));
  OPENSSL_free(aes_ctx);
}
static void hmac_update_uint64(SHA256_CTX *sha256, uint64_t value) {
  unsigned i;
  uint8_t bytes[8];

  /* Serialise |value| in little-endian byte order. */
  for (i = 0; i < sizeof(bytes); i++) {
    bytes[i] = value & 0xff;
    value >>= 8;
  }
  SHA256_Update(sha256, bytes, sizeof(bytes));
}
static void hmac_calculate(uint8_t out[SHA256_DIGEST_LENGTH],
                           const SHA256_CTX *inner_init_state,
                           const SHA256_CTX *outer_init_state,
                           const uint8_t *ad, size_t ad_len,
                           const uint8_t *nonce, const uint8_t *ciphertext,
                           size_t ciphertext_len) {
  SHA256_CTX sha256;
  memcpy(&sha256, inner_init_state, sizeof(sha256));
  hmac_update_uint64(&sha256, ad_len);
  hmac_update_uint64(&sha256, ciphertext_len);
  SHA256_Update(&sha256, nonce, EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN);
  SHA256_Update(&sha256, ad, ad_len);

  /* Pad with zeros to the end of the SHA-256 block. */
  const unsigned num_padding =
      (SHA256_CBLOCK - ((sizeof(uint64_t) * 2 +
                         EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN + ad_len) %
                        SHA256_CBLOCK)) %
      SHA256_CBLOCK;
  uint8_t padding[SHA256_CBLOCK];
  memset(padding, 0, num_padding);
  SHA256_Update(&sha256, padding, num_padding);

  SHA256_Update(&sha256, ciphertext, ciphertext_len);

  uint8_t inner_digest[SHA256_DIGEST_LENGTH];
  SHA256_Final(inner_digest, &sha256);

  memcpy(&sha256, outer_init_state, sizeof(sha256));
  SHA256_Update(&sha256, inner_digest, sizeof(inner_digest));
  SHA256_Final(out, &sha256);
}
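/* Illustrative cross-check (not part of the library; kept out of the build):
 * because the zero padding above is itself hashed, the digest computed by
 * |hmac_calculate| is an ordinary HMAC-SHA256 over the concatenation
 *   le64(ad_len) || le64(ciphertext_len) || nonce || ad || zero-pad || ciphertext
 * and can be reproduced with the one-shot |HMAC| function. This is a minimal
 * sketch assuming <openssl/hmac.h> is available and the inputs are small; the
 * |example_*| name is hypothetical. */
#if 0
static void example_hmac_crosscheck(const uint8_t hmac_key[32],
                                    const uint8_t nonce[12], const uint8_t *ad,
                                    size_t ad_len, const uint8_t *ciphertext,
                                    size_t ciphertext_len,
                                    uint8_t out[SHA256_DIGEST_LENGTH]) {
  uint8_t data[1024] = {0};
  size_t offset = 0;
  unsigned i;

  if (16 + 12 + ad_len + SHA256_CBLOCK + ciphertext_len > sizeof(data)) {
    return;
  }

  /* Lengths are serialised little-endian, matching |hmac_update_uint64|. */
  for (i = 0; i < 8; i++) {
    data[offset++] = (uint8_t)((uint64_t)ad_len >> (8 * i));
  }
  for (i = 0; i < 8; i++) {
    data[offset++] = (uint8_t)((uint64_t)ciphertext_len >> (8 * i));
  }
  memcpy(data + offset, nonce, 12);
  offset += 12;
  memcpy(data + offset, ad, ad_len);
  offset += ad_len;
  /* The padding bytes are already zero in |data|; skipping over them brings
   * the offset to the next SHA-256 block boundary. */
  offset += (SHA256_CBLOCK - (offset % SHA256_CBLOCK)) % SHA256_CBLOCK;
  memcpy(data + offset, ciphertext, ciphertext_len);
  offset += ciphertext_len;

  unsigned out_len;
  HMAC(EVP_sha256(), hmac_key, 32, data, offset, out, &out_len);
  /* |out| should now equal the result of |hmac_calculate| for these inputs. */
}
#endif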
static void aead_aes_ctr_hmac_sha256_crypt(
    const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx, uint8_t *out,
    const uint8_t *in, size_t len, const uint8_t *nonce) {
  /* Since the AEAD operation is one-shot, keeping a buffer of unused keystream
   * bytes is pointless. However, |CRYPTO_ctr128_encrypt| requires it. */
  uint8_t partial_block_buffer[AES_BLOCK_SIZE];
  unsigned partial_block_offset = 0;
  memset(partial_block_buffer, 0, sizeof(partial_block_buffer));

  /* The counter block is the 12-byte nonce followed by a 32-bit block counter
   * that starts at zero. */
  uint8_t counter[AES_BLOCK_SIZE];
  memcpy(counter, nonce, EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN);
  memset(counter + EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN, 0, 4);

  if (aes_ctx->ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &aes_ctx->ks.ks, counter,
                                partial_block_buffer, &partial_block_offset,
                                aes_ctx->ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &aes_ctx->ks.ks, counter,
                          partial_block_buffer, &partial_block_offset,
                          aes_ctx->block);
  }
}
static int aead_aes_ctr_hmac_sha256_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                         size_t *out_len, size_t max_out_len,
                                         const uint8_t *nonce, size_t nonce_len,
                                         const uint8_t *in, size_t in_len,
                                         const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  const uint64_t in_len_64 = in_len;

  if (in_len + aes_ctx->tag_len < in_len ||
      /* This input is so large it would overflow the 32-bit block counter. */
      in_len_64 >= (OPENSSL_U64(1) << 32) * AES_BLOCK_SIZE) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + aes_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  aead_aes_ctr_hmac_sha256_crypt(aes_ctx, out, in, in_len, nonce);

  uint8_t hmac_result[SHA256_DIGEST_LENGTH];
  hmac_calculate(hmac_result, &aes_ctx->inner_init_state,
                 &aes_ctx->outer_init_state, ad, ad_len, nonce, out, in_len);
  memcpy(out + in_len, hmac_result, aes_ctx->tag_len);
  *out_len = in_len + aes_ctx->tag_len;

  return 1;
}
static int aead_aes_ctr_hmac_sha256_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                         size_t *out_len, size_t max_out_len,
                                         const uint8_t *nonce, size_t nonce_len,
                                         const uint8_t *in, size_t in_len,
                                         const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  size_t plaintext_len;

  if (in_len < aes_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  plaintext_len = in_len - aes_ctx->tag_len;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  uint8_t hmac_result[SHA256_DIGEST_LENGTH];
  hmac_calculate(hmac_result, &aes_ctx->inner_init_state,
                 &aes_ctx->outer_init_state, ad, ad_len, nonce, in,
                 plaintext_len);
  if (CRYPTO_memcmp(hmac_result, in + plaintext_len, aes_ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  aead_aes_ctr_hmac_sha256_crypt(aes_ctx, out, in, plaintext_len, nonce);

  *out_len = plaintext_len;
  return 1;
}
static const EVP_AEAD aead_aes_128_ctr_hmac_sha256 = {
    16 /* AES key */ + 32 /* HMAC key */,
    12,                                   /* nonce length */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* overhead */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* max tag length */
    aead_aes_ctr_hmac_sha256_init,
    NULL /* init_with_direction */,
    aead_aes_ctr_hmac_sha256_cleanup,
    aead_aes_ctr_hmac_sha256_seal,
    aead_aes_ctr_hmac_sha256_open,
    NULL /* get_rc4_state */,
};

static const EVP_AEAD aead_aes_256_ctr_hmac_sha256 = {
    32 /* AES key */ + 32 /* HMAC key */,
    12,                                   /* nonce length */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* overhead */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* max tag length */
    aead_aes_ctr_hmac_sha256_init,
    NULL /* init_with_direction */,
    aead_aes_ctr_hmac_sha256_cleanup,
    aead_aes_ctr_hmac_sha256_seal,
    aead_aes_ctr_hmac_sha256_open,
    NULL /* get_rc4_state */,
};

const EVP_AEAD *EVP_aead_aes_128_ctr_hmac_sha256(void) {
  return &aead_aes_128_ctr_hmac_sha256;
}

const EVP_AEAD *EVP_aead_aes_256_ctr_hmac_sha256(void) {
  return &aead_aes_256_ctr_hmac_sha256;
}
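/* Illustrative sketch (not part of the library; kept out of the build): a
 * seal/open round trip with AES-128-CTR-HMAC-SHA256. The 48-byte key is the
 * 16-byte AES key followed by the 32-byte HMAC key. This is a minimal sketch
 * assuming a plaintext of at most 1024 bytes; the |example_*| name is
 * hypothetical. */
#if 0
static int example_ctr_hmac_roundtrip(const uint8_t key[48],
                                      const uint8_t nonce[12],
                                      const uint8_t *plaintext,
                                      size_t plaintext_len) {
  uint8_t sealed[1024 + EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN];
  uint8_t opened[1024];
  size_t sealed_len, opened_len;

  if (plaintext_len > 1024) {
    return 0;
  }

  EVP_AEAD_CTX ctx;
  if (!EVP_AEAD_CTX_init(&ctx, EVP_aead_aes_128_ctr_hmac_sha256(), key, 48,
                         EVP_AEAD_DEFAULT_TAG_LENGTH, NULL)) {
    return 0;
  }
  int ok =
      EVP_AEAD_CTX_seal(&ctx, sealed, &sealed_len, sizeof(sealed), nonce, 12,
                        plaintext, plaintext_len, NULL, 0) &&
      EVP_AEAD_CTX_open(&ctx, opened, &opened_len, sizeof(opened), nonce, 12,
                        sealed, sealed_len, NULL, 0) &&
      opened_len == plaintext_len &&
      CRYPTO_memcmp(opened, plaintext, plaintext_len) == 0;
  EVP_AEAD_CTX_cleanup(&ctx);
  return ok;
}
#endif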
int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return aesni_capable() && crypto_gcm_clmul_enabled();
#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
  return hwaes_capable() && (OPENSSL_armcap_P & ARMV8_PMULL);
#else
  return 0;
#endif
}
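/* Illustrative sketch (not part of the library; kept out of the build):
 * |EVP_has_aes_hardware| lets callers prefer an AES-based AEAD when AES and
 * GHASH have hardware support and fall back to ChaCha20-Poly1305 otherwise.
 * A minimal sketch, assuming both AEADs are compiled in: */
#if 0
static const EVP_AEAD *example_pick_aead(void) {
  return EVP_has_aes_hardware() ? EVP_aead_aes_128_gcm()
                                : EVP_aead_chacha20_poly1305();
}
#endif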