/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */

#include <stdlib.h> /* for abort() in the fallback stubs below */
#include <string.h> /* for memcpy() */

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/cpu.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/modes.h>
#include <openssl/obj.h>
#include <openssl/rand.h>

#include "internal.h"
#include "../modes/internal.h"

typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;
  block128_f block;
  union {
    cbc128_f cbc;
    ctr128_f ctr;
  } stream;
} EVP_AES_KEY;

typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;        /* AES key schedule to use */
  int key_set; /* Set if key initialised */
  int iv_set;  /* Set if an iv is set */
  GCM128_CONTEXT gcm;
  uint8_t *iv; /* Temporary IV store */
  int ivlen;   /* IV length */
  int taglen;
  int iv_gen;  /* It is OK to generate IVs */
  ctr128_f ctr;
} EVP_AES_GCM_CTX;

#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
#define VPAES
extern unsigned int OPENSSL_ia32cap_P[];

static char vpaes_capable(void) {
  return (OPENSSL_ia32cap_P[1] & (1 << (41 - 32))) != 0;
}

#if defined(OPENSSL_X86_64)
#define BSAES
static char bsaes_capable(void) {
  return vpaes_capable();
}
#endif

#elif !defined(OPENSSL_NO_ASM) && defined(OPENSSL_ARM)
#include "../arm_arch.h"
#if __ARM_ARCH__ >= 7
#define BSAES
static char bsaes_capable(void) {
  return CRYPTO_is_NEON_capable();
}
#endif /* __ARM_ARCH__ >= 7 */
#endif /* OPENSSL_ARM */
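
/* On x86, feature detection reads the second word of OPENSSL_ia32cap_P, which
 * is why the bit indices above subtract 32: bit 41 (SSSE3) gates the
 * vector-permutation VPAES code, and bit 57 (AES-NI) gates aesni_capable()
 * further down this file. */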

#if defined(BSAES)
/* On platforms where BSAES gets defined (just above), these functions are
 * provided by asm. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]);
#else
static char bsaes_capable(void) {
  return 0;
}

/* On other platforms, bsaes_capable() will always return false and so the
 * following will never be called. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc) {
  abort();
}

void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]) {
  abort();
}
#endif

#if defined(VPAES)
/* On platforms where VPAES gets defined (just above), these functions are
 * provided by asm. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);
#else
static char vpaes_capable(void) {
  return 0;
}

/* On other platforms, vpaes_capable() will always return false and so the
 * following will never be called. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}

int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}

void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}
#endif

#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int aesni_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aesni_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void aesni_ecb_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
                                const void *key, const uint8_t *ivec);

#if defined(OPENSSL_X86_64)
size_t aesni_gcm_encrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const void *key, uint8_t ivec[16], uint64_t *Xi);
#define AES_gcm_encrypt aesni_gcm_encrypt

size_t aesni_gcm_decrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const void *key, uint8_t ivec[16], uint64_t *Xi);
#define AES_gcm_decrypt aesni_gcm_decrypt

void gcm_ghash_avx(uint64_t Xi[2], const u128 Htable[16], const uint8_t *in,
                   size_t len);

#define AES_GCM_ASM(gctx) \
  (gctx->ctr == aesni_ctr32_encrypt_blocks && gctx->gcm.ghash == gcm_ghash_avx)
#endif /* OPENSSL_X86_64 */

#else

/* On other platforms, aesni_capable() will always return false and so the
 * following will never be called. */
void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}

void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
                                const void *key, const uint8_t *ivec) {
  abort();
}
#endif

static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)vpaes_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
    } else {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
    }
  } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)vpaes_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
  } else {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, aes_init_key, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}
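
/* The dispatch above prefers the bit-sliced BSAES code where it applies (CBC
 * decryption and CTR), then the SSSE3-based VPAES code, and finally the
 * generic AES_* routines. The AES-NI path does not appear here: when AES-NI
 * is available, the EVP_aes_* getters at the bottom of this file return the
 * aesni_* cipher tables instead, so aes_init_key is never reached. */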

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.cbc) {
    (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
  } else if (ctx->encrypt) {
    CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  } else {
    CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  }

  return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len) {
  size_t bl = ctx->cipher->block_size;
  size_t i;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (len < bl) {
    return 1;
  }

  for (i = 0, len -= bl; i <= len; i += bl) {
    (*dat->block)(in + i, out + i, &dat->ks);
  }

  return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len) {
  unsigned int num = ctx->num;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
                                dat->stream.ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
                          dat->block);
  }
  ctx->num = (size_t)num;
  return 1;
}

static ctr128_f aes_gcm_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
                                const uint8_t *key, size_t key_len) {
  if (bsaes_capable()) {
    AES_set_encrypt_key(key, key_len * 8, aes_key);
    CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
    return (ctr128_f)bsaes_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_len * 8, aes_key);
    CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt);
    return NULL;
  }

  AES_set_encrypt_key(key, key_len * 8, aes_key);
  CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
  return NULL;
}
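
/* aes_gcm_set_key expands the key schedule and initialises the GCM context in
 * one step. It returns a ctr128_f when an accelerated 32-bit-counter CTR core
 * is available (bsaes_ctr32_encrypt_blocks here) and NULL otherwise, in which
 * case the GCM code falls back to the plain block function. */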

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  if (!iv && !key) {
    return 1;
  }

  if (key) {
    gctx->ctr = aes_gcm_set_key(&gctx->ks.ks, &gctx->gcm, key, ctx->key_len);
    /* If we have an IV, we can set it directly; otherwise use the saved IV. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* If the key is set, use the IV; otherwise copy it for later. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    } else {
      memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static int aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
  return 1;
}

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(uint8_t *counter) {
  int n = 8;
  uint8_t c;

  do {
    --n;
    c = counter[n];
    ++c;
    counter[n] = c;
    if (c) {
      return;
    }
  } while (n);
}
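
/* ctr64_inc is a big-endian increment with carry: a counter ending ...00 ff
 * becomes ...01 00, and eight bytes of 0xff wrap to all zeroes. Callers only
 * ever pass the last 8 bytes of the IV, so the fixed field is never
 * modified. */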

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      /* Allocate memory for IV if needed */
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_GCM_SET_TAG:
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_GCM_GET_TAG:
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
      /* Special case: -1 length restores whole IV */
      if (arg == -1) {
        memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }

      /* Fixed field must be at least 4 bytes and invocation field
       * at least 8. */
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      if (arg) {
        memcpy(gctx->iv, ptr, arg);
      }
      if (c->encrypt &&
          RAND_pseudo_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      /* Invocation field will be at least 8 bytes in size and
       * so no need to check wrap around or increment more than
       * last 8 bytes. */
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    default:
      return -1;
  }
}
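
/* EVP_CTRL_GCM_SET_IV_FIXED and EVP_CTRL_GCM_IV_GEN together implement the
 * "fixed field || invocation field" IV construction: the caller supplies at
 * least 4 fixed bytes, and the remaining (at least 8) bytes form a counter
 * that ctr64_inc advances after each generated IV, so a nonce is never
 * repeated under one key. */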

static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  /* If not set up, return error */
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (in) {
    if (out == NULL) {
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        size_t bulk = 0;
#if defined(AES_GCM_ASM)
        if (len >= 32 && AES_GCM_ASM(gctx)) {
          size_t res = (16 - gctx->gcm.mres) % 16;

          if (!CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) {
            return -1;
          }

          bulk = AES_gcm_encrypt(in + res, out + res, len - res, gctx->gcm.key,
                                 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
          gctx->gcm.len.u[1] += bulk;
          bulk += res;
        }
#endif
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, in + bulk, out + bulk,
                                         len - bulk, gctx->ctr)) {
          return -1;
        }
      } else {
        size_t bulk = 0;
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, in + bulk, out + bulk,
                                   len - bulk)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        size_t bulk = 0;
#if defined(AES_GCM_ASM)
        if (len >= 16 && AES_GCM_ASM(gctx)) {
          size_t res = (16 - gctx->gcm.mres) % 16;

          if (!CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) {
            return -1;
          }

          bulk = AES_gcm_decrypt(in + res, out + res, len - res, gctx->gcm.key,
                                 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
          gctx->gcm.len.u[1] += bulk;
          bulk += res;
        }
#endif
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, in + bulk, out + bulk,
                                         len - bulk, gctx->ctr)) {
          return -1;
        }
      } else {
        size_t bulk = 0;
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, in + bulk, out + bulk,
                                   len - bulk)) {
          return -1;
        }
      }
    }

    return len;
  } else {
    if (!ctx->encrypt) {
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }

    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;

    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
  }
}
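
/* Because the GCM ciphers set EVP_CIPH_FLAG_CUSTOM_CIPHER, aes_gcm_cipher
 * follows the custom-cipher calling convention: in == NULL finalises the
 * operation (computing or checking the tag), out == NULL feeds additional
 * authenticated data, and the return value is the number of bytes produced,
 * or -1 on error. */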

static const EVP_CIPHER aes_128_cbc = {
    NID_aes_128_cbc, 16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key, aes_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_ctr = {
    NID_aes_128_ctr, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_ecb = {
    NID_aes_128_ecb, 16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key, aes_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

static const EVP_CIPHER aes_256_cbc = {
    NID_aes_256_cbc, 16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key, aes_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_ctr = {
    NID_aes_256_ctr, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_ecb = {
    NID_aes_256_ecb, 16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key, aes_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aes_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))

/* AES-NI section. */

static char aesni_capable(void) {
  return (OPENSSL_ia32cap_P[1] & (1 << (57 - 32))) != 0;
}

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                          const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_decrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
  } else {
    ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_encrypt;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    } else {
      dat->stream.cbc = NULL;
    }
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, aesni_init_key, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                            const uint8_t *in, size_t len) {
  aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);
  return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                            const uint8_t *in, size_t len) {
  size_t bl = ctx->cipher->block_size;

  if (len < bl) {
    return 1;
  }

  aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
  return 1;
}

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                              const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  if (!iv && !key) {
    return 1;
  }

  if (key) {
    aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
    CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt);
    gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    /* If we have an IV, we can set it directly; otherwise use the
     * saved IV. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* If the key is set, use the IV; otherwise copy it for later. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    } else {
      memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static const EVP_CIPHER aesni_128_cbc = {
    NID_aes_128_cbc, 16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key, aesni_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ctr = {
    NID_aes_128_ctr, 1 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ecb = {
    NID_aes_128_ecb, 16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key, aesni_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

static const EVP_CIPHER aesni_256_cbc = {
    NID_aes_256_cbc, 16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key, aesni_cbc_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ctr = {
    NID_aes_256_ctr, 1 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key, aes_ctr_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ecb = {
    NID_aes_256_ecb, 16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */, sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key, aesni_ecb_cipher,
    NULL /* cleanup */, NULL /* ctrl */};

static const EVP_CIPHER aesni_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    if (aesni_capable()) {                             \
      return &aesni_##keybits##_##mode;                \
    } else {                                           \
      return &aes_##keybits##_##mode;                  \
    }                                                  \
  }

#else /* ^^^ OPENSSL_X86_64 || OPENSSL_X86 */

static char aesni_capable(void) {
  return 0;
}

#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    return &aes_##keybits##_##mode;                    \
  }

#endif

EVP_CIPHER_FUNCTION(128, cbc)
EVP_CIPHER_FUNCTION(128, ctr)
EVP_CIPHER_FUNCTION(128, ecb)
EVP_CIPHER_FUNCTION(128, gcm)

EVP_CIPHER_FUNCTION(256, cbc)
EVP_CIPHER_FUNCTION(256, ctr)
EVP_CIPHER_FUNCTION(256, ecb)
EVP_CIPHER_FUNCTION(256, gcm)
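
/* A minimal usage sketch of the GCM cipher through the generic EVP interface
 * (error checking elided; key, nonce, ad, in, out and their lengths are
 * placeholder buffers supplied by the caller):
 *
 *   EVP_CIPHER_CTX ctx;
 *   int len;
 *   EVP_CIPHER_CTX_init(&ctx);
 *   EVP_EncryptInit_ex(&ctx, EVP_aes_128_gcm(), NULL, key, nonce);
 *   EVP_EncryptUpdate(&ctx, NULL, &len, ad, ad_len);  // AAD-only pass
 *   EVP_EncryptUpdate(&ctx, out, &len, in, in_len);   // encrypt
 *   EVP_EncryptFinal_ex(&ctx, out + len, &len);       // finish, compute tag
 *   EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_GCM_GET_TAG, 16, tag);
 *   EVP_CIPHER_CTX_cleanup(&ctx);
 */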

#define EVP_AEAD_AES_GCM_TAG_LEN 16

struct aead_aes_gcm_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  GCM128_CONTEXT gcm;
  ctr128_f ctr;
  uint8_t tag_len;
};

static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t tag_len) {
  struct aead_aes_gcm_ctx *gcm_ctx;
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_init, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_init, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
  if (gcm_ctx == NULL) {
    return 0;
  }

  if (aesni_capable()) {
    aesni_set_encrypt_key(key, key_len * 8, &gcm_ctx->ks.ks);
    CRYPTO_gcm128_init(&gcm_ctx->gcm, &gcm_ctx->ks.ks,
                       (block128_f)aesni_encrypt);
    gcm_ctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
  } else {
    gcm_ctx->ctr =
        aes_gcm_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm, key, key_len);
  }

  gcm_ctx->tag_len = tag_len;
  ctx->aead_state = gcm_ctx;

  return 1;
}

static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  OPENSSL_cleanse(gcm_ctx, sizeof(struct aead_aes_gcm_ctx));
  OPENSSL_free(gcm_ctx);
}

static int aead_aes_gcm_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                             size_t *out_len, size_t max_out_len,
                             const uint8_t *nonce, size_t nonce_len,
                             const uint8_t *in, size_t in_len,
                             const uint8_t *ad, size_t ad_len) {
  size_t bulk = 0;
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  GCM128_CONTEXT gcm;

  if (in_len + gcm_ctx->tag_len < in_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_seal, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + gcm_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_seal, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);

  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, in + bulk, out + bulk,
                                     in_len - bulk, gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, in + bulk, out + bulk, in_len - bulk)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, out + in_len, gcm_ctx->tag_len);
  *out_len = in_len + gcm_ctx->tag_len;
  return 1;
}

static int aead_aes_gcm_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                             size_t *out_len, size_t max_out_len,
                             const uint8_t *nonce, size_t nonce_len,
                             const uint8_t *in, size_t in_len,
                             const uint8_t *ad, size_t ad_len) {
  size_t bulk = 0;
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];
  size_t plaintext_len;
  GCM128_CONTEXT gcm;

  if (in_len < gcm_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_open, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  plaintext_len = in_len - gcm_ctx->tag_len;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_open, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);

  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, in + bulk, out + bulk,
                                     in_len - bulk - gcm_ctx->tag_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, in + bulk, out + bulk,
                               in_len - bulk - gcm_ctx->tag_len)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, tag, gcm_ctx->tag_len);
  if (CRYPTO_memcmp(tag, in + plaintext_len, gcm_ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_open, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  *out_len = plaintext_len;
  return 1;
}

static const EVP_AEAD aead_aes_128_gcm = {
    16,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
};

static const EVP_AEAD aead_aes_256_gcm = {
    32,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
};

const EVP_AEAD *EVP_aead_aes_128_gcm(void) { return &aead_aes_128_gcm; }

const EVP_AEAD *EVP_aead_aes_256_gcm(void) { return &aead_aes_256_gcm; }
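
/* A minimal usage sketch of the one-shot AEAD interface exported above
 * (error checking elided; key, nonce, plaintext, ad and their lengths are
 * placeholder buffers supplied by the caller):
 *
 *   EVP_AEAD_CTX aead;
 *   size_t out_len;
 *   uint8_t out[PLAINTEXT_LEN + EVP_AEAD_AES_GCM_TAG_LEN];
 *   EVP_AEAD_CTX_init(&aead, EVP_aead_aes_128_gcm(), key, 16,
 *                     EVP_AEAD_DEFAULT_TAG_LENGTH, NULL);  // NULL = no ENGINE
 *   EVP_AEAD_CTX_seal(&aead, out, &out_len, sizeof(out), nonce, 12,
 *                     plaintext, PLAINTEXT_LEN, ad, ad_len);
 *   EVP_AEAD_CTX_cleanup(&aead);
 */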