You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 

770 lines
23 KiB

  1. /* Based on the public domain implementation in
  2. * crypto_hash/sha512/ref/ from http://bench.cr.yp.to/supercop.html
  3. * by D. J. Bernstein */
  4. #include <stddef.h>
  5. #include <stdint.h>
  6. #include <stdlib.h>
  7. #include <string.h>
  8. #include "sha2.h"
  9. static uint32_t load_bigendian_32(const uint8_t *x) {
  10. return (uint32_t)(x[3]) | (((uint32_t)(x[2])) << 8) |
  11. (((uint32_t)(x[1])) << 16) | (((uint32_t)(x[0])) << 24);
  12. }
  13. static uint64_t load_bigendian_64(const uint8_t *x) {
  14. return (uint64_t)(x[7]) | (((uint64_t)(x[6])) << 8) |
  15. (((uint64_t)(x[5])) << 16) | (((uint64_t)(x[4])) << 24) |
  16. (((uint64_t)(x[3])) << 32) | (((uint64_t)(x[2])) << 40) |
  17. (((uint64_t)(x[1])) << 48) | (((uint64_t)(x[0])) << 56);
  18. }
  19. static void store_bigendian_32(uint8_t *x, uint64_t u) {
  20. x[3] = (uint8_t) u;
  21. u >>= 8;
  22. x[2] = (uint8_t) u;
  23. u >>= 8;
  24. x[1] = (uint8_t) u;
  25. u >>= 8;
  26. x[0] = (uint8_t) u;
  27. }
  28. static void store_bigendian_64(uint8_t *x, uint64_t u) {
  29. x[7] = (uint8_t) u;
  30. u >>= 8;
  31. x[6] = (uint8_t) u;
  32. u >>= 8;
  33. x[5] = (uint8_t) u;
  34. u >>= 8;
  35. x[4] = (uint8_t) u;
  36. u >>= 8;
  37. x[3] = (uint8_t) u;
  38. u >>= 8;
  39. x[2] = (uint8_t) u;
  40. u >>= 8;
  41. x[1] = (uint8_t) u;
  42. u >>= 8;
  43. x[0] = (uint8_t) u;
  44. }
/* FIPS 180-4 primitive functions.  ROTR_* is rotate-right on a 32/64-bit
 * word; every call site below uses 0 < c < width, so the (width - c)
 * counter-shift is well defined. */
#define SHR(x, c) ((x) >> (c))
#define ROTR_32(x, c) (((x) >> (c)) | ((x) << (32 - (c))))
#define ROTR_64(x, c) (((x) >> (c)) | ((x) << (64 - (c))))
/* Ch = "choose" (x selects y or z); Maj = bitwise majority of x, y, z. */
#define Ch(x, y, z) (((x) & (y)) ^ (~(x) & (z)))
#define Maj(x, y, z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
/* Big-sigma (round) functions with the FIPS 180-4 rotation amounts. */
#define Sigma0_32(x) (ROTR_32(x, 2) ^ ROTR_32(x,13) ^ ROTR_32(x,22))
#define Sigma1_32(x) (ROTR_32(x, 6) ^ ROTR_32(x,11) ^ ROTR_32(x,25))
/* Small-sigma (message schedule) functions. */
#define sigma0_32(x) (ROTR_32(x, 7) ^ ROTR_32(x,18) ^ SHR(x, 3))
#define sigma1_32(x) (ROTR_32(x,17) ^ ROTR_32(x,19) ^ SHR(x,10))
#define Sigma0_64(x) (ROTR_64(x, 28) ^ ROTR_64(x, 34) ^ ROTR_64(x, 39))
#define Sigma1_64(x) (ROTR_64(x, 14) ^ ROTR_64(x, 18) ^ ROTR_64(x, 41))
#define sigma0_64(x) (ROTR_64(x, 1) ^ ROTR_64(x, 8) ^ SHR(x, 7))
#define sigma1_64(x) (ROTR_64(x, 19) ^ ROTR_64(x, 61) ^ SHR(x, 6))
/* One message-schedule word update: w0 += sigma1(w14) + w9 + sigma0(w1). */
#define M_32(w0, w14, w9, w1) w0 = sigma1_32(w14) + (w9) + sigma0_32(w1) + (w0);
#define M_64(w0, w14, w9, w1) w0 = sigma1_64(w14) + (w9) + sigma0_64(w1) + (w0);
/* Expand the 16-word schedule in place, producing the next 16 words. */
#define EXPAND_32 \
M_32(w0, w14, w9, w1) \
M_32(w1, w15, w10, w2) \
M_32(w2, w0, w11, w3) \
M_32(w3, w1, w12, w4) \
M_32(w4, w2, w13, w5) \
M_32(w5, w3, w14, w6) \
M_32(w6, w4, w15, w7) \
M_32(w7, w5, w0, w8) \
M_32(w8, w6, w1, w9) \
M_32(w9, w7, w2, w10) \
M_32(w10, w8, w3, w11) \
M_32(w11, w9, w4, w12) \
M_32(w12, w10, w5, w13) \
M_32(w13, w11, w6, w14) \
M_32(w14, w12, w7, w15) \
M_32(w15, w13, w8, w0)
#define EXPAND_64 \
M_64(w0, w14, w9, w1) \
M_64(w1, w15, w10, w2) \
M_64(w2, w0, w11, w3) \
M_64(w3, w1, w12, w4) \
M_64(w4, w2, w13, w5) \
M_64(w5, w3, w14, w6) \
M_64(w6, w4, w15, w7) \
M_64(w7, w5, w0, w8) \
M_64(w8, w6, w1, w9) \
M_64(w9, w7, w2, w10) \
M_64(w10, w8, w3, w11) \
M_64(w11, w9, w4, w12) \
M_64(w12, w10, w5, w13) \
M_64(w13, w11, w6, w14) \
M_64(w14, w12, w7, w15) \
M_64(w15, w13, w8, w0)
/* One compression round: mixes schedule word w and round constant k into
 * the working variables a..h (T1/T2 are scratch declared by the caller),
 * then rotates the variables one position. */
#define F_32(w, k) \
T1 = h + Sigma1_32(e) + Ch(e, f, g) + (k) + (w); \
T2 = Sigma0_32(a) + Maj(a, b, c); \
h = g; \
g = f; \
f = e; \
e = d + T1; \
d = c; \
c = b; \
b = a; \
a = T1 + T2;
#define F_64(w, k) \
T1 = h + Sigma1_64(e) + Ch(e, f, g) + (k) + (w); \
T2 = Sigma0_64(a) + Maj(a, b, c); \
h = g; \
g = f; \
f = e; \
e = d + T1; \
d = c; \
c = b; \
b = a; \
a = T1 + T2;
/* SHA-256 compression core. `statebytes` holds the eight 32-bit chaining
 * values in big-endian order. Consumes as many whole 64-byte blocks from
 * `in` as `inlen` allows, updating the state in place, and returns the
 * number of unprocessed trailing bytes (inlen mod 64). */
static size_t crypto_hashblocks_sha256(uint8_t *statebytes,
                                       const uint8_t *in, size_t inlen) {
    uint32_t state[8];
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
    uint32_t e;
    uint32_t f;
    uint32_t g;
    uint32_t h;
    uint32_t T1;
    uint32_t T2;
    /* Unpack the chaining state into the working variables a..h. */
    a = load_bigendian_32(statebytes + 0);
    state[0] = a;
    b = load_bigendian_32(statebytes + 4);
    state[1] = b;
    c = load_bigendian_32(statebytes + 8);
    state[2] = c;
    d = load_bigendian_32(statebytes + 12);
    state[3] = d;
    e = load_bigendian_32(statebytes + 16);
    state[4] = e;
    f = load_bigendian_32(statebytes + 20);
    state[5] = f;
    g = load_bigendian_32(statebytes + 24);
    state[6] = g;
    h = load_bigendian_32(statebytes + 28);
    state[7] = h;
    while (inlen >= 64) {
        /* Load the 16-word message schedule for this 64-byte block. */
        uint32_t w0 = load_bigendian_32(in + 0);
        uint32_t w1 = load_bigendian_32(in + 4);
        uint32_t w2 = load_bigendian_32(in + 8);
        uint32_t w3 = load_bigendian_32(in + 12);
        uint32_t w4 = load_bigendian_32(in + 16);
        uint32_t w5 = load_bigendian_32(in + 20);
        uint32_t w6 = load_bigendian_32(in + 24);
        uint32_t w7 = load_bigendian_32(in + 28);
        uint32_t w8 = load_bigendian_32(in + 32);
        uint32_t w9 = load_bigendian_32(in + 36);
        uint32_t w10 = load_bigendian_32(in + 40);
        uint32_t w11 = load_bigendian_32(in + 44);
        uint32_t w12 = load_bigendian_32(in + 48);
        uint32_t w13 = load_bigendian_32(in + 52);
        uint32_t w14 = load_bigendian_32(in + 56);
        uint32_t w15 = load_bigendian_32(in + 60);
        /* 64 rounds, fully unrolled: 16 rounds per group with the FIPS
         * 180-4 round constants, expanding the schedule between groups. */
        F_32(w0, 0x428a2f98)
        F_32(w1, 0x71374491)
        F_32(w2, 0xb5c0fbcf)
        F_32(w3, 0xe9b5dba5)
        F_32(w4, 0x3956c25b)
        F_32(w5, 0x59f111f1)
        F_32(w6, 0x923f82a4)
        F_32(w7, 0xab1c5ed5)
        F_32(w8, 0xd807aa98)
        F_32(w9, 0x12835b01)
        F_32(w10, 0x243185be)
        F_32(w11, 0x550c7dc3)
        F_32(w12, 0x72be5d74)
        F_32(w13, 0x80deb1fe)
        F_32(w14, 0x9bdc06a7)
        F_32(w15, 0xc19bf174)
        EXPAND_32
        F_32(w0, 0xe49b69c1)
        F_32(w1, 0xefbe4786)
        F_32(w2, 0x0fc19dc6)
        F_32(w3, 0x240ca1cc)
        F_32(w4, 0x2de92c6f)
        F_32(w5, 0x4a7484aa)
        F_32(w6, 0x5cb0a9dc)
        F_32(w7, 0x76f988da)
        F_32(w8, 0x983e5152)
        F_32(w9, 0xa831c66d)
        F_32(w10, 0xb00327c8)
        F_32(w11, 0xbf597fc7)
        F_32(w12, 0xc6e00bf3)
        F_32(w13, 0xd5a79147)
        F_32(w14, 0x06ca6351)
        F_32(w15, 0x14292967)
        EXPAND_32
        F_32(w0, 0x27b70a85)
        F_32(w1, 0x2e1b2138)
        F_32(w2, 0x4d2c6dfc)
        F_32(w3, 0x53380d13)
        F_32(w4, 0x650a7354)
        F_32(w5, 0x766a0abb)
        F_32(w6, 0x81c2c92e)
        F_32(w7, 0x92722c85)
        F_32(w8, 0xa2bfe8a1)
        F_32(w9, 0xa81a664b)
        F_32(w10, 0xc24b8b70)
        F_32(w11, 0xc76c51a3)
        F_32(w12, 0xd192e819)
        F_32(w13, 0xd6990624)
        F_32(w14, 0xf40e3585)
        F_32(w15, 0x106aa070)
        EXPAND_32
        F_32(w0, 0x19a4c116)
        F_32(w1, 0x1e376c08)
        F_32(w2, 0x2748774c)
        F_32(w3, 0x34b0bcb5)
        F_32(w4, 0x391c0cb3)
        F_32(w5, 0x4ed8aa4a)
        F_32(w6, 0x5b9cca4f)
        F_32(w7, 0x682e6ff3)
        F_32(w8, 0x748f82ee)
        F_32(w9, 0x78a5636f)
        F_32(w10, 0x84c87814)
        F_32(w11, 0x8cc70208)
        F_32(w12, 0x90befffa)
        F_32(w13, 0xa4506ceb)
        F_32(w14, 0xbef9a3f7)
        F_32(w15, 0xc67178f2)
        /* Davies-Meyer feed-forward: add this block's result into the
         * chaining state. */
        a += state[0];
        b += state[1];
        c += state[2];
        d += state[3];
        e += state[4];
        f += state[5];
        g += state[6];
        h += state[7];
        state[0] = a;
        state[1] = b;
        state[2] = c;
        state[3] = d;
        state[4] = e;
        state[5] = f;
        state[6] = g;
        state[7] = h;
        in += 64;
        inlen -= 64;
    }
    /* Write the updated state back in big-endian order. */
    store_bigendian_32(statebytes + 0, state[0]);
    store_bigendian_32(statebytes + 4, state[1]);
    store_bigendian_32(statebytes + 8, state[2]);
    store_bigendian_32(statebytes + 12, state[3]);
    store_bigendian_32(statebytes + 16, state[4]);
    store_bigendian_32(statebytes + 20, state[5]);
    store_bigendian_32(statebytes + 24, state[6]);
    store_bigendian_32(statebytes + 28, state[7]);
    return inlen; /* leftover bytes, always < 64 */
}
/* SHA-512 compression core. `statebytes` holds the eight 64-bit chaining
 * values in big-endian order. Consumes as many whole 128-byte blocks from
 * `in` as `inlen` allows, updating the state in place, and returns the
 * number of unprocessed trailing bytes (inlen mod 128). */
static size_t crypto_hashblocks_sha512(uint8_t *statebytes,
                                       const uint8_t *in, size_t inlen) {
    uint64_t state[8];
    uint64_t a;
    uint64_t b;
    uint64_t c;
    uint64_t d;
    uint64_t e;
    uint64_t f;
    uint64_t g;
    uint64_t h;
    uint64_t T1;
    uint64_t T2;
    /* Unpack the chaining state into the working variables a..h. */
    a = load_bigendian_64(statebytes + 0);
    state[0] = a;
    b = load_bigendian_64(statebytes + 8);
    state[1] = b;
    c = load_bigendian_64(statebytes + 16);
    state[2] = c;
    d = load_bigendian_64(statebytes + 24);
    state[3] = d;
    e = load_bigendian_64(statebytes + 32);
    state[4] = e;
    f = load_bigendian_64(statebytes + 40);
    state[5] = f;
    g = load_bigendian_64(statebytes + 48);
    state[6] = g;
    h = load_bigendian_64(statebytes + 56);
    state[7] = h;
    while (inlen >= 128) {
        /* Load the 16-word message schedule for this 128-byte block. */
        uint64_t w0 = load_bigendian_64(in + 0);
        uint64_t w1 = load_bigendian_64(in + 8);
        uint64_t w2 = load_bigendian_64(in + 16);
        uint64_t w3 = load_bigendian_64(in + 24);
        uint64_t w4 = load_bigendian_64(in + 32);
        uint64_t w5 = load_bigendian_64(in + 40);
        uint64_t w6 = load_bigendian_64(in + 48);
        uint64_t w7 = load_bigendian_64(in + 56);
        uint64_t w8 = load_bigendian_64(in + 64);
        uint64_t w9 = load_bigendian_64(in + 72);
        uint64_t w10 = load_bigendian_64(in + 80);
        uint64_t w11 = load_bigendian_64(in + 88);
        uint64_t w12 = load_bigendian_64(in + 96);
        uint64_t w13 = load_bigendian_64(in + 104);
        uint64_t w14 = load_bigendian_64(in + 112);
        uint64_t w15 = load_bigendian_64(in + 120);
        /* 80 rounds, fully unrolled: 16 rounds per group with the FIPS
         * 180-4 round constants, expanding the schedule between groups. */
        F_64(w0, 0x428a2f98d728ae22ULL)
        F_64(w1, 0x7137449123ef65cdULL)
        F_64(w2, 0xb5c0fbcfec4d3b2fULL)
        F_64(w3, 0xe9b5dba58189dbbcULL)
        F_64(w4, 0x3956c25bf348b538ULL)
        F_64(w5, 0x59f111f1b605d019ULL)
        F_64(w6, 0x923f82a4af194f9bULL)
        F_64(w7, 0xab1c5ed5da6d8118ULL)
        F_64(w8, 0xd807aa98a3030242ULL)
        F_64(w9, 0x12835b0145706fbeULL)
        F_64(w10, 0x243185be4ee4b28cULL)
        F_64(w11, 0x550c7dc3d5ffb4e2ULL)
        F_64(w12, 0x72be5d74f27b896fULL)
        F_64(w13, 0x80deb1fe3b1696b1ULL)
        F_64(w14, 0x9bdc06a725c71235ULL)
        F_64(w15, 0xc19bf174cf692694ULL)
        EXPAND_64
        F_64(w0, 0xe49b69c19ef14ad2ULL)
        F_64(w1, 0xefbe4786384f25e3ULL)
        F_64(w2, 0x0fc19dc68b8cd5b5ULL)
        F_64(w3, 0x240ca1cc77ac9c65ULL)
        F_64(w4, 0x2de92c6f592b0275ULL)
        F_64(w5, 0x4a7484aa6ea6e483ULL)
        F_64(w6, 0x5cb0a9dcbd41fbd4ULL)
        F_64(w7, 0x76f988da831153b5ULL)
        F_64(w8, 0x983e5152ee66dfabULL)
        F_64(w9, 0xa831c66d2db43210ULL)
        F_64(w10, 0xb00327c898fb213fULL)
        F_64(w11, 0xbf597fc7beef0ee4ULL)
        F_64(w12, 0xc6e00bf33da88fc2ULL)
        F_64(w13, 0xd5a79147930aa725ULL)
        F_64(w14, 0x06ca6351e003826fULL)
        F_64(w15, 0x142929670a0e6e70ULL)
        EXPAND_64
        F_64(w0, 0x27b70a8546d22ffcULL)
        F_64(w1, 0x2e1b21385c26c926ULL)
        F_64(w2, 0x4d2c6dfc5ac42aedULL)
        F_64(w3, 0x53380d139d95b3dfULL)
        F_64(w4, 0x650a73548baf63deULL)
        F_64(w5, 0x766a0abb3c77b2a8ULL)
        F_64(w6, 0x81c2c92e47edaee6ULL)
        F_64(w7, 0x92722c851482353bULL)
        F_64(w8, 0xa2bfe8a14cf10364ULL)
        F_64(w9, 0xa81a664bbc423001ULL)
        F_64(w10, 0xc24b8b70d0f89791ULL)
        F_64(w11, 0xc76c51a30654be30ULL)
        F_64(w12, 0xd192e819d6ef5218ULL)
        F_64(w13, 0xd69906245565a910ULL)
        F_64(w14, 0xf40e35855771202aULL)
        F_64(w15, 0x106aa07032bbd1b8ULL)
        EXPAND_64
        F_64(w0, 0x19a4c116b8d2d0c8ULL)
        F_64(w1, 0x1e376c085141ab53ULL)
        F_64(w2, 0x2748774cdf8eeb99ULL)
        F_64(w3, 0x34b0bcb5e19b48a8ULL)
        F_64(w4, 0x391c0cb3c5c95a63ULL)
        F_64(w5, 0x4ed8aa4ae3418acbULL)
        F_64(w6, 0x5b9cca4f7763e373ULL)
        F_64(w7, 0x682e6ff3d6b2b8a3ULL)
        F_64(w8, 0x748f82ee5defb2fcULL)
        F_64(w9, 0x78a5636f43172f60ULL)
        F_64(w10, 0x84c87814a1f0ab72ULL)
        F_64(w11, 0x8cc702081a6439ecULL)
        F_64(w12, 0x90befffa23631e28ULL)
        F_64(w13, 0xa4506cebde82bde9ULL)
        F_64(w14, 0xbef9a3f7b2c67915ULL)
        F_64(w15, 0xc67178f2e372532bULL)
        EXPAND_64
        F_64(w0, 0xca273eceea26619cULL)
        F_64(w1, 0xd186b8c721c0c207ULL)
        F_64(w2, 0xeada7dd6cde0eb1eULL)
        F_64(w3, 0xf57d4f7fee6ed178ULL)
        F_64(w4, 0x06f067aa72176fbaULL)
        F_64(w5, 0x0a637dc5a2c898a6ULL)
        F_64(w6, 0x113f9804bef90daeULL)
        F_64(w7, 0x1b710b35131c471bULL)
        F_64(w8, 0x28db77f523047d84ULL)
        F_64(w9, 0x32caab7b40c72493ULL)
        F_64(w10, 0x3c9ebe0a15c9bebcULL)
        F_64(w11, 0x431d67c49c100d4cULL)
        F_64(w12, 0x4cc5d4becb3e42b6ULL)
        F_64(w13, 0x597f299cfc657e2aULL)
        F_64(w14, 0x5fcb6fab3ad6faecULL)
        F_64(w15, 0x6c44198c4a475817ULL)
        /* Davies-Meyer feed-forward: add this block's result into the
         * chaining state. */
        a += state[0];
        b += state[1];
        c += state[2];
        d += state[3];
        e += state[4];
        f += state[5];
        g += state[6];
        h += state[7];
        state[0] = a;
        state[1] = b;
        state[2] = c;
        state[3] = d;
        state[4] = e;
        state[5] = f;
        state[6] = g;
        state[7] = h;
        in += 128;
        inlen -= 128;
    }
    /* Write the updated state back in big-endian order. */
    store_bigendian_64(statebytes + 0, state[0]);
    store_bigendian_64(statebytes + 8, state[1]);
    store_bigendian_64(statebytes + 16, state[2]);
    store_bigendian_64(statebytes + 24, state[3]);
    store_bigendian_64(statebytes + 32, state[4]);
    store_bigendian_64(statebytes + 40, state[5]);
    store_bigendian_64(statebytes + 48, state[6]);
    store_bigendian_64(statebytes + 56, state[7]);
    return inlen; /* leftover bytes, always < 128 */
}
/* Initial hash values H(0) from FIPS 180-4, serialized big-endian.
 * SHA-224 initial state (eight 32-bit words). */
static const uint8_t iv_224[32] = {
    0xc1, 0x05, 0x9e, 0xd8, 0x36, 0x7c, 0xd5, 0x07,
    0x30, 0x70, 0xdd, 0x17, 0xf7, 0x0e, 0x59, 0x39,
    0xff, 0xc0, 0x0b, 0x31, 0x68, 0x58, 0x15, 0x11,
    0x64, 0xf9, 0x8f, 0xa7, 0xbe, 0xfa, 0x4f, 0xa4
};
/* SHA-256 initial state (eight 32-bit words). */
static const uint8_t iv_256[32] = {
    0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85,
    0x3c, 0x6e, 0xf3, 0x72, 0xa5, 0x4f, 0xf5, 0x3a,
    0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05, 0x68, 0x8c,
    0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19
};
/* SHA-384 initial state (eight 64-bit words). */
static const uint8_t iv_384[64] = {
    0xcb, 0xbb, 0x9d, 0x5d, 0xc1, 0x05, 0x9e, 0xd8, 0x62, 0x9a, 0x29,
    0x2a, 0x36, 0x7c, 0xd5, 0x07, 0x91, 0x59, 0x01, 0x5a, 0x30, 0x70,
    0xdd, 0x17, 0x15, 0x2f, 0xec, 0xd8, 0xf7, 0x0e, 0x59, 0x39, 0x67,
    0x33, 0x26, 0x67, 0xff, 0xc0, 0x0b, 0x31, 0x8e, 0xb4, 0x4a, 0x87,
    0x68, 0x58, 0x15, 0x11, 0xdb, 0x0c, 0x2e, 0x0d, 0x64, 0xf9, 0x8f,
    0xa7, 0x47, 0xb5, 0x48, 0x1d, 0xbe, 0xfa, 0x4f, 0xa4
};
/* SHA-512 initial state (eight 64-bit words). */
static const uint8_t iv_512[64] = {
    0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae,
    0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94,
    0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51,
    0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c,
    0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd,
    0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79
};
  445. void sha224_inc_init(sha224ctx *state) {
  446. state->ctx = malloc(PQC_SHA256CTX_BYTES);
  447. if (state->ctx == NULL) {
  448. exit(111);
  449. }
  450. for (size_t i = 0; i < 32; ++i) {
  451. state->ctx[i] = iv_224[i];
  452. }
  453. for (size_t i = 32; i < 40; ++i) {
  454. state->ctx[i] = 0;
  455. }
  456. }
  457. void sha256_inc_init(sha256ctx *state) {
  458. state->ctx = malloc(PQC_SHA256CTX_BYTES);
  459. if (state->ctx == NULL) {
  460. exit(111);
  461. }
  462. for (size_t i = 0; i < 32; ++i) {
  463. state->ctx[i] = iv_256[i];
  464. }
  465. for (size_t i = 32; i < 40; ++i) {
  466. state->ctx[i] = 0;
  467. }
  468. }
  469. void sha384_inc_init(sha384ctx *state) {
  470. state->ctx = malloc(PQC_SHA512CTX_BYTES);
  471. if (state->ctx == NULL) {
  472. exit(111);
  473. }
  474. for (size_t i = 0; i < 64; ++i) {
  475. state->ctx[i] = iv_384[i];
  476. }
  477. for (size_t i = 64; i < 72; ++i) {
  478. state->ctx[i] = 0;
  479. }
  480. }
  481. void sha512_inc_init(sha512ctx *state) {
  482. state->ctx = malloc(PQC_SHA512CTX_BYTES);
  483. if (state->ctx == NULL) {
  484. exit(111);
  485. }
  486. for (size_t i = 0; i < 64; ++i) {
  487. state->ctx[i] = iv_512[i];
  488. }
  489. for (size_t i = 64; i < 72; ++i) {
  490. state->ctx[i] = 0;
  491. }
  492. }
  493. void sha224_inc_ctx_clone(sha224ctx *stateout, const sha224ctx *statein) {
  494. stateout->ctx = malloc(PQC_SHA256CTX_BYTES);
  495. if (stateout->ctx == NULL) {
  496. exit(111);
  497. }
  498. memcpy(stateout->ctx, statein->ctx, PQC_SHA256CTX_BYTES);
  499. }
  500. void sha256_inc_ctx_clone(sha256ctx *stateout, const sha256ctx *statein) {
  501. stateout->ctx = malloc(PQC_SHA256CTX_BYTES);
  502. if (stateout->ctx == NULL) {
  503. exit(111);
  504. }
  505. memcpy(stateout->ctx, statein->ctx, PQC_SHA256CTX_BYTES);
  506. }
  507. void sha384_inc_ctx_clone(sha384ctx *stateout, const sha384ctx *statein) {
  508. stateout->ctx = malloc(PQC_SHA512CTX_BYTES);
  509. if (stateout->ctx == NULL) {
  510. exit(111);
  511. }
  512. memcpy(stateout->ctx, statein->ctx, PQC_SHA512CTX_BYTES);
  513. }
  514. void sha512_inc_ctx_clone(sha512ctx *stateout, const sha512ctx *statein) {
  515. stateout->ctx = malloc(PQC_SHA512CTX_BYTES);
  516. if (stateout->ctx == NULL) {
  517. exit(111);
  518. }
  519. memcpy(stateout->ctx, statein->ctx, PQC_SHA512CTX_BYTES);
  520. }
  521. /* Destroy the hash state. */
  522. void sha224_inc_ctx_release(sha224ctx *state) {
  523. free(state->ctx);
  524. }
  525. /* Destroy the hash state. */
  526. void sha256_inc_ctx_release(sha256ctx *state) {
  527. free(state->ctx);
  528. }
  529. /* Destroy the hash state. */
  530. void sha384_inc_ctx_release(sha384ctx *state) {
  531. free(state->ctx);
  532. }
  533. /* Destroy the hash state. */
  534. void sha512_inc_ctx_release(sha512ctx *state) {
  535. free(state->ctx);
  536. }
  537. void sha256_inc_blocks(sha256ctx *state, const uint8_t *in, size_t inblocks) {
  538. uint64_t bytes = load_bigendian_64(state->ctx + 32);
  539. crypto_hashblocks_sha256(state->ctx, in, 64 * inblocks);
  540. bytes += 64 * inblocks;
  541. store_bigendian_64(state->ctx + 32, bytes);
  542. }
/* SHA-224 uses the SHA-256 block function unchanged (only the IV and the
 * final truncation differ). NOTE(review): the cast assumes sha224ctx and
 * sha256ctx share the same layout — confirm in sha2.h. */
void sha224_inc_blocks(sha224ctx *state, const uint8_t *in, size_t inblocks) {
    sha256_inc_blocks((sha256ctx*) state, in, inblocks);
}
  546. void sha512_inc_blocks(sha512ctx *state, const uint8_t *in, size_t inblocks) {
  547. uint64_t bytes = load_bigendian_64(state->ctx + 64);
  548. crypto_hashblocks_sha512(state->ctx, in, 128 * inblocks);
  549. bytes += 128 * inblocks;
  550. store_bigendian_64(state->ctx + 64, bytes);
  551. }
/* SHA-384 uses the SHA-512 block function unchanged (only the IV and the
 * final truncation differ). NOTE(review): the cast assumes sha384ctx and
 * sha512ctx share the same layout — confirm in sha2.h. */
void sha384_inc_blocks(sha384ctx *state, const uint8_t *in, size_t inblocks) {
    sha512_inc_blocks((sha512ctx*) state, in, inblocks);
}
/* Absorb the final `inlen` bytes of input, apply SHA-256 padding (0x80,
 * zeros, 64-bit big-endian bit count), and write the 32-byte digest to
 * `out`. Releases the context; `state` must not be used afterwards. */
void sha256_inc_finalize(uint8_t *out, sha256ctx *state, const uint8_t *in, size_t inlen) {
    uint8_t padded[128];
    /* Total message length in bytes: previously absorbed blocks + tail. */
    uint64_t bytes = load_bigendian_64(state->ctx + 32) + inlen;
    crypto_hashblocks_sha256(state->ctx, in, inlen);
    /* Rewind to the unprocessed tail (inlen mod 64 bytes). */
    in += inlen;
    inlen &= 63;
    in -= inlen;
    for (size_t i = 0; i < inlen; ++i) {
        padded[i] = in[i];
    }
    padded[inlen] = 0x80;
    if (inlen < 56) {
        /* Tail + length fit in one block. The length field is the bit
         * count (bytes << 3), stored big-endian: the shifts by 53, 45,
         * ... 5 equal (bytes << 3) >> 56, >> 48, ... >> 8. */
        for (size_t i = inlen + 1; i < 56; ++i) {
            padded[i] = 0;
        }
        padded[56] = (uint8_t) (bytes >> 53);
        padded[57] = (uint8_t) (bytes >> 45);
        padded[58] = (uint8_t) (bytes >> 37);
        padded[59] = (uint8_t) (bytes >> 29);
        padded[60] = (uint8_t) (bytes >> 21);
        padded[61] = (uint8_t) (bytes >> 13);
        padded[62] = (uint8_t) (bytes >> 5);
        padded[63] = (uint8_t) (bytes << 3);
        crypto_hashblocks_sha256(state->ctx, padded, 64);
    } else {
        /* Tail spills into a second padding block (two blocks total). */
        for (size_t i = inlen + 1; i < 120; ++i) {
            padded[i] = 0;
        }
        padded[120] = (uint8_t) (bytes >> 53);
        padded[121] = (uint8_t) (bytes >> 45);
        padded[122] = (uint8_t) (bytes >> 37);
        padded[123] = (uint8_t) (bytes >> 29);
        padded[124] = (uint8_t) (bytes >> 21);
        padded[125] = (uint8_t) (bytes >> 13);
        padded[126] = (uint8_t) (bytes >> 5);
        padded[127] = (uint8_t) (bytes << 3);
        crypto_hashblocks_sha256(state->ctx, padded, 128);
    }
    /* The first 32 context bytes are the big-endian digest. */
    for (size_t i = 0; i < 32; ++i) {
        out[i] = state->ctx[i];
    }
    sha256_inc_ctx_release(state);
}
  598. void sha224_inc_finalize(uint8_t *out, sha224ctx *state, const uint8_t *in, size_t inlen) {
  599. uint8_t tmp[32];
  600. sha256_inc_finalize(tmp, (sha256ctx*)state, in, inlen);
  601. for (size_t i = 0; i < 28; ++i) {
  602. out[i] = tmp[i];
  603. }
  604. }
/* Absorb the final `inlen` bytes of input, apply SHA-512 padding (0x80,
 * zeros, 128-bit big-endian bit count — the upper bytes of which are
 * always zero here since the counter is 64-bit), and write the 64-byte
 * digest to `out`. Releases the context; `state` must not be used
 * afterwards. */
void sha512_inc_finalize(uint8_t *out, sha512ctx *state, const uint8_t *in, size_t inlen) {
    uint8_t padded[256];
    /* Total message length in bytes: previously absorbed blocks + tail. */
    uint64_t bytes = load_bigendian_64(state->ctx + 64) + inlen;
    crypto_hashblocks_sha512(state->ctx, in, inlen);
    /* Rewind to the unprocessed tail (inlen mod 128 bytes). */
    in += inlen;
    inlen &= 127;
    in -= inlen;
    for (size_t i = 0; i < inlen; ++i) {
        padded[i] = in[i];
    }
    padded[inlen] = 0x80;
    if (inlen < 112) {
        /* Tail + length fit in one block. The zero loop also clears the
         * high bytes of the 16-byte length field; the low 9 bytes hold
         * the bit count: shifts by 61, 53, ... 5 equal (bytes << 3)
         * >> 64, >> 56, ... >> 8. */
        for (size_t i = inlen + 1; i < 119; ++i) {
            padded[i] = 0;
        }
        padded[119] = (uint8_t) (bytes >> 61);
        padded[120] = (uint8_t) (bytes >> 53);
        padded[121] = (uint8_t) (bytes >> 45);
        padded[122] = (uint8_t) (bytes >> 37);
        padded[123] = (uint8_t) (bytes >> 29);
        padded[124] = (uint8_t) (bytes >> 21);
        padded[125] = (uint8_t) (bytes >> 13);
        padded[126] = (uint8_t) (bytes >> 5);
        padded[127] = (uint8_t) (bytes << 3);
        crypto_hashblocks_sha512(state->ctx, padded, 128);
    } else {
        /* Tail spills into a second padding block (two blocks total). */
        for (size_t i = inlen + 1; i < 247; ++i) {
            padded[i] = 0;
        }
        padded[247] = (uint8_t) (bytes >> 61);
        padded[248] = (uint8_t) (bytes >> 53);
        padded[249] = (uint8_t) (bytes >> 45);
        padded[250] = (uint8_t) (bytes >> 37);
        padded[251] = (uint8_t) (bytes >> 29);
        padded[252] = (uint8_t) (bytes >> 21);
        padded[253] = (uint8_t) (bytes >> 13);
        padded[254] = (uint8_t) (bytes >> 5);
        padded[255] = (uint8_t) (bytes << 3);
        crypto_hashblocks_sha512(state->ctx, padded, 256);
    }
    /* The first 64 context bytes are the big-endian digest. */
    for (size_t i = 0; i < 64; ++i) {
        out[i] = state->ctx[i];
    }
    sha512_inc_ctx_release(state);
}
  650. void sha384_inc_finalize(uint8_t *out, sha384ctx *state, const uint8_t *in, size_t inlen) {
  651. uint8_t tmp[64];
  652. sha512_inc_finalize(tmp, (sha512ctx*)state, in, inlen);
  653. for (size_t i = 0; i < 48; ++i) {
  654. out[i] = tmp[i];
  655. }
  656. }
  657. void sha224(uint8_t *out, const uint8_t *in, size_t inlen) {
  658. sha224ctx state;
  659. sha224_inc_init(&state);
  660. sha224_inc_finalize(out, &state, in, inlen);
  661. }
  662. void sha256(uint8_t *out, const uint8_t *in, size_t inlen) {
  663. sha256ctx state;
  664. sha256_inc_init(&state);
  665. sha256_inc_finalize(out, &state, in, inlen);
  666. }
  667. void sha384(uint8_t *out, const uint8_t *in, size_t inlen) {
  668. sha384ctx state;
  669. sha384_inc_init(&state);
  670. sha384_inc_finalize(out, &state, in, inlen);
  671. }
  672. void sha512(uint8_t *out, const uint8_t *in, size_t inlen) {
  673. sha512ctx state;
  674. sha512_inc_init(&state);
  675. sha512_inc_finalize(out, &state, in, inlen);
  676. }