# qhasm: int64 input_0
# qhasm: int64 input_1
# qhasm: int64 input_2
# qhasm: int64 input_3
# qhasm: int64 input_4
# qhasm: int64 input_5
# qhasm: stack64 input_6
# qhasm: stack64 input_7
# qhasm: int64 caller_r11
# qhasm: int64 caller_r12
# qhasm: int64 caller_r13
# qhasm: int64 caller_r14
# qhasm: int64 caller_r15
# qhasm: int64 caller_rbx
# qhasm: int64 caller_rbp
# qhasm: reg256 a0
# qhasm: reg256 a1
# qhasm: reg256 a2
# qhasm: reg256 a3
# qhasm: reg256 a4
# qhasm: reg256 a5
# qhasm: reg256 a6
# qhasm: reg256 a7
# qhasm: reg256 a8
# qhasm: reg256 a9
# qhasm: reg256 a10
# qhasm: reg256 a11
# qhasm: reg256 a12
# qhasm: reg256 b0
# qhasm: reg256 b1
# qhasm: reg256 r0
# qhasm: reg256 r1
# qhasm: reg256 r2
# qhasm: reg256 r3
# qhasm: reg256 r4
# qhasm: reg256 r5
# qhasm: reg256 r6
# qhasm: reg256 r7
# qhasm: reg256 r8
# qhasm: reg256 r9
# qhasm: reg256 r10
# qhasm: reg256 r11
# qhasm: reg256 r12
# qhasm: reg256 r13
# qhasm: reg256 r14
# qhasm: reg256 r15
# qhasm: reg256 r16
# qhasm: reg256 r17
# qhasm: reg256 r18
# qhasm: reg256 r19
# qhasm: reg256 r20
# qhasm: reg256 r21
# qhasm: reg256 r22
# qhasm: reg256 r23
# qhasm: reg256 r24
# qhasm: reg256 r
# qhasm: enter vec256_mul_asm
.p2align 5
.global _PQCLEAN_MCELIECE460896F_AVX_vec256_mul_asm
.global PQCLEAN_MCELIECE460896F_AVX_vec256_mul_asm
_PQCLEAN_MCELIECE460896F_AVX_vec256_mul_asm:
PQCLEAN_MCELIECE460896F_AVX_vec256_mul_asm:
mov %rsp,%r11
and $31,%r11
add $0,%r11
sub %r11,%rsp
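# The prologue above 32-byte-aligns %rsp (the frame size is 0, hence the
# add $0). What follows is, in effect, a fully unrolled bitsliced
# multiplication in GF(2^13) (a reading of the generated code, assuming the
# usual bitsliced layout of this AVX implementation): each operand is 13
# limbs of 256 bits, limb j holding coefficient j of 256 independent field
# elements, loaded from input_1 (%rsi) and input_2 (%rdx). A 13x13
# schoolbook product is accumulated into r0..r24, with vpand acting as
# coefficient multiplication and vpxor as addition over GF(2). Whenever a
# top coefficient r_k (k = 24 down to 13) becomes complete, it is folded
# back using x^13 = x^4 + x^3 + x + 1, i.e. into r_{k-9}, r_{k-10},
# r_{k-12} and r_{k-13}. The vmovapd moves with identical source and
# destination registers are harmless no-ops left by qhasm register
# allocation.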
# qhasm: b0 = mem256[ input_2 + 0 ]
# asm 1: vmovupd 0(<input_2=int64#3),>b0=reg256#1
# asm 2: vmovupd 0(<input_2=%rdx),>b0=%ymm0
vmovupd 0(%rdx),%ymm0
# qhasm: a12 = mem256[ input_1 + 384 ]
# asm 1: vmovupd 384(<input_1=int64#2),>a12=reg256#2
# asm 2: vmovupd 384(<input_1=%rsi),>a12=%ymm1
vmovupd 384(%rsi),%ymm1
# qhasm: r12 = a12 & b0
# asm 1: vpand <a12=reg256#2,<b0=reg256#1,>r12=reg256#3
# asm 2: vpand <a12=%ymm1,<b0=%ymm0,>r12=%ymm2
vpand %ymm1,%ymm0,%ymm2
# qhasm: r13 = a12 & mem256[input_2 + 32]
# asm 1: vpand 32(<input_2=int64#3),<a12=reg256#2,>r13=reg256#4
# asm 2: vpand 32(<input_2=%rdx),<a12=%ymm1,>r13=%ymm3
vpand 32(%rdx),%ymm1,%ymm3
# qhasm: r14 = a12 & mem256[input_2 + 64]
# asm 1: vpand 64(<input_2=int64#3),<a12=reg256#2,>r14=reg256#5
# asm 2: vpand 64(<input_2=%rdx),<a12=%ymm1,>r14=%ymm4
vpand 64(%rdx),%ymm1,%ymm4
# qhasm: r15 = a12 & mem256[input_2 + 96]
# asm 1: vpand 96(<input_2=int64#3),<a12=reg256#2,>r15=reg256#6
# asm 2: vpand 96(<input_2=%rdx),<a12=%ymm1,>r15=%ymm5
vpand 96(%rdx),%ymm1,%ymm5
# qhasm: r16 = a12 & mem256[input_2 + 128]
# asm 1: vpand 128(<input_2=int64#3),<a12=reg256#2,>r16=reg256#7
# asm 2: vpand 128(<input_2=%rdx),<a12=%ymm1,>r16=%ymm6
vpand 128(%rdx),%ymm1,%ymm6
# qhasm: r17 = a12 & mem256[input_2 + 160]
# asm 1: vpand 160(<input_2=int64#3),<a12=reg256#2,>r17=reg256#8
# asm 2: vpand 160(<input_2=%rdx),<a12=%ymm1,>r17=%ymm7
vpand 160(%rdx),%ymm1,%ymm7
# qhasm: r18 = a12 & mem256[input_2 + 192]
# asm 1: vpand 192(<input_2=int64#3),<a12=reg256#2,>r18=reg256#9
# asm 2: vpand 192(<input_2=%rdx),<a12=%ymm1,>r18=%ymm8
vpand 192(%rdx),%ymm1,%ymm8
# qhasm: r19 = a12 & mem256[input_2 + 224]
# asm 1: vpand 224(<input_2=int64#3),<a12=reg256#2,>r19=reg256#10
# asm 2: vpand 224(<input_2=%rdx),<a12=%ymm1,>r19=%ymm9
vpand 224(%rdx),%ymm1,%ymm9
# qhasm: r20 = a12 & mem256[input_2 + 256]
# asm 1: vpand 256(<input_2=int64#3),<a12=reg256#2,>r20=reg256#11
# asm 2: vpand 256(<input_2=%rdx),<a12=%ymm1,>r20=%ymm10
vpand 256(%rdx),%ymm1,%ymm10
# qhasm: r21 = a12 & mem256[input_2 + 288]
# asm 1: vpand 288(<input_2=int64#3),<a12=reg256#2,>r21=reg256#12
# asm 2: vpand 288(<input_2=%rdx),<a12=%ymm1,>r21=%ymm11
vpand 288(%rdx),%ymm1,%ymm11
# qhasm: r22 = a12 & mem256[input_2 + 320]
# asm 1: vpand 320(<input_2=int64#3),<a12=reg256#2,>r22=reg256#13
# asm 2: vpand 320(<input_2=%rdx),<a12=%ymm1,>r22=%ymm12
vpand 320(%rdx),%ymm1,%ymm12
# qhasm: r23 = a12 & mem256[input_2 + 352]
# asm 1: vpand 352(<input_2=int64#3),<a12=reg256#2,>r23=reg256#14
# asm 2: vpand 352(<input_2=%rdx),<a12=%ymm1,>r23=%ymm13
vpand 352(%rdx),%ymm1,%ymm13
# qhasm: r24 = a12 & mem256[input_2 + 384]
# asm 1: vpand 384(<input_2=int64#3),<a12=reg256#2,>r24=reg256#2
# asm 2: vpand 384(<input_2=%rdx),<a12=%ymm1,>r24=%ymm1
vpand 384(%rdx),%ymm1,%ymm1
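# r24 is now complete and can be folded immediately:
# x^24 = x^11 * x^13 = x^15 + x^14 + x^12 + x^11 (mod x^13 + x^4 + x^3 + x + 1),
# so r24 is xored into r15, r14 and r12, and initializes r11.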
# qhasm: r15 ^= r24
# asm 1: vpxor <r24=reg256#2,<r15=reg256#6,<r15=reg256#6
# asm 2: vpxor <r24=%ymm1,<r15=%ymm5,<r15=%ymm5
vpxor %ymm1,%ymm5,%ymm5
# qhasm: r14 ^= r24
# asm 1: vpxor <r24=reg256#2,<r14=reg256#5,<r14=reg256#5
# asm 2: vpxor <r24=%ymm1,<r14=%ymm4,<r14=%ymm4
vpxor %ymm1,%ymm4,%ymm4
# qhasm: r12 ^= r24
# asm 1: vpxor <r24=reg256#2,<r12=reg256#3,<r12=reg256#3
# asm 2: vpxor <r24=%ymm1,<r12=%ymm2,<r12=%ymm2
vpxor %ymm1,%ymm2,%ymm2
# qhasm: r11 = r24
# asm 1: vmovapd <r24=reg256#2,>r11=reg256#2
# asm 2: vmovapd <r24=%ymm1,>r11=%ymm1
vmovapd %ymm1,%ymm1
# qhasm: a11 = mem256[ input_1 + 352 ]
# asm 1: vmovupd 352(<input_1=int64#2),>a11=reg256#15
# asm 2: vmovupd 352(<input_1=%rsi),>a11=%ymm14
vmovupd 352(%rsi),%ymm14
# qhasm: r = a11 & b0
# asm 1: vpand <a11=reg256#15,<b0=reg256#1,>r=reg256#16
# asm 2: vpand <a11=%ymm14,<b0=%ymm0,>r=%ymm15
vpand %ymm14,%ymm0,%ymm15
# qhasm: r11 ^= r
# asm 1: vpxor <r=reg256#16,<r11=reg256#2,<r11=reg256#2
# asm 2: vpxor <r=%ymm15,<r11=%ymm1,<r11=%ymm1
vpxor %ymm15,%ymm1,%ymm1
# qhasm: r = a11 & mem256[input_2 + 32]
# asm 1: vpand 32(<input_2=int64#3),<a11=reg256#15,>r=reg256#16
# asm 2: vpand 32(<input_2=%rdx),<a11=%ymm14,>r=%ymm15
vpand 32(%rdx),%ymm14,%ymm15
# qhasm: r12 ^= r
# asm 1: vpxor <r=reg256#16,<r12=reg256#3,<r12=reg256#3
# asm 2: vpxor <r=%ymm15,<r12=%ymm2,<r12=%ymm2
vpxor %ymm15,%ymm2,%ymm2
# qhasm: r = a11 & mem256[input_2 + 64]
# asm 1: vpand 64(<input_2=int64#3),<a11=reg256#15,>r=reg256#16
# asm 2: vpand 64(<input_2=%rdx),<a11=%ymm14,>r=%ymm15
vpand 64(%rdx),%ymm14,%ymm15
# qhasm: r13 ^= r
# asm 1: vpxor <r=reg256#16,<r13=reg256#4,<r13=reg256#4
# asm 2: vpxor <r=%ymm15,<r13=%ymm3,<r13=%ymm3
vpxor %ymm15,%ymm3,%ymm3
# qhasm: r = a11 & mem256[input_2 + 96]
# asm 1: vpand 96(<input_2=int64#3),<a11=reg256#15,>r=reg256#16
# asm 2: vpand 96(<input_2=%rdx),<a11=%ymm14,>r=%ymm15
vpand 96(%rdx),%ymm14,%ymm15
# qhasm: r14 ^= r
# asm 1: vpxor <r=reg256#16,<r14=reg256#5,<r14=reg256#5
# asm 2: vpxor <r=%ymm15,<r14=%ymm4,<r14=%ymm4
vpxor %ymm15,%ymm4,%ymm4
# qhasm: r = a11 & mem256[input_2 + 128]
# asm 1: vpand 128(<input_2=int64#3),<a11=reg256#15,>r=reg256#16
# asm 2: vpand 128(<input_2=%rdx),<a11=%ymm14,>r=%ymm15
vpand 128(%rdx),%ymm14,%ymm15
# qhasm: r15 ^= r
# asm 1: vpxor <r=reg256#16,<r15=reg256#6,<r15=reg256#6
# asm 2: vpxor <r=%ymm15,<r15=%ymm5,<r15=%ymm5
vpxor %ymm15,%ymm5,%ymm5
# qhasm: r = a11 & mem256[input_2 + 160]
# asm 1: vpand 160(<input_2=int64#3),<a11=reg256#15,>r=reg256#16
# asm 2: vpand 160(<input_2=%rdx),<a11=%ymm14,>r=%ymm15
vpand 160(%rdx),%ymm14,%ymm15
# qhasm: r16 ^= r
# asm 1: vpxor <r=reg256#16,<r16=reg256#7,<r16=reg256#7
# asm 2: vpxor <r=%ymm15,<r16=%ymm6,<r16=%ymm6
vpxor %ymm15,%ymm6,%ymm6
# qhasm: r = a11 & mem256[input_2 + 192]
# asm 1: vpand 192(<input_2=int64#3),<a11=reg256#15,>r=reg256#16
# asm 2: vpand 192(<input_2=%rdx),<a11=%ymm14,>r=%ymm15
vpand 192(%rdx),%ymm14,%ymm15
# qhasm: r17 ^= r
# asm 1: vpxor <r=reg256#16,<r17=reg256#8,<r17=reg256#8
# asm 2: vpxor <r=%ymm15,<r17=%ymm7,<r17=%ymm7
vpxor %ymm15,%ymm7,%ymm7
# qhasm: r = a11 & mem256[input_2 + 224]
# asm 1: vpand 224(<input_2=int64#3),<a11=reg256#15,>r=reg256#16
# asm 2: vpand 224(<input_2=%rdx),<a11=%ymm14,>r=%ymm15
vpand 224(%rdx),%ymm14,%ymm15
# qhasm: r18 ^= r
# asm 1: vpxor <r=reg256#16,<r18=reg256#9,<r18=reg256#9
# asm 2: vpxor <r=%ymm15,<r18=%ymm8,<r18=%ymm8
vpxor %ymm15,%ymm8,%ymm8
# qhasm: r = a11 & mem256[input_2 + 256]
# asm 1: vpand 256(<input_2=int64#3),<a11=reg256#15,>r=reg256#16
# asm 2: vpand 256(<input_2=%rdx),<a11=%ymm14,>r=%ymm15
vpand 256(%rdx),%ymm14,%ymm15
# qhasm: r19 ^= r
# asm 1: vpxor <r=reg256#16,<r19=reg256#10,<r19=reg256#10
# asm 2: vpxor <r=%ymm15,<r19=%ymm9,<r19=%ymm9
vpxor %ymm15,%ymm9,%ymm9
# qhasm: r = a11 & mem256[input_2 + 288]
# asm 1: vpand 288(<input_2=int64#3),<a11=reg256#15,>r=reg256#16
# asm 2: vpand 288(<input_2=%rdx),<a11=%ymm14,>r=%ymm15
vpand 288(%rdx),%ymm14,%ymm15
# qhasm: r20 ^= r
# asm 1: vpxor <r=reg256#16,<r20=reg256#11,<r20=reg256#11
# asm 2: vpxor <r=%ymm15,<r20=%ymm10,<r20=%ymm10
vpxor %ymm15,%ymm10,%ymm10
# qhasm: r = a11 & mem256[input_2 + 320]
# asm 1: vpand 320(<input_2=int64#3),<a11=reg256#15,>r=reg256#16
# asm 2: vpand 320(<input_2=%rdx),<a11=%ymm14,>r=%ymm15
vpand 320(%rdx),%ymm14,%ymm15
# qhasm: r21 ^= r
# asm 1: vpxor <r=reg256#16,<r21=reg256#12,<r21=reg256#12
# asm 2: vpxor <r=%ymm15,<r21=%ymm11,<r21=%ymm11
vpxor %ymm15,%ymm11,%ymm11
# qhasm: r = a11 & mem256[input_2 + 352]
# asm 1: vpand 352(<input_2=int64#3),<a11=reg256#15,>r=reg256#16
# asm 2: vpand 352(<input_2=%rdx),<a11=%ymm14,>r=%ymm15
vpand 352(%rdx),%ymm14,%ymm15
# qhasm: r22 ^= r
# asm 1: vpxor <r=reg256#16,<r22=reg256#13,<r22=reg256#13
# asm 2: vpxor <r=%ymm15,<r22=%ymm12,<r22=%ymm12
vpxor %ymm15,%ymm12,%ymm12
# qhasm: r = a11 & mem256[input_2 + 384]
# asm 1: vpand 384(<input_2=int64#3),<a11=reg256#15,>r=reg256#15
# asm 2: vpand 384(<input_2=%rdx),<a11=%ymm14,>r=%ymm14
vpand 384(%rdx),%ymm14,%ymm14
# qhasm: r23 ^= r
# asm 1: vpxor <r=reg256#15,<r23=reg256#14,<r23=reg256#14
# asm 2: vpxor <r=%ymm14,<r23=%ymm13,<r23=%ymm13
vpxor %ymm14,%ymm13,%ymm13
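# r23 is complete: x^23 = x^14 + x^13 + x^11 + x^10 (mod x^13 + x^4 + x^3 + x + 1),
# so r23 feeds r14, r13 and r11, and initializes r10. The rows for a10, a9,
# and so on repeat this fold one degree lower each time.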
  251. # qhasm: r14 ^= r23
  252. # asm 1: vpxor <r23=reg256#14,<r14=reg256#5,<r14=reg256#5
  253. # asm 2: vpxor <r23=%ymm13,<r14=%ymm4,<r14=%ymm4
  254. vpxor %ymm13,%ymm4,%ymm4
  255. # qhasm: r13 ^= r23
  256. # asm 1: vpxor <r23=reg256#14,<r13=reg256#4,<r13=reg256#4
  257. # asm 2: vpxor <r23=%ymm13,<r13=%ymm3,<r13=%ymm3
  258. vpxor %ymm13,%ymm3,%ymm3
  259. # qhasm: r11 ^= r23
  260. # asm 1: vpxor <r23=reg256#14,<r11=reg256#2,<r11=reg256#2
  261. # asm 2: vpxor <r23=%ymm13,<r11=%ymm1,<r11=%ymm1
  262. vpxor %ymm13,%ymm1,%ymm1
  263. # qhasm: r10 = r23
  264. # asm 1: vmovapd <r23=reg256#14,>r10=reg256#14
  265. # asm 2: vmovapd <r23=%ymm13,>r10=%ymm13
  266. vmovapd %ymm13,%ymm13
  267. # qhasm: a10 = mem256[ input_1 + 320 ]
  268. # asm 1: vmovupd 320(<input_1=int64#2),>a10=reg256#15
  269. # asm 2: vmovupd 320(<input_1=%rsi),>a10=%ymm14
  270. vmovupd 320(%rsi),%ymm14
  271. # qhasm: r = a10 & b0
  272. # asm 1: vpand <a10=reg256#15,<b0=reg256#1,>r=reg256#16
  273. # asm 2: vpand <a10=%ymm14,<b0=%ymm0,>r=%ymm15
  274. vpand %ymm14,%ymm0,%ymm15
  275. # qhasm: r10 ^= r
  276. # asm 1: vpxor <r=reg256#16,<r10=reg256#14,<r10=reg256#14
  277. # asm 2: vpxor <r=%ymm15,<r10=%ymm13,<r10=%ymm13
  278. vpxor %ymm15,%ymm13,%ymm13
  279. # qhasm: r = a10 & mem256[input_2 + 32]
  280. # asm 1: vpand 32(<input_2=int64#3),<a10=reg256#15,>r=reg256#16
  281. # asm 2: vpand 32(<input_2=%rdx),<a10=%ymm14,>r=%ymm15
  282. vpand 32(%rdx),%ymm14,%ymm15
  283. # qhasm: r11 ^= r
  284. # asm 1: vpxor <r=reg256#16,<r11=reg256#2,<r11=reg256#2
  285. # asm 2: vpxor <r=%ymm15,<r11=%ymm1,<r11=%ymm1
  286. vpxor %ymm15,%ymm1,%ymm1
  287. # qhasm: r = a10 & mem256[input_2 + 64]
  288. # asm 1: vpand 64(<input_2=int64#3),<a10=reg256#15,>r=reg256#16
  289. # asm 2: vpand 64(<input_2=%rdx),<a10=%ymm14,>r=%ymm15
  290. vpand 64(%rdx),%ymm14,%ymm15
  291. # qhasm: r12 ^= r
  292. # asm 1: vpxor <r=reg256#16,<r12=reg256#3,<r12=reg256#3
  293. # asm 2: vpxor <r=%ymm15,<r12=%ymm2,<r12=%ymm2
  294. vpxor %ymm15,%ymm2,%ymm2
  295. # qhasm: r = a10 & mem256[input_2 + 96]
  296. # asm 1: vpand 96(<input_2=int64#3),<a10=reg256#15,>r=reg256#16
  297. # asm 2: vpand 96(<input_2=%rdx),<a10=%ymm14,>r=%ymm15
  298. vpand 96(%rdx),%ymm14,%ymm15
  299. # qhasm: r13 ^= r
  300. # asm 1: vpxor <r=reg256#16,<r13=reg256#4,<r13=reg256#4
  301. # asm 2: vpxor <r=%ymm15,<r13=%ymm3,<r13=%ymm3
  302. vpxor %ymm15,%ymm3,%ymm3
  303. # qhasm: r = a10 & mem256[input_2 + 128]
  304. # asm 1: vpand 128(<input_2=int64#3),<a10=reg256#15,>r=reg256#16
  305. # asm 2: vpand 128(<input_2=%rdx),<a10=%ymm14,>r=%ymm15
  306. vpand 128(%rdx),%ymm14,%ymm15
  307. # qhasm: r14 ^= r
  308. # asm 1: vpxor <r=reg256#16,<r14=reg256#5,<r14=reg256#5
  309. # asm 2: vpxor <r=%ymm15,<r14=%ymm4,<r14=%ymm4
  310. vpxor %ymm15,%ymm4,%ymm4
  311. # qhasm: r = a10 & mem256[input_2 + 160]
  312. # asm 1: vpand 160(<input_2=int64#3),<a10=reg256#15,>r=reg256#16
  313. # asm 2: vpand 160(<input_2=%rdx),<a10=%ymm14,>r=%ymm15
  314. vpand 160(%rdx),%ymm14,%ymm15
  315. # qhasm: r15 ^= r
  316. # asm 1: vpxor <r=reg256#16,<r15=reg256#6,<r15=reg256#6
  317. # asm 2: vpxor <r=%ymm15,<r15=%ymm5,<r15=%ymm5
  318. vpxor %ymm15,%ymm5,%ymm5
  319. # qhasm: r = a10 & mem256[input_2 + 192]
  320. # asm 1: vpand 192(<input_2=int64#3),<a10=reg256#15,>r=reg256#16
  321. # asm 2: vpand 192(<input_2=%rdx),<a10=%ymm14,>r=%ymm15
  322. vpand 192(%rdx),%ymm14,%ymm15
  323. # qhasm: r16 ^= r
  324. # asm 1: vpxor <r=reg256#16,<r16=reg256#7,<r16=reg256#7
  325. # asm 2: vpxor <r=%ymm15,<r16=%ymm6,<r16=%ymm6
  326. vpxor %ymm15,%ymm6,%ymm6
  327. # qhasm: r = a10 & mem256[input_2 + 224]
  328. # asm 1: vpand 224(<input_2=int64#3),<a10=reg256#15,>r=reg256#16
  329. # asm 2: vpand 224(<input_2=%rdx),<a10=%ymm14,>r=%ymm15
  330. vpand 224(%rdx),%ymm14,%ymm15
  331. # qhasm: r17 ^= r
  332. # asm 1: vpxor <r=reg256#16,<r17=reg256#8,<r17=reg256#8
  333. # asm 2: vpxor <r=%ymm15,<r17=%ymm7,<r17=%ymm7
  334. vpxor %ymm15,%ymm7,%ymm7
  335. # qhasm: r = a10 & mem256[input_2 + 256]
  336. # asm 1: vpand 256(<input_2=int64#3),<a10=reg256#15,>r=reg256#16
  337. # asm 2: vpand 256(<input_2=%rdx),<a10=%ymm14,>r=%ymm15
  338. vpand 256(%rdx),%ymm14,%ymm15
  339. # qhasm: r18 ^= r
  340. # asm 1: vpxor <r=reg256#16,<r18=reg256#9,<r18=reg256#9
  341. # asm 2: vpxor <r=%ymm15,<r18=%ymm8,<r18=%ymm8
  342. vpxor %ymm15,%ymm8,%ymm8
  343. # qhasm: r = a10 & mem256[input_2 + 288]
  344. # asm 1: vpand 288(<input_2=int64#3),<a10=reg256#15,>r=reg256#16
  345. # asm 2: vpand 288(<input_2=%rdx),<a10=%ymm14,>r=%ymm15
  346. vpand 288(%rdx),%ymm14,%ymm15
  347. # qhasm: r19 ^= r
  348. # asm 1: vpxor <r=reg256#16,<r19=reg256#10,<r19=reg256#10
  349. # asm 2: vpxor <r=%ymm15,<r19=%ymm9,<r19=%ymm9
  350. vpxor %ymm15,%ymm9,%ymm9
  351. # qhasm: r = a10 & mem256[input_2 + 320]
  352. # asm 1: vpand 320(<input_2=int64#3),<a10=reg256#15,>r=reg256#16
  353. # asm 2: vpand 320(<input_2=%rdx),<a10=%ymm14,>r=%ymm15
  354. vpand 320(%rdx),%ymm14,%ymm15
  355. # qhasm: r20 ^= r
  356. # asm 1: vpxor <r=reg256#16,<r20=reg256#11,<r20=reg256#11
  357. # asm 2: vpxor <r=%ymm15,<r20=%ymm10,<r20=%ymm10
  358. vpxor %ymm15,%ymm10,%ymm10
  359. # qhasm: r = a10 & mem256[input_2 + 352]
  360. # asm 1: vpand 352(<input_2=int64#3),<a10=reg256#15,>r=reg256#16
  361. # asm 2: vpand 352(<input_2=%rdx),<a10=%ymm14,>r=%ymm15
  362. vpand 352(%rdx),%ymm14,%ymm15
  363. # qhasm: r21 ^= r
  364. # asm 1: vpxor <r=reg256#16,<r21=reg256#12,<r21=reg256#12
  365. # asm 2: vpxor <r=%ymm15,<r21=%ymm11,<r21=%ymm11
  366. vpxor %ymm15,%ymm11,%ymm11
  367. # qhasm: r = a10 & mem256[input_2 + 384]
  368. # asm 1: vpand 384(<input_2=int64#3),<a10=reg256#15,>r=reg256#15
  369. # asm 2: vpand 384(<input_2=%rdx),<a10=%ymm14,>r=%ymm14
  370. vpand 384(%rdx),%ymm14,%ymm14
  371. # qhasm: r22 ^= r
  372. # asm 1: vpxor <r=reg256#15,<r22=reg256#13,<r22=reg256#13
  373. # asm 2: vpxor <r=%ymm14,<r22=%ymm12,<r22=%ymm12
  374. vpxor %ymm14,%ymm12,%ymm12
  375. # qhasm: r13 ^= r22
  376. # asm 1: vpxor <r22=reg256#13,<r13=reg256#4,<r13=reg256#4
  377. # asm 2: vpxor <r22=%ymm12,<r13=%ymm3,<r13=%ymm3
  378. vpxor %ymm12,%ymm3,%ymm3
  379. # qhasm: r12 ^= r22
  380. # asm 1: vpxor <r22=reg256#13,<r12=reg256#3,<r12=reg256#3
  381. # asm 2: vpxor <r22=%ymm12,<r12=%ymm2,<r12=%ymm2
  382. vpxor %ymm12,%ymm2,%ymm2
  383. # qhasm: r10 ^= r22
  384. # asm 1: vpxor <r22=reg256#13,<r10=reg256#14,<r10=reg256#14
  385. # asm 2: vpxor <r22=%ymm12,<r10=%ymm13,<r10=%ymm13
  386. vpxor %ymm12,%ymm13,%ymm13
  387. # qhasm: r9 = r22
  388. # asm 1: vmovapd <r22=reg256#13,>r9=reg256#13
  389. # asm 2: vmovapd <r22=%ymm12,>r9=%ymm12
  390. vmovapd %ymm12,%ymm12
  391. # qhasm: a9 = mem256[ input_1 + 288 ]
  392. # asm 1: vmovupd 288(<input_1=int64#2),>a9=reg256#15
  393. # asm 2: vmovupd 288(<input_1=%rsi),>a9=%ymm14
  394. vmovupd 288(%rsi),%ymm14
  395. # qhasm: r = a9 & b0
  396. # asm 1: vpand <a9=reg256#15,<b0=reg256#1,>r=reg256#16
  397. # asm 2: vpand <a9=%ymm14,<b0=%ymm0,>r=%ymm15
  398. vpand %ymm14,%ymm0,%ymm15
  399. # qhasm: r9 ^= r
  400. # asm 1: vpxor <r=reg256#16,<r9=reg256#13,<r9=reg256#13
  401. # asm 2: vpxor <r=%ymm15,<r9=%ymm12,<r9=%ymm12
  402. vpxor %ymm15,%ymm12,%ymm12
  403. # qhasm: r = a9 & mem256[input_2 + 32]
  404. # asm 1: vpand 32(<input_2=int64#3),<a9=reg256#15,>r=reg256#16
  405. # asm 2: vpand 32(<input_2=%rdx),<a9=%ymm14,>r=%ymm15
  406. vpand 32(%rdx),%ymm14,%ymm15
  407. # qhasm: r10 ^= r
  408. # asm 1: vpxor <r=reg256#16,<r10=reg256#14,<r10=reg256#14
  409. # asm 2: vpxor <r=%ymm15,<r10=%ymm13,<r10=%ymm13
  410. vpxor %ymm15,%ymm13,%ymm13
  411. # qhasm: r = a9 & mem256[input_2 + 64]
  412. # asm 1: vpand 64(<input_2=int64#3),<a9=reg256#15,>r=reg256#16
  413. # asm 2: vpand 64(<input_2=%rdx),<a9=%ymm14,>r=%ymm15
  414. vpand 64(%rdx),%ymm14,%ymm15
  415. # qhasm: r11 ^= r
  416. # asm 1: vpxor <r=reg256#16,<r11=reg256#2,<r11=reg256#2
  417. # asm 2: vpxor <r=%ymm15,<r11=%ymm1,<r11=%ymm1
  418. vpxor %ymm15,%ymm1,%ymm1
  419. # qhasm: r = a9 & mem256[input_2 + 96]
  420. # asm 1: vpand 96(<input_2=int64#3),<a9=reg256#15,>r=reg256#16
  421. # asm 2: vpand 96(<input_2=%rdx),<a9=%ymm14,>r=%ymm15
  422. vpand 96(%rdx),%ymm14,%ymm15
  423. # qhasm: r12 ^= r
  424. # asm 1: vpxor <r=reg256#16,<r12=reg256#3,<r12=reg256#3
  425. # asm 2: vpxor <r=%ymm15,<r12=%ymm2,<r12=%ymm2
  426. vpxor %ymm15,%ymm2,%ymm2
  427. # qhasm: r = a9 & mem256[input_2 + 128]
  428. # asm 1: vpand 128(<input_2=int64#3),<a9=reg256#15,>r=reg256#16
  429. # asm 2: vpand 128(<input_2=%rdx),<a9=%ymm14,>r=%ymm15
  430. vpand 128(%rdx),%ymm14,%ymm15
  431. # qhasm: r13 ^= r
  432. # asm 1: vpxor <r=reg256#16,<r13=reg256#4,<r13=reg256#4
  433. # asm 2: vpxor <r=%ymm15,<r13=%ymm3,<r13=%ymm3
  434. vpxor %ymm15,%ymm3,%ymm3
  435. # qhasm: r = a9 & mem256[input_2 + 160]
  436. # asm 1: vpand 160(<input_2=int64#3),<a9=reg256#15,>r=reg256#16
  437. # asm 2: vpand 160(<input_2=%rdx),<a9=%ymm14,>r=%ymm15
  438. vpand 160(%rdx),%ymm14,%ymm15
  439. # qhasm: r14 ^= r
  440. # asm 1: vpxor <r=reg256#16,<r14=reg256#5,<r14=reg256#5
  441. # asm 2: vpxor <r=%ymm15,<r14=%ymm4,<r14=%ymm4
  442. vpxor %ymm15,%ymm4,%ymm4
  443. # qhasm: r = a9 & mem256[input_2 + 192]
  444. # asm 1: vpand 192(<input_2=int64#3),<a9=reg256#15,>r=reg256#16
  445. # asm 2: vpand 192(<input_2=%rdx),<a9=%ymm14,>r=%ymm15
  446. vpand 192(%rdx),%ymm14,%ymm15
  447. # qhasm: r15 ^= r
  448. # asm 1: vpxor <r=reg256#16,<r15=reg256#6,<r15=reg256#6
  449. # asm 2: vpxor <r=%ymm15,<r15=%ymm5,<r15=%ymm5
  450. vpxor %ymm15,%ymm5,%ymm5
  451. # qhasm: r = a9 & mem256[input_2 + 224]
  452. # asm 1: vpand 224(<input_2=int64#3),<a9=reg256#15,>r=reg256#16
  453. # asm 2: vpand 224(<input_2=%rdx),<a9=%ymm14,>r=%ymm15
  454. vpand 224(%rdx),%ymm14,%ymm15
  455. # qhasm: r16 ^= r
  456. # asm 1: vpxor <r=reg256#16,<r16=reg256#7,<r16=reg256#7
  457. # asm 2: vpxor <r=%ymm15,<r16=%ymm6,<r16=%ymm6
  458. vpxor %ymm15,%ymm6,%ymm6
  459. # qhasm: r = a9 & mem256[input_2 + 256]
  460. # asm 1: vpand 256(<input_2=int64#3),<a9=reg256#15,>r=reg256#16
  461. # asm 2: vpand 256(<input_2=%rdx),<a9=%ymm14,>r=%ymm15
  462. vpand 256(%rdx),%ymm14,%ymm15
  463. # qhasm: r17 ^= r
  464. # asm 1: vpxor <r=reg256#16,<r17=reg256#8,<r17=reg256#8
  465. # asm 2: vpxor <r=%ymm15,<r17=%ymm7,<r17=%ymm7
  466. vpxor %ymm15,%ymm7,%ymm7
  467. # qhasm: r = a9 & mem256[input_2 + 288]
  468. # asm 1: vpand 288(<input_2=int64#3),<a9=reg256#15,>r=reg256#16
  469. # asm 2: vpand 288(<input_2=%rdx),<a9=%ymm14,>r=%ymm15
  470. vpand 288(%rdx),%ymm14,%ymm15
  471. # qhasm: r18 ^= r
  472. # asm 1: vpxor <r=reg256#16,<r18=reg256#9,<r18=reg256#9
  473. # asm 2: vpxor <r=%ymm15,<r18=%ymm8,<r18=%ymm8
  474. vpxor %ymm15,%ymm8,%ymm8
  475. # qhasm: r = a9 & mem256[input_2 + 320]
  476. # asm 1: vpand 320(<input_2=int64#3),<a9=reg256#15,>r=reg256#16
  477. # asm 2: vpand 320(<input_2=%rdx),<a9=%ymm14,>r=%ymm15
  478. vpand 320(%rdx),%ymm14,%ymm15
  479. # qhasm: r19 ^= r
  480. # asm 1: vpxor <r=reg256#16,<r19=reg256#10,<r19=reg256#10
  481. # asm 2: vpxor <r=%ymm15,<r19=%ymm9,<r19=%ymm9
  482. vpxor %ymm15,%ymm9,%ymm9
  483. # qhasm: r = a9 & mem256[input_2 + 352]
  484. # asm 1: vpand 352(<input_2=int64#3),<a9=reg256#15,>r=reg256#16
  485. # asm 2: vpand 352(<input_2=%rdx),<a9=%ymm14,>r=%ymm15
  486. vpand 352(%rdx),%ymm14,%ymm15
  487. # qhasm: r20 ^= r
  488. # asm 1: vpxor <r=reg256#16,<r20=reg256#11,<r20=reg256#11
  489. # asm 2: vpxor <r=%ymm15,<r20=%ymm10,<r20=%ymm10
  490. vpxor %ymm15,%ymm10,%ymm10
  491. # qhasm: r = a9 & mem256[input_2 + 384]
  492. # asm 1: vpand 384(<input_2=int64#3),<a9=reg256#15,>r=reg256#15
  493. # asm 2: vpand 384(<input_2=%rdx),<a9=%ymm14,>r=%ymm14
  494. vpand 384(%rdx),%ymm14,%ymm14
  495. # qhasm: r21 ^= r
  496. # asm 1: vpxor <r=reg256#15,<r21=reg256#12,<r21=reg256#12
  497. # asm 2: vpxor <r=%ymm14,<r21=%ymm11,<r21=%ymm11
  498. vpxor %ymm14,%ymm11,%ymm11
  499. # qhasm: r12 ^= r21
  500. # asm 1: vpxor <r21=reg256#12,<r12=reg256#3,<r12=reg256#3
  501. # asm 2: vpxor <r21=%ymm11,<r12=%ymm2,<r12=%ymm2
  502. vpxor %ymm11,%ymm2,%ymm2
  503. # qhasm: r11 ^= r21
  504. # asm 1: vpxor <r21=reg256#12,<r11=reg256#2,<r11=reg256#2
  505. # asm 2: vpxor <r21=%ymm11,<r11=%ymm1,<r11=%ymm1
  506. vpxor %ymm11,%ymm1,%ymm1
  507. # qhasm: r9 ^= r21
  508. # asm 1: vpxor <r21=reg256#12,<r9=reg256#13,<r9=reg256#13
  509. # asm 2: vpxor <r21=%ymm11,<r9=%ymm12,<r9=%ymm12
  510. vpxor %ymm11,%ymm12,%ymm12
  511. # qhasm: r8 = r21
  512. # asm 1: vmovapd <r21=reg256#12,>r8=reg256#12
  513. # asm 2: vmovapd <r21=%ymm11,>r8=%ymm11
  514. vmovapd %ymm11,%ymm11
  515. # qhasm: a8 = mem256[ input_1 + 256 ]
  516. # asm 1: vmovupd 256(<input_1=int64#2),>a8=reg256#15
  517. # asm 2: vmovupd 256(<input_1=%rsi),>a8=%ymm14
  518. vmovupd 256(%rsi),%ymm14
  519. # qhasm: r = a8 & b0
  520. # asm 1: vpand <a8=reg256#15,<b0=reg256#1,>r=reg256#16
  521. # asm 2: vpand <a8=%ymm14,<b0=%ymm0,>r=%ymm15
  522. vpand %ymm14,%ymm0,%ymm15
  523. # qhasm: r8 ^= r
  524. # asm 1: vpxor <r=reg256#16,<r8=reg256#12,<r8=reg256#12
  525. # asm 2: vpxor <r=%ymm15,<r8=%ymm11,<r8=%ymm11
  526. vpxor %ymm15,%ymm11,%ymm11
  527. # qhasm: r = a8 & mem256[input_2 + 32]
  528. # asm 1: vpand 32(<input_2=int64#3),<a8=reg256#15,>r=reg256#16
  529. # asm 2: vpand 32(<input_2=%rdx),<a8=%ymm14,>r=%ymm15
  530. vpand 32(%rdx),%ymm14,%ymm15
  531. # qhasm: r9 ^= r
  532. # asm 1: vpxor <r=reg256#16,<r9=reg256#13,<r9=reg256#13
  533. # asm 2: vpxor <r=%ymm15,<r9=%ymm12,<r9=%ymm12
  534. vpxor %ymm15,%ymm12,%ymm12
  535. # qhasm: r = a8 & mem256[input_2 + 64]
  536. # asm 1: vpand 64(<input_2=int64#3),<a8=reg256#15,>r=reg256#16
  537. # asm 2: vpand 64(<input_2=%rdx),<a8=%ymm14,>r=%ymm15
  538. vpand 64(%rdx),%ymm14,%ymm15
  539. # qhasm: r10 ^= r
  540. # asm 1: vpxor <r=reg256#16,<r10=reg256#14,<r10=reg256#14
  541. # asm 2: vpxor <r=%ymm15,<r10=%ymm13,<r10=%ymm13
  542. vpxor %ymm15,%ymm13,%ymm13
  543. # qhasm: r = a8 & mem256[input_2 + 96]
  544. # asm 1: vpand 96(<input_2=int64#3),<a8=reg256#15,>r=reg256#16
  545. # asm 2: vpand 96(<input_2=%rdx),<a8=%ymm14,>r=%ymm15
  546. vpand 96(%rdx),%ymm14,%ymm15
  547. # qhasm: r11 ^= r
  548. # asm 1: vpxor <r=reg256#16,<r11=reg256#2,<r11=reg256#2
  549. # asm 2: vpxor <r=%ymm15,<r11=%ymm1,<r11=%ymm1
  550. vpxor %ymm15,%ymm1,%ymm1
  551. # qhasm: r = a8 & mem256[input_2 + 128]
  552. # asm 1: vpand 128(<input_2=int64#3),<a8=reg256#15,>r=reg256#16
  553. # asm 2: vpand 128(<input_2=%rdx),<a8=%ymm14,>r=%ymm15
  554. vpand 128(%rdx),%ymm14,%ymm15
  555. # qhasm: r12 ^= r
  556. # asm 1: vpxor <r=reg256#16,<r12=reg256#3,<r12=reg256#3
  557. # asm 2: vpxor <r=%ymm15,<r12=%ymm2,<r12=%ymm2
  558. vpxor %ymm15,%ymm2,%ymm2
  559. # qhasm: r = a8 & mem256[input_2 + 160]
  560. # asm 1: vpand 160(<input_2=int64#3),<a8=reg256#15,>r=reg256#16
  561. # asm 2: vpand 160(<input_2=%rdx),<a8=%ymm14,>r=%ymm15
  562. vpand 160(%rdx),%ymm14,%ymm15
  563. # qhasm: r13 ^= r
  564. # asm 1: vpxor <r=reg256#16,<r13=reg256#4,<r13=reg256#4
  565. # asm 2: vpxor <r=%ymm15,<r13=%ymm3,<r13=%ymm3
  566. vpxor %ymm15,%ymm3,%ymm3
  567. # qhasm: r = a8 & mem256[input_2 + 192]
  568. # asm 1: vpand 192(<input_2=int64#3),<a8=reg256#15,>r=reg256#16
  569. # asm 2: vpand 192(<input_2=%rdx),<a8=%ymm14,>r=%ymm15
  570. vpand 192(%rdx),%ymm14,%ymm15
  571. # qhasm: r14 ^= r
  572. # asm 1: vpxor <r=reg256#16,<r14=reg256#5,<r14=reg256#5
  573. # asm 2: vpxor <r=%ymm15,<r14=%ymm4,<r14=%ymm4
  574. vpxor %ymm15,%ymm4,%ymm4
  575. # qhasm: r = a8 & mem256[input_2 + 224]
  576. # asm 1: vpand 224(<input_2=int64#3),<a8=reg256#15,>r=reg256#16
  577. # asm 2: vpand 224(<input_2=%rdx),<a8=%ymm14,>r=%ymm15
  578. vpand 224(%rdx),%ymm14,%ymm15
  579. # qhasm: r15 ^= r
  580. # asm 1: vpxor <r=reg256#16,<r15=reg256#6,<r15=reg256#6
  581. # asm 2: vpxor <r=%ymm15,<r15=%ymm5,<r15=%ymm5
  582. vpxor %ymm15,%ymm5,%ymm5
  583. # qhasm: r = a8 & mem256[input_2 + 256]
  584. # asm 1: vpand 256(<input_2=int64#3),<a8=reg256#15,>r=reg256#16
  585. # asm 2: vpand 256(<input_2=%rdx),<a8=%ymm14,>r=%ymm15
  586. vpand 256(%rdx),%ymm14,%ymm15
  587. # qhasm: r16 ^= r
  588. # asm 1: vpxor <r=reg256#16,<r16=reg256#7,<r16=reg256#7
  589. # asm 2: vpxor <r=%ymm15,<r16=%ymm6,<r16=%ymm6
  590. vpxor %ymm15,%ymm6,%ymm6
  591. # qhasm: r = a8 & mem256[input_2 + 288]
  592. # asm 1: vpand 288(<input_2=int64#3),<a8=reg256#15,>r=reg256#16
  593. # asm 2: vpand 288(<input_2=%rdx),<a8=%ymm14,>r=%ymm15
  594. vpand 288(%rdx),%ymm14,%ymm15
  595. # qhasm: r17 ^= r
  596. # asm 1: vpxor <r=reg256#16,<r17=reg256#8,<r17=reg256#8
  597. # asm 2: vpxor <r=%ymm15,<r17=%ymm7,<r17=%ymm7
  598. vpxor %ymm15,%ymm7,%ymm7
  599. # qhasm: r = a8 & mem256[input_2 + 320]
  600. # asm 1: vpand 320(<input_2=int64#3),<a8=reg256#15,>r=reg256#16
  601. # asm 2: vpand 320(<input_2=%rdx),<a8=%ymm14,>r=%ymm15
  602. vpand 320(%rdx),%ymm14,%ymm15
  603. # qhasm: r18 ^= r
  604. # asm 1: vpxor <r=reg256#16,<r18=reg256#9,<r18=reg256#9
  605. # asm 2: vpxor <r=%ymm15,<r18=%ymm8,<r18=%ymm8
  606. vpxor %ymm15,%ymm8,%ymm8
  607. # qhasm: r = a8 & mem256[input_2 + 352]
  608. # asm 1: vpand 352(<input_2=int64#3),<a8=reg256#15,>r=reg256#16
  609. # asm 2: vpand 352(<input_2=%rdx),<a8=%ymm14,>r=%ymm15
  610. vpand 352(%rdx),%ymm14,%ymm15
  611. # qhasm: r19 ^= r
  612. # asm 1: vpxor <r=reg256#16,<r19=reg256#10,<r19=reg256#10
  613. # asm 2: vpxor <r=%ymm15,<r19=%ymm9,<r19=%ymm9
  614. vpxor %ymm15,%ymm9,%ymm9
  615. # qhasm: r = a8 & mem256[input_2 + 384]
  616. # asm 1: vpand 384(<input_2=int64#3),<a8=reg256#15,>r=reg256#15
  617. # asm 2: vpand 384(<input_2=%rdx),<a8=%ymm14,>r=%ymm14
  618. vpand 384(%rdx),%ymm14,%ymm14
  619. # qhasm: r20 ^= r
  620. # asm 1: vpxor <r=reg256#15,<r20=reg256#11,<r20=reg256#11
  621. # asm 2: vpxor <r=%ymm14,<r20=%ymm10,<r20=%ymm10
  622. vpxor %ymm14,%ymm10,%ymm10
  623. # qhasm: r11 ^= r20
  624. # asm 1: vpxor <r20=reg256#11,<r11=reg256#2,<r11=reg256#2
  625. # asm 2: vpxor <r20=%ymm10,<r11=%ymm1,<r11=%ymm1
  626. vpxor %ymm10,%ymm1,%ymm1
  627. # qhasm: r10 ^= r20
  628. # asm 1: vpxor <r20=reg256#11,<r10=reg256#14,<r10=reg256#14
  629. # asm 2: vpxor <r20=%ymm10,<r10=%ymm13,<r10=%ymm13
  630. vpxor %ymm10,%ymm13,%ymm13
  631. # qhasm: r8 ^= r20
  632. # asm 1: vpxor <r20=reg256#11,<r8=reg256#12,<r8=reg256#12
  633. # asm 2: vpxor <r20=%ymm10,<r8=%ymm11,<r8=%ymm11
  634. vpxor %ymm10,%ymm11,%ymm11
  635. # qhasm: r7 = r20
  636. # asm 1: vmovapd <r20=reg256#11,>r7=reg256#11
  637. # asm 2: vmovapd <r20=%ymm10,>r7=%ymm10
  638. vmovapd %ymm10,%ymm10
  639. # qhasm: a7 = mem256[ input_1 + 224 ]
  640. # asm 1: vmovupd 224(<input_1=int64#2),>a7=reg256#15
  641. # asm 2: vmovupd 224(<input_1=%rsi),>a7=%ymm14
  642. vmovupd 224(%rsi),%ymm14
  643. # qhasm: r = a7 & b0
  644. # asm 1: vpand <a7=reg256#15,<b0=reg256#1,>r=reg256#16
  645. # asm 2: vpand <a7=%ymm14,<b0=%ymm0,>r=%ymm15
  646. vpand %ymm14,%ymm0,%ymm15
  647. # qhasm: r7 ^= r
  648. # asm 1: vpxor <r=reg256#16,<r7=reg256#11,<r7=reg256#11
  649. # asm 2: vpxor <r=%ymm15,<r7=%ymm10,<r7=%ymm10
  650. vpxor %ymm15,%ymm10,%ymm10
  651. # qhasm: r = a7 & mem256[input_2 + 32]
  652. # asm 1: vpand 32(<input_2=int64#3),<a7=reg256#15,>r=reg256#16
  653. # asm 2: vpand 32(<input_2=%rdx),<a7=%ymm14,>r=%ymm15
  654. vpand 32(%rdx),%ymm14,%ymm15
  655. # qhasm: r8 ^= r
  656. # asm 1: vpxor <r=reg256#16,<r8=reg256#12,<r8=reg256#12
  657. # asm 2: vpxor <r=%ymm15,<r8=%ymm11,<r8=%ymm11
  658. vpxor %ymm15,%ymm11,%ymm11
  659. # qhasm: r = a7 & mem256[input_2 + 64]
  660. # asm 1: vpand 64(<input_2=int64#3),<a7=reg256#15,>r=reg256#16
  661. # asm 2: vpand 64(<input_2=%rdx),<a7=%ymm14,>r=%ymm15
  662. vpand 64(%rdx),%ymm14,%ymm15
  663. # qhasm: r9 ^= r
  664. # asm 1: vpxor <r=reg256#16,<r9=reg256#13,<r9=reg256#13
  665. # asm 2: vpxor <r=%ymm15,<r9=%ymm12,<r9=%ymm12
  666. vpxor %ymm15,%ymm12,%ymm12
  667. # qhasm: r = a7 & mem256[input_2 + 96]
  668. # asm 1: vpand 96(<input_2=int64#3),<a7=reg256#15,>r=reg256#16
  669. # asm 2: vpand 96(<input_2=%rdx),<a7=%ymm14,>r=%ymm15
  670. vpand 96(%rdx),%ymm14,%ymm15
  671. # qhasm: r10 ^= r
  672. # asm 1: vpxor <r=reg256#16,<r10=reg256#14,<r10=reg256#14
  673. # asm 2: vpxor <r=%ymm15,<r10=%ymm13,<r10=%ymm13
  674. vpxor %ymm15,%ymm13,%ymm13
  675. # qhasm: r = a7 & mem256[input_2 + 128]
  676. # asm 1: vpand 128(<input_2=int64#3),<a7=reg256#15,>r=reg256#16
  677. # asm 2: vpand 128(<input_2=%rdx),<a7=%ymm14,>r=%ymm15
  678. vpand 128(%rdx),%ymm14,%ymm15
  679. # qhasm: r11 ^= r
  680. # asm 1: vpxor <r=reg256#16,<r11=reg256#2,<r11=reg256#2
  681. # asm 2: vpxor <r=%ymm15,<r11=%ymm1,<r11=%ymm1
  682. vpxor %ymm15,%ymm1,%ymm1
  683. # qhasm: r = a7 & mem256[input_2 + 160]
  684. # asm 1: vpand 160(<input_2=int64#3),<a7=reg256#15,>r=reg256#16
  685. # asm 2: vpand 160(<input_2=%rdx),<a7=%ymm14,>r=%ymm15
  686. vpand 160(%rdx),%ymm14,%ymm15
  687. # qhasm: r12 ^= r
  688. # asm 1: vpxor <r=reg256#16,<r12=reg256#3,<r12=reg256#3
  689. # asm 2: vpxor <r=%ymm15,<r12=%ymm2,<r12=%ymm2
  690. vpxor %ymm15,%ymm2,%ymm2
  691. # qhasm: r = a7 & mem256[input_2 + 192]
  692. # asm 1: vpand 192(<input_2=int64#3),<a7=reg256#15,>r=reg256#16
  693. # asm 2: vpand 192(<input_2=%rdx),<a7=%ymm14,>r=%ymm15
  694. vpand 192(%rdx),%ymm14,%ymm15
  695. # qhasm: r13 ^= r
  696. # asm 1: vpxor <r=reg256#16,<r13=reg256#4,<r13=reg256#4
  697. # asm 2: vpxor <r=%ymm15,<r13=%ymm3,<r13=%ymm3
  698. vpxor %ymm15,%ymm3,%ymm3
  699. # qhasm: r = a7 & mem256[input_2 + 224]
  700. # asm 1: vpand 224(<input_2=int64#3),<a7=reg256#15,>r=reg256#16
  701. # asm 2: vpand 224(<input_2=%rdx),<a7=%ymm14,>r=%ymm15
  702. vpand 224(%rdx),%ymm14,%ymm15
  703. # qhasm: r14 ^= r
  704. # asm 1: vpxor <r=reg256#16,<r14=reg256#5,<r14=reg256#5
  705. # asm 2: vpxor <r=%ymm15,<r14=%ymm4,<r14=%ymm4
  706. vpxor %ymm15,%ymm4,%ymm4
  707. # qhasm: r = a7 & mem256[input_2 + 256]
  708. # asm 1: vpand 256(<input_2=int64#3),<a7=reg256#15,>r=reg256#16
  709. # asm 2: vpand 256(<input_2=%rdx),<a7=%ymm14,>r=%ymm15
  710. vpand 256(%rdx),%ymm14,%ymm15
  711. # qhasm: r15 ^= r
  712. # asm 1: vpxor <r=reg256#16,<r15=reg256#6,<r15=reg256#6
  713. # asm 2: vpxor <r=%ymm15,<r15=%ymm5,<r15=%ymm5
  714. vpxor %ymm15,%ymm5,%ymm5
  715. # qhasm: r = a7 & mem256[input_2 + 288]
  716. # asm 1: vpand 288(<input_2=int64#3),<a7=reg256#15,>r=reg256#16
  717. # asm 2: vpand 288(<input_2=%rdx),<a7=%ymm14,>r=%ymm15
  718. vpand 288(%rdx),%ymm14,%ymm15
  719. # qhasm: r16 ^= r
  720. # asm 1: vpxor <r=reg256#16,<r16=reg256#7,<r16=reg256#7
  721. # asm 2: vpxor <r=%ymm15,<r16=%ymm6,<r16=%ymm6
  722. vpxor %ymm15,%ymm6,%ymm6
  723. # qhasm: r = a7 & mem256[input_2 + 320]
  724. # asm 1: vpand 320(<input_2=int64#3),<a7=reg256#15,>r=reg256#16
  725. # asm 2: vpand 320(<input_2=%rdx),<a7=%ymm14,>r=%ymm15
  726. vpand 320(%rdx),%ymm14,%ymm15
  727. # qhasm: r17 ^= r
  728. # asm 1: vpxor <r=reg256#16,<r17=reg256#8,<r17=reg256#8
  729. # asm 2: vpxor <r=%ymm15,<r17=%ymm7,<r17=%ymm7
  730. vpxor %ymm15,%ymm7,%ymm7
  731. # qhasm: r = a7 & mem256[input_2 + 352]
  732. # asm 1: vpand 352(<input_2=int64#3),<a7=reg256#15,>r=reg256#16
  733. # asm 2: vpand 352(<input_2=%rdx),<a7=%ymm14,>r=%ymm15
  734. vpand 352(%rdx),%ymm14,%ymm15
  735. # qhasm: r18 ^= r
  736. # asm 1: vpxor <r=reg256#16,<r18=reg256#9,<r18=reg256#9
  737. # asm 2: vpxor <r=%ymm15,<r18=%ymm8,<r18=%ymm8
  738. vpxor %ymm15,%ymm8,%ymm8
  739. # qhasm: r = a7 & mem256[input_2 + 384]
  740. # asm 1: vpand 384(<input_2=int64#3),<a7=reg256#15,>r=reg256#15
  741. # asm 2: vpand 384(<input_2=%rdx),<a7=%ymm14,>r=%ymm14
  742. vpand 384(%rdx),%ymm14,%ymm14
  743. # qhasm: r19 ^= r
  744. # asm 1: vpxor <r=reg256#15,<r19=reg256#10,<r19=reg256#10
  745. # asm 2: vpxor <r=%ymm14,<r19=%ymm9,<r19=%ymm9
  746. vpxor %ymm14,%ymm9,%ymm9
  747. # qhasm: r10 ^= r19
  748. # asm 1: vpxor <r19=reg256#10,<r10=reg256#14,<r10=reg256#14
  749. # asm 2: vpxor <r19=%ymm9,<r10=%ymm13,<r10=%ymm13
  750. vpxor %ymm9,%ymm13,%ymm13
  751. # qhasm: r9 ^= r19
  752. # asm 1: vpxor <r19=reg256#10,<r9=reg256#13,<r9=reg256#13
  753. # asm 2: vpxor <r19=%ymm9,<r9=%ymm12,<r9=%ymm12
  754. vpxor %ymm9,%ymm12,%ymm12
  755. # qhasm: r7 ^= r19
  756. # asm 1: vpxor <r19=reg256#10,<r7=reg256#11,<r7=reg256#11
  757. # asm 2: vpxor <r19=%ymm9,<r7=%ymm10,<r7=%ymm10
  758. vpxor %ymm9,%ymm10,%ymm10
  759. # qhasm: r6 = r19
  760. # asm 1: vmovapd <r19=reg256#10,>r6=reg256#10
  761. # asm 2: vmovapd <r19=%ymm9,>r6=%ymm9
  762. vmovapd %ymm9,%ymm9
  763. # qhasm: a6 = mem256[ input_1 + 192 ]
  764. # asm 1: vmovupd 192(<input_1=int64#2),>a6=reg256#15
  765. # asm 2: vmovupd 192(<input_1=%rsi),>a6=%ymm14
  766. vmovupd 192(%rsi),%ymm14
  767. # qhasm: r = a6 & b0
  768. # asm 1: vpand <a6=reg256#15,<b0=reg256#1,>r=reg256#16
  769. # asm 2: vpand <a6=%ymm14,<b0=%ymm0,>r=%ymm15
  770. vpand %ymm14,%ymm0,%ymm15
  771. # qhasm: r6 ^= r
  772. # asm 1: vpxor <r=reg256#16,<r6=reg256#10,<r6=reg256#10
  773. # asm 2: vpxor <r=%ymm15,<r6=%ymm9,<r6=%ymm9
  774. vpxor %ymm15,%ymm9,%ymm9
  775. # qhasm: r = a6 & mem256[input_2 + 32]
  776. # asm 1: vpand 32(<input_2=int64#3),<a6=reg256#15,>r=reg256#16
  777. # asm 2: vpand 32(<input_2=%rdx),<a6=%ymm14,>r=%ymm15
  778. vpand 32(%rdx),%ymm14,%ymm15
  779. # qhasm: r7 ^= r
  780. # asm 1: vpxor <r=reg256#16,<r7=reg256#11,<r7=reg256#11
  781. # asm 2: vpxor <r=%ymm15,<r7=%ymm10,<r7=%ymm10
  782. vpxor %ymm15,%ymm10,%ymm10
  783. # qhasm: r = a6 & mem256[input_2 + 64]
  784. # asm 1: vpand 64(<input_2=int64#3),<a6=reg256#15,>r=reg256#16
  785. # asm 2: vpand 64(<input_2=%rdx),<a6=%ymm14,>r=%ymm15
  786. vpand 64(%rdx),%ymm14,%ymm15
  787. # qhasm: r8 ^= r
  788. # asm 1: vpxor <r=reg256#16,<r8=reg256#12,<r8=reg256#12
  789. # asm 2: vpxor <r=%ymm15,<r8=%ymm11,<r8=%ymm11
  790. vpxor %ymm15,%ymm11,%ymm11
  791. # qhasm: r = a6 & mem256[input_2 + 96]
  792. # asm 1: vpand 96(<input_2=int64#3),<a6=reg256#15,>r=reg256#16
  793. # asm 2: vpand 96(<input_2=%rdx),<a6=%ymm14,>r=%ymm15
  794. vpand 96(%rdx),%ymm14,%ymm15
  795. # qhasm: r9 ^= r
  796. # asm 1: vpxor <r=reg256#16,<r9=reg256#13,<r9=reg256#13
  797. # asm 2: vpxor <r=%ymm15,<r9=%ymm12,<r9=%ymm12
  798. vpxor %ymm15,%ymm12,%ymm12
  799. # qhasm: r = a6 & mem256[input_2 + 128]
  800. # asm 1: vpand 128(<input_2=int64#3),<a6=reg256#15,>r=reg256#16
  801. # asm 2: vpand 128(<input_2=%rdx),<a6=%ymm14,>r=%ymm15
  802. vpand 128(%rdx),%ymm14,%ymm15
  803. # qhasm: r10 ^= r
  804. # asm 1: vpxor <r=reg256#16,<r10=reg256#14,<r10=reg256#14
  805. # asm 2: vpxor <r=%ymm15,<r10=%ymm13,<r10=%ymm13
  806. vpxor %ymm15,%ymm13,%ymm13
  807. # qhasm: r = a6 & mem256[input_2 + 160]
  808. # asm 1: vpand 160(<input_2=int64#3),<a6=reg256#15,>r=reg256#16
  809. # asm 2: vpand 160(<input_2=%rdx),<a6=%ymm14,>r=%ymm15
  810. vpand 160(%rdx),%ymm14,%ymm15
  811. # qhasm: r11 ^= r
  812. # asm 1: vpxor <r=reg256#16,<r11=reg256#2,<r11=reg256#2
  813. # asm 2: vpxor <r=%ymm15,<r11=%ymm1,<r11=%ymm1
  814. vpxor %ymm15,%ymm1,%ymm1
  815. # qhasm: r = a6 & mem256[input_2 + 192]
  816. # asm 1: vpand 192(<input_2=int64#3),<a6=reg256#15,>r=reg256#16
  817. # asm 2: vpand 192(<input_2=%rdx),<a6=%ymm14,>r=%ymm15
  818. vpand 192(%rdx),%ymm14,%ymm15
  819. # qhasm: r12 ^= r
  820. # asm 1: vpxor <r=reg256#16,<r12=reg256#3,<r12=reg256#3
  821. # asm 2: vpxor <r=%ymm15,<r12=%ymm2,<r12=%ymm2
  822. vpxor %ymm15,%ymm2,%ymm2
  823. # qhasm: r = a6 & mem256[input_2 + 224]
  824. # asm 1: vpand 224(<input_2=int64#3),<a6=reg256#15,>r=reg256#16
  825. # asm 2: vpand 224(<input_2=%rdx),<a6=%ymm14,>r=%ymm15
  826. vpand 224(%rdx),%ymm14,%ymm15
  827. # qhasm: r13 ^= r
  828. # asm 1: vpxor <r=reg256#16,<r13=reg256#4,<r13=reg256#4
  829. # asm 2: vpxor <r=%ymm15,<r13=%ymm3,<r13=%ymm3
  830. vpxor %ymm15,%ymm3,%ymm3
  831. # qhasm: r = a6 & mem256[input_2 + 256]
  832. # asm 1: vpand 256(<input_2=int64#3),<a6=reg256#15,>r=reg256#16
  833. # asm 2: vpand 256(<input_2=%rdx),<a6=%ymm14,>r=%ymm15
  834. vpand 256(%rdx),%ymm14,%ymm15
  835. # qhasm: r14 ^= r
  836. # asm 1: vpxor <r=reg256#16,<r14=reg256#5,<r14=reg256#5
  837. # asm 2: vpxor <r=%ymm15,<r14=%ymm4,<r14=%ymm4
  838. vpxor %ymm15,%ymm4,%ymm4
  839. # qhasm: r = a6 & mem256[input_2 + 288]
  840. # asm 1: vpand 288(<input_2=int64#3),<a6=reg256#15,>r=reg256#16
  841. # asm 2: vpand 288(<input_2=%rdx),<a6=%ymm14,>r=%ymm15
  842. vpand 288(%rdx),%ymm14,%ymm15
  843. # qhasm: r15 ^= r
  844. # asm 1: vpxor <r=reg256#16,<r15=reg256#6,<r15=reg256#6
  845. # asm 2: vpxor <r=%ymm15,<r15=%ymm5,<r15=%ymm5
  846. vpxor %ymm15,%ymm5,%ymm5
  847. # qhasm: r = a6 & mem256[input_2 + 320]
  848. # asm 1: vpand 320(<input_2=int64#3),<a6=reg256#15,>r=reg256#16
  849. # asm 2: vpand 320(<input_2=%rdx),<a6=%ymm14,>r=%ymm15
  850. vpand 320(%rdx),%ymm14,%ymm15
  851. # qhasm: r16 ^= r
  852. # asm 1: vpxor <r=reg256#16,<r16=reg256#7,<r16=reg256#7
  853. # asm 2: vpxor <r=%ymm15,<r16=%ymm6,<r16=%ymm6
  854. vpxor %ymm15,%ymm6,%ymm6
  855. # qhasm: r = a6 & mem256[input_2 + 352]
  856. # asm 1: vpand 352(<input_2=int64#3),<a6=reg256#15,>r=reg256#16
  857. # asm 2: vpand 352(<input_2=%rdx),<a6=%ymm14,>r=%ymm15
  858. vpand 352(%rdx),%ymm14,%ymm15
  859. # qhasm: r17 ^= r
  860. # asm 1: vpxor <r=reg256#16,<r17=reg256#8,<r17=reg256#8
  861. # asm 2: vpxor <r=%ymm15,<r17=%ymm7,<r17=%ymm7
  862. vpxor %ymm15,%ymm7,%ymm7
  863. # qhasm: r = a6 & mem256[input_2 + 384]
  864. # asm 1: vpand 384(<input_2=int64#3),<a6=reg256#15,>r=reg256#15
  865. # asm 2: vpand 384(<input_2=%rdx),<a6=%ymm14,>r=%ymm14
  866. vpand 384(%rdx),%ymm14,%ymm14
  867. # qhasm: r18 ^= r
  868. # asm 1: vpxor <r=reg256#15,<r18=reg256#9,<r18=reg256#9
  869. # asm 2: vpxor <r=%ymm14,<r18=%ymm8,<r18=%ymm8
  870. vpxor %ymm14,%ymm8,%ymm8
  871. # qhasm: r9 ^= r18
  872. # asm 1: vpxor <r18=reg256#9,<r9=reg256#13,<r9=reg256#13
  873. # asm 2: vpxor <r18=%ymm8,<r9=%ymm12,<r9=%ymm12
  874. vpxor %ymm8,%ymm12,%ymm12
  875. # qhasm: r8 ^= r18
  876. # asm 1: vpxor <r18=reg256#9,<r8=reg256#12,<r8=reg256#12
  877. # asm 2: vpxor <r18=%ymm8,<r8=%ymm11,<r8=%ymm11
  878. vpxor %ymm8,%ymm11,%ymm11
  879. # qhasm: r6 ^= r18
  880. # asm 1: vpxor <r18=reg256#9,<r6=reg256#10,<r6=reg256#10
  881. # asm 2: vpxor <r18=%ymm8,<r6=%ymm9,<r6=%ymm9
  882. vpxor %ymm8,%ymm9,%ymm9
  883. # qhasm: r5 = r18
  884. # asm 1: vmovapd <r18=reg256#9,>r5=reg256#9
  885. # asm 2: vmovapd <r18=%ymm8,>r5=%ymm8
  886. vmovapd %ymm8,%ymm8
  887. # qhasm: a5 = mem256[ input_1 + 160 ]
  888. # asm 1: vmovupd 160(<input_1=int64#2),>a5=reg256#15
  889. # asm 2: vmovupd 160(<input_1=%rsi),>a5=%ymm14
  890. vmovupd 160(%rsi),%ymm14
  891. # qhasm: r = a5 & b0
  892. # asm 1: vpand <a5=reg256#15,<b0=reg256#1,>r=reg256#16
  893. # asm 2: vpand <a5=%ymm14,<b0=%ymm0,>r=%ymm15
  894. vpand %ymm14,%ymm0,%ymm15
  895. # qhasm: r5 ^= r
  896. # asm 1: vpxor <r=reg256#16,<r5=reg256#9,<r5=reg256#9
  897. # asm 2: vpxor <r=%ymm15,<r5=%ymm8,<r5=%ymm8
  898. vpxor %ymm15,%ymm8,%ymm8
  899. # qhasm: r = a5 & mem256[input_2 + 32]
  900. # asm 1: vpand 32(<input_2=int64#3),<a5=reg256#15,>r=reg256#16
  901. # asm 2: vpand 32(<input_2=%rdx),<a5=%ymm14,>r=%ymm15
  902. vpand 32(%rdx),%ymm14,%ymm15
  903. # qhasm: r6 ^= r
  904. # asm 1: vpxor <r=reg256#16,<r6=reg256#10,<r6=reg256#10
  905. # asm 2: vpxor <r=%ymm15,<r6=%ymm9,<r6=%ymm9
  906. vpxor %ymm15,%ymm9,%ymm9
  907. # qhasm: r = a5 & mem256[input_2 + 64]
  908. # asm 1: vpand 64(<input_2=int64#3),<a5=reg256#15,>r=reg256#16
  909. # asm 2: vpand 64(<input_2=%rdx),<a5=%ymm14,>r=%ymm15
  910. vpand 64(%rdx),%ymm14,%ymm15
  911. # qhasm: r7 ^= r
  912. # asm 1: vpxor <r=reg256#16,<r7=reg256#11,<r7=reg256#11
  913. # asm 2: vpxor <r=%ymm15,<r7=%ymm10,<r7=%ymm10
  914. vpxor %ymm15,%ymm10,%ymm10
  915. # qhasm: r = a5 & mem256[input_2 + 96]
  916. # asm 1: vpand 96(<input_2=int64#3),<a5=reg256#15,>r=reg256#16
  917. # asm 2: vpand 96(<input_2=%rdx),<a5=%ymm14,>r=%ymm15
  918. vpand 96(%rdx),%ymm14,%ymm15
  919. # qhasm: r8 ^= r
  920. # asm 1: vpxor <r=reg256#16,<r8=reg256#12,<r8=reg256#12
  921. # asm 2: vpxor <r=%ymm15,<r8=%ymm11,<r8=%ymm11
  922. vpxor %ymm15,%ymm11,%ymm11
  923. # qhasm: r = a5 & mem256[input_2 + 128]
  924. # asm 1: vpand 128(<input_2=int64#3),<a5=reg256#15,>r=reg256#16
  925. # asm 2: vpand 128(<input_2=%rdx),<a5=%ymm14,>r=%ymm15
  926. vpand 128(%rdx),%ymm14,%ymm15
  927. # qhasm: r9 ^= r
  928. # asm 1: vpxor <r=reg256#16,<r9=reg256#13,<r9=reg256#13
  929. # asm 2: vpxor <r=%ymm15,<r9=%ymm12,<r9=%ymm12
  930. vpxor %ymm15,%ymm12,%ymm12
  931. # qhasm: r = a5 & mem256[input_2 + 160]
  932. # asm 1: vpand 160(<input_2=int64#3),<a5=reg256#15,>r=reg256#16
  933. # asm 2: vpand 160(<input_2=%rdx),<a5=%ymm14,>r=%ymm15
  934. vpand 160(%rdx),%ymm14,%ymm15
  935. # qhasm: r10 ^= r
  936. # asm 1: vpxor <r=reg256#16,<r10=reg256#14,<r10=reg256#14
  937. # asm 2: vpxor <r=%ymm15,<r10=%ymm13,<r10=%ymm13
  938. vpxor %ymm15,%ymm13,%ymm13
  939. # qhasm: r = a5 & mem256[input_2 + 192]
  940. # asm 1: vpand 192(<input_2=int64#3),<a5=reg256#15,>r=reg256#16
  941. # asm 2: vpand 192(<input_2=%rdx),<a5=%ymm14,>r=%ymm15
  942. vpand 192(%rdx),%ymm14,%ymm15
  943. # qhasm: r11 ^= r
  944. # asm 1: vpxor <r=reg256#16,<r11=reg256#2,<r11=reg256#2
  945. # asm 2: vpxor <r=%ymm15,<r11=%ymm1,<r11=%ymm1
  946. vpxor %ymm15,%ymm1,%ymm1
  947. # qhasm: r = a5 & mem256[input_2 + 224]
  948. # asm 1: vpand 224(<input_2=int64#3),<a5=reg256#15,>r=reg256#16
  949. # asm 2: vpand 224(<input_2=%rdx),<a5=%ymm14,>r=%ymm15
  950. vpand 224(%rdx),%ymm14,%ymm15
  951. # qhasm: r12 ^= r
  952. # asm 1: vpxor <r=reg256#16,<r12=reg256#3,<r12=reg256#3
  953. # asm 2: vpxor <r=%ymm15,<r12=%ymm2,<r12=%ymm2
  954. vpxor %ymm15,%ymm2,%ymm2
  955. # qhasm: r = a5 & mem256[input_2 + 256]
  956. # asm 1: vpand 256(<input_2=int64#3),<a5=reg256#15,>r=reg256#16
  957. # asm 2: vpand 256(<input_2=%rdx),<a5=%ymm14,>r=%ymm15
  958. vpand 256(%rdx),%ymm14,%ymm15
  959. # qhasm: r13 ^= r
  960. # asm 1: vpxor <r=reg256#16,<r13=reg256#4,<r13=reg256#4
  961. # asm 2: vpxor <r=%ymm15,<r13=%ymm3,<r13=%ymm3
  962. vpxor %ymm15,%ymm3,%ymm3
  963. # qhasm: r = a5 & mem256[input_2 + 288]
  964. # asm 1: vpand 288(<input_2=int64#3),<a5=reg256#15,>r=reg256#16
  965. # asm 2: vpand 288(<input_2=%rdx),<a5=%ymm14,>r=%ymm15
  966. vpand 288(%rdx),%ymm14,%ymm15
  967. # qhasm: r14 ^= r
  968. # asm 1: vpxor <r=reg256#16,<r14=reg256#5,<r14=reg256#5
  969. # asm 2: vpxor <r=%ymm15,<r14=%ymm4,<r14=%ymm4
  970. vpxor %ymm15,%ymm4,%ymm4
  971. # qhasm: r = a5 & mem256[input_2 + 320]
  972. # asm 1: vpand 320(<input_2=int64#3),<a5=reg256#15,>r=reg256#16
  973. # asm 2: vpand 320(<input_2=%rdx),<a5=%ymm14,>r=%ymm15
  974. vpand 320(%rdx),%ymm14,%ymm15
  975. # qhasm: r15 ^= r
  976. # asm 1: vpxor <r=reg256#16,<r15=reg256#6,<r15=reg256#6
  977. # asm 2: vpxor <r=%ymm15,<r15=%ymm5,<r15=%ymm5
  978. vpxor %ymm15,%ymm5,%ymm5
  979. # qhasm: r = a5 & mem256[input_2 + 352]
  980. # asm 1: vpand 352(<input_2=int64#3),<a5=reg256#15,>r=reg256#16
  981. # asm 2: vpand 352(<input_2=%rdx),<a5=%ymm14,>r=%ymm15
  982. vpand 352(%rdx),%ymm14,%ymm15
  983. # qhasm: r16 ^= r
  984. # asm 1: vpxor <r=reg256#16,<r16=reg256#7,<r16=reg256#7
  985. # asm 2: vpxor <r=%ymm15,<r16=%ymm6,<r16=%ymm6
  986. vpxor %ymm15,%ymm6,%ymm6
  987. # qhasm: r = a5 & mem256[input_2 + 384]
  988. # asm 1: vpand 384(<input_2=int64#3),<a5=reg256#15,>r=reg256#15
  989. # asm 2: vpand 384(<input_2=%rdx),<a5=%ymm14,>r=%ymm14
  990. vpand 384(%rdx),%ymm14,%ymm14
  991. # qhasm: r17 ^= r
  992. # asm 1: vpxor <r=reg256#15,<r17=reg256#8,<r17=reg256#8
  993. # asm 2: vpxor <r=%ymm14,<r17=%ymm7,<r17=%ymm7
  994. vpxor %ymm14,%ymm7,%ymm7
  995. # qhasm: r8 ^= r17
  996. # asm 1: vpxor <r17=reg256#8,<r8=reg256#12,<r8=reg256#12
  997. # asm 2: vpxor <r17=%ymm7,<r8=%ymm11,<r8=%ymm11
  998. vpxor %ymm7,%ymm11,%ymm11
  999. # qhasm: r7 ^= r17
  1000. # asm 1: vpxor <r17=reg256#8,<r7=reg256#11,<r7=reg256#11
  1001. # asm 2: vpxor <r17=%ymm7,<r7=%ymm10,<r7=%ymm10
  1002. vpxor %ymm7,%ymm10,%ymm10
  1003. # qhasm: r5 ^= r17
  1004. # asm 1: vpxor <r17=reg256#8,<r5=reg256#9,<r5=reg256#9
  1005. # asm 2: vpxor <r17=%ymm7,<r5=%ymm8,<r5=%ymm8
  1006. vpxor %ymm7,%ymm8,%ymm8
  1007. # qhasm: r4 = r17
  1008. # asm 1: vmovapd <r17=reg256#8,>r4=reg256#8
  1009. # asm 2: vmovapd <r17=%ymm7,>r4=%ymm7
  1010. vmovapd %ymm7,%ymm7
  1011. # qhasm: a4 = mem256[ input_1 + 128 ]
  1012. # asm 1: vmovupd 128(<input_1=int64#2),>a4=reg256#15
  1013. # asm 2: vmovupd 128(<input_1=%rsi),>a4=%ymm14
  1014. vmovupd 128(%rsi),%ymm14
  1015. # qhasm: r = a4 & b0
  1016. # asm 1: vpand <a4=reg256#15,<b0=reg256#1,>r=reg256#16
  1017. # asm 2: vpand <a4=%ymm14,<b0=%ymm0,>r=%ymm15
  1018. vpand %ymm14,%ymm0,%ymm15
  1019. # qhasm: r4 ^= r
  1020. # asm 1: vpxor <r=reg256#16,<r4=reg256#8,<r4=reg256#8
  1021. # asm 2: vpxor <r=%ymm15,<r4=%ymm7,<r4=%ymm7
  1022. vpxor %ymm15,%ymm7,%ymm7
  1023. # qhasm: r = a4 & mem256[input_2 + 32]
  1024. # asm 1: vpand 32(<input_2=int64#3),<a4=reg256#15,>r=reg256#16
  1025. # asm 2: vpand 32(<input_2=%rdx),<a4=%ymm14,>r=%ymm15
  1026. vpand 32(%rdx),%ymm14,%ymm15
  1027. # qhasm: r5 ^= r
  1028. # asm 1: vpxor <r=reg256#16,<r5=reg256#9,<r5=reg256#9
  1029. # asm 2: vpxor <r=%ymm15,<r5=%ymm8,<r5=%ymm8
  1030. vpxor %ymm15,%ymm8,%ymm8
  1031. # qhasm: r = a4 & mem256[input_2 + 64]
  1032. # asm 1: vpand 64(<input_2=int64#3),<a4=reg256#15,>r=reg256#16
  1033. # asm 2: vpand 64(<input_2=%rdx),<a4=%ymm14,>r=%ymm15
  1034. vpand 64(%rdx),%ymm14,%ymm15
  1035. # qhasm: r6 ^= r
  1036. # asm 1: vpxor <r=reg256#16,<r6=reg256#10,<r6=reg256#10
  1037. # asm 2: vpxor <r=%ymm15,<r6=%ymm9,<r6=%ymm9
  1038. vpxor %ymm15,%ymm9,%ymm9
  1039. # qhasm: r = a4 & mem256[input_2 + 96]
  1040. # asm 1: vpand 96(<input_2=int64#3),<a4=reg256#15,>r=reg256#16
  1041. # asm 2: vpand 96(<input_2=%rdx),<a4=%ymm14,>r=%ymm15
  1042. vpand 96(%rdx),%ymm14,%ymm15
  1043. # qhasm: r7 ^= r
  1044. # asm 1: vpxor <r=reg256#16,<r7=reg256#11,<r7=reg256#11
  1045. # asm 2: vpxor <r=%ymm15,<r7=%ymm10,<r7=%ymm10
  1046. vpxor %ymm15,%ymm10,%ymm10
  1047. # qhasm: r = a4 & mem256[input_2 + 128]
  1048. # asm 1: vpand 128(<input_2=int64#3),<a4=reg256#15,>r=reg256#16
  1049. # asm 2: vpand 128(<input_2=%rdx),<a4=%ymm14,>r=%ymm15
  1050. vpand 128(%rdx),%ymm14,%ymm15
  1051. # qhasm: r8 ^= r
  1052. # asm 1: vpxor <r=reg256#16,<r8=reg256#12,<r8=reg256#12
  1053. # asm 2: vpxor <r=%ymm15,<r8=%ymm11,<r8=%ymm11
  1054. vpxor %ymm15,%ymm11,%ymm11
  1055. # qhasm: r = a4 & mem256[input_2 + 160]
  1056. # asm 1: vpand 160(<input_2=int64#3),<a4=reg256#15,>r=reg256#16
  1057. # asm 2: vpand 160(<input_2=%rdx),<a4=%ymm14,>r=%ymm15
  1058. vpand 160(%rdx),%ymm14,%ymm15
  1059. # qhasm: r9 ^= r
  1060. # asm 1: vpxor <r=reg256#16,<r9=reg256#13,<r9=reg256#13
  1061. # asm 2: vpxor <r=%ymm15,<r9=%ymm12,<r9=%ymm12
  1062. vpxor %ymm15,%ymm12,%ymm12
  1063. # qhasm: r = a4 & mem256[input_2 + 192]
  1064. # asm 1: vpand 192(<input_2=int64#3),<a4=reg256#15,>r=reg256#16
  1065. # asm 2: vpand 192(<input_2=%rdx),<a4=%ymm14,>r=%ymm15
  1066. vpand 192(%rdx),%ymm14,%ymm15
  1067. # qhasm: r10 ^= r
  1068. # asm 1: vpxor <r=reg256#16,<r10=reg256#14,<r10=reg256#14
  1069. # asm 2: vpxor <r=%ymm15,<r10=%ymm13,<r10=%ymm13
  1070. vpxor %ymm15,%ymm13,%ymm13
  1071. # qhasm: r = a4 & mem256[input_2 + 224]
  1072. # asm 1: vpand 224(<input_2=int64#3),<a4=reg256#15,>r=reg256#16
  1073. # asm 2: vpand 224(<input_2=%rdx),<a4=%ymm14,>r=%ymm15
  1074. vpand 224(%rdx),%ymm14,%ymm15
  1075. # qhasm: r11 ^= r
  1076. # asm 1: vpxor <r=reg256#16,<r11=reg256#2,<r11=reg256#2
  1077. # asm 2: vpxor <r=%ymm15,<r11=%ymm1,<r11=%ymm1
  1078. vpxor %ymm15,%ymm1,%ymm1
  1079. # qhasm: r = a4 & mem256[input_2 + 256]
  1080. # asm 1: vpand 256(<input_2=int64#3),<a4=reg256#15,>r=reg256#16
  1081. # asm 2: vpand 256(<input_2=%rdx),<a4=%ymm14,>r=%ymm15
  1082. vpand 256(%rdx),%ymm14,%ymm15
  1083. # qhasm: r12 ^= r
  1084. # asm 1: vpxor <r=reg256#16,<r12=reg256#3,<r12=reg256#3
  1085. # asm 2: vpxor <r=%ymm15,<r12=%ymm2,<r12=%ymm2
  1086. vpxor %ymm15,%ymm2,%ymm2
  1087. # qhasm: r = a4 & mem256[input_2 + 288]
  1088. # asm 1: vpand 288(<input_2=int64#3),<a4=reg256#15,>r=reg256#16
  1089. # asm 2: vpand 288(<input_2=%rdx),<a4=%ymm14,>r=%ymm15
  1090. vpand 288(%rdx),%ymm14,%ymm15
  1091. # qhasm: r13 ^= r
  1092. # asm 1: vpxor <r=reg256#16,<r13=reg256#4,<r13=reg256#4
  1093. # asm 2: vpxor <r=%ymm15,<r13=%ymm3,<r13=%ymm3
  1094. vpxor %ymm15,%ymm3,%ymm3
  1095. # qhasm: r = a4 & mem256[input_2 + 320]
  1096. # asm 1: vpand 320(<input_2=int64#3),<a4=reg256#15,>r=reg256#16
  1097. # asm 2: vpand 320(<input_2=%rdx),<a4=%ymm14,>r=%ymm15
  1098. vpand 320(%rdx),%ymm14,%ymm15
  1099. # qhasm: r14 ^= r
  1100. # asm 1: vpxor <r=reg256#16,<r14=reg256#5,<r14=reg256#5
  1101. # asm 2: vpxor <r=%ymm15,<r14=%ymm4,<r14=%ymm4
  1102. vpxor %ymm15,%ymm4,%ymm4
  1103. # qhasm: r = a4 & mem256[input_2 + 352]
  1104. # asm 1: vpand 352(<input_2=int64#3),<a4=reg256#15,>r=reg256#16
  1105. # asm 2: vpand 352(<input_2=%rdx),<a4=%ymm14,>r=%ymm15
  1106. vpand 352(%rdx),%ymm14,%ymm15
  1107. # qhasm: r15 ^= r
  1108. # asm 1: vpxor <r=reg256#16,<r15=reg256#6,<r15=reg256#6
  1109. # asm 2: vpxor <r=%ymm15,<r15=%ymm5,<r15=%ymm5
  1110. vpxor %ymm15,%ymm5,%ymm5
  1111. # qhasm: r = a4 & mem256[input_2 + 384]
  1112. # asm 1: vpand 384(<input_2=int64#3),<a4=reg256#15,>r=reg256#15
  1113. # asm 2: vpand 384(<input_2=%rdx),<a4=%ymm14,>r=%ymm14
  1114. vpand 384(%rdx),%ymm14,%ymm14
  1115. # qhasm: r16 ^= r
  1116. # asm 1: vpxor <r=reg256#15,<r16=reg256#7,<r16=reg256#7
  1117. # asm 2: vpxor <r=%ymm14,<r16=%ymm6,<r16=%ymm6
  1118. vpxor %ymm14,%ymm6,%ymm6
  1119. # qhasm: r7 ^= r16
  1120. # asm 1: vpxor <r16=reg256#7,<r7=reg256#11,<r7=reg256#11
  1121. # asm 2: vpxor <r16=%ymm6,<r7=%ymm10,<r7=%ymm10
  1122. vpxor %ymm6,%ymm10,%ymm10
  1123. # qhasm: r6 ^= r16
  1124. # asm 1: vpxor <r16=reg256#7,<r6=reg256#10,<r6=reg256#10
  1125. # asm 2: vpxor <r16=%ymm6,<r6=%ymm9,<r6=%ymm9
  1126. vpxor %ymm6,%ymm9,%ymm9
  1127. # qhasm: r4 ^= r16
  1128. # asm 1: vpxor <r16=reg256#7,<r4=reg256#8,<r4=reg256#8
  1129. # asm 2: vpxor <r16=%ymm6,<r4=%ymm7,<r4=%ymm7
  1130. vpxor %ymm6,%ymm7,%ymm7
  1131. # qhasm: r3 = r16
  1132. # asm 1: vmovapd <r16=reg256#7,>r3=reg256#7
  1133. # asm 2: vmovapd <r16=%ymm6,>r3=%ymm6
  1134. vmovapd %ymm6,%ymm6
# qhasm: a3 = mem256[ input_1 + 96 ]
# asm 1: vmovupd 96(<input_1=int64#2),>a3=reg256#15
# asm 2: vmovupd 96(<input_1=%rsi),>a3=%ymm14
vmovupd 96(%rsi),%ymm14
# qhasm: r = a3 & b0
# asm 1: vpand <a3=reg256#15,<b0=reg256#1,>r=reg256#16
# asm 2: vpand <a3=%ymm14,<b0=%ymm0,>r=%ymm15
vpand %ymm14,%ymm0,%ymm15
# qhasm: r3 ^= r
# asm 1: vpxor <r=reg256#16,<r3=reg256#7,<r3=reg256#7
# asm 2: vpxor <r=%ymm15,<r3=%ymm6,<r3=%ymm6
vpxor %ymm15,%ymm6,%ymm6
# qhasm: r = a3 & mem256[input_2 + 32]
# asm 1: vpand 32(<input_2=int64#3),<a3=reg256#15,>r=reg256#16
# asm 2: vpand 32(<input_2=%rdx),<a3=%ymm14,>r=%ymm15
vpand 32(%rdx),%ymm14,%ymm15
# qhasm: r4 ^= r
# asm 1: vpxor <r=reg256#16,<r4=reg256#8,<r4=reg256#8
# asm 2: vpxor <r=%ymm15,<r4=%ymm7,<r4=%ymm7
vpxor %ymm15,%ymm7,%ymm7
# qhasm: r = a3 & mem256[input_2 + 64]
# asm 1: vpand 64(<input_2=int64#3),<a3=reg256#15,>r=reg256#16
# asm 2: vpand 64(<input_2=%rdx),<a3=%ymm14,>r=%ymm15
vpand 64(%rdx),%ymm14,%ymm15
# qhasm: r5 ^= r
# asm 1: vpxor <r=reg256#16,<r5=reg256#9,<r5=reg256#9
# asm 2: vpxor <r=%ymm15,<r5=%ymm8,<r5=%ymm8
vpxor %ymm15,%ymm8,%ymm8
# qhasm: r = a3 & mem256[input_2 + 96]
# asm 1: vpand 96(<input_2=int64#3),<a3=reg256#15,>r=reg256#16
# asm 2: vpand 96(<input_2=%rdx),<a3=%ymm14,>r=%ymm15
vpand 96(%rdx),%ymm14,%ymm15
# qhasm: r6 ^= r
# asm 1: vpxor <r=reg256#16,<r6=reg256#10,<r6=reg256#10
# asm 2: vpxor <r=%ymm15,<r6=%ymm9,<r6=%ymm9
vpxor %ymm15,%ymm9,%ymm9
# qhasm: r = a3 & mem256[input_2 + 128]
# asm 1: vpand 128(<input_2=int64#3),<a3=reg256#15,>r=reg256#16
# asm 2: vpand 128(<input_2=%rdx),<a3=%ymm14,>r=%ymm15
vpand 128(%rdx),%ymm14,%ymm15
# qhasm: r7 ^= r
# asm 1: vpxor <r=reg256#16,<r7=reg256#11,<r7=reg256#11
# asm 2: vpxor <r=%ymm15,<r7=%ymm10,<r7=%ymm10
vpxor %ymm15,%ymm10,%ymm10
# qhasm: r = a3 & mem256[input_2 + 160]
# asm 1: vpand 160(<input_2=int64#3),<a3=reg256#15,>r=reg256#16
# asm 2: vpand 160(<input_2=%rdx),<a3=%ymm14,>r=%ymm15
vpand 160(%rdx),%ymm14,%ymm15
# qhasm: r8 ^= r
# asm 1: vpxor <r=reg256#16,<r8=reg256#12,<r8=reg256#12
# asm 2: vpxor <r=%ymm15,<r8=%ymm11,<r8=%ymm11
vpxor %ymm15,%ymm11,%ymm11
# qhasm: r = a3 & mem256[input_2 + 192]
# asm 1: vpand 192(<input_2=int64#3),<a3=reg256#15,>r=reg256#16
# asm 2: vpand 192(<input_2=%rdx),<a3=%ymm14,>r=%ymm15
vpand 192(%rdx),%ymm14,%ymm15
# qhasm: r9 ^= r
# asm 1: vpxor <r=reg256#16,<r9=reg256#13,<r9=reg256#13
# asm 2: vpxor <r=%ymm15,<r9=%ymm12,<r9=%ymm12
vpxor %ymm15,%ymm12,%ymm12
# qhasm: r = a3 & mem256[input_2 + 224]
# asm 1: vpand 224(<input_2=int64#3),<a3=reg256#15,>r=reg256#16
# asm 2: vpand 224(<input_2=%rdx),<a3=%ymm14,>r=%ymm15
vpand 224(%rdx),%ymm14,%ymm15
# qhasm: r10 ^= r
# asm 1: vpxor <r=reg256#16,<r10=reg256#14,<r10=reg256#14
# asm 2: vpxor <r=%ymm15,<r10=%ymm13,<r10=%ymm13
vpxor %ymm15,%ymm13,%ymm13
# qhasm: r = a3 & mem256[input_2 + 256]
# asm 1: vpand 256(<input_2=int64#3),<a3=reg256#15,>r=reg256#16
# asm 2: vpand 256(<input_2=%rdx),<a3=%ymm14,>r=%ymm15
vpand 256(%rdx),%ymm14,%ymm15
# qhasm: r11 ^= r
# asm 1: vpxor <r=reg256#16,<r11=reg256#2,<r11=reg256#2
# asm 2: vpxor <r=%ymm15,<r11=%ymm1,<r11=%ymm1
vpxor %ymm15,%ymm1,%ymm1
# qhasm: r = a3 & mem256[input_2 + 288]
# asm 1: vpand 288(<input_2=int64#3),<a3=reg256#15,>r=reg256#16
# asm 2: vpand 288(<input_2=%rdx),<a3=%ymm14,>r=%ymm15
vpand 288(%rdx),%ymm14,%ymm15
# qhasm: r12 ^= r
# asm 1: vpxor <r=reg256#16,<r12=reg256#3,<r12=reg256#3
# asm 2: vpxor <r=%ymm15,<r12=%ymm2,<r12=%ymm2
vpxor %ymm15,%ymm2,%ymm2
# qhasm: r = a3 & mem256[input_2 + 320]
# asm 1: vpand 320(<input_2=int64#3),<a3=reg256#15,>r=reg256#16
# asm 2: vpand 320(<input_2=%rdx),<a3=%ymm14,>r=%ymm15
vpand 320(%rdx),%ymm14,%ymm15
# qhasm: r13 ^= r
# asm 1: vpxor <r=reg256#16,<r13=reg256#4,<r13=reg256#4
# asm 2: vpxor <r=%ymm15,<r13=%ymm3,<r13=%ymm3
vpxor %ymm15,%ymm3,%ymm3
# qhasm: r = a3 & mem256[input_2 + 352]
# asm 1: vpand 352(<input_2=int64#3),<a3=reg256#15,>r=reg256#16
# asm 2: vpand 352(<input_2=%rdx),<a3=%ymm14,>r=%ymm15
vpand 352(%rdx),%ymm14,%ymm15
# qhasm: r14 ^= r
# asm 1: vpxor <r=reg256#16,<r14=reg256#5,<r14=reg256#5
# asm 2: vpxor <r=%ymm15,<r14=%ymm4,<r14=%ymm4
vpxor %ymm15,%ymm4,%ymm4
# qhasm: r = a3 & mem256[input_2 + 384]
# asm 1: vpand 384(<input_2=int64#3),<a3=reg256#15,>r=reg256#15
# asm 2: vpand 384(<input_2=%rdx),<a3=%ymm14,>r=%ymm14
vpand 384(%rdx),%ymm14,%ymm14
# qhasm: r15 ^= r
# asm 1: vpxor <r=reg256#15,<r15=reg256#6,<r15=reg256#6
# asm 2: vpxor <r=%ymm14,<r15=%ymm5,<r15=%ymm5
vpxor %ymm14,%ymm5,%ymm5
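# r15 is complete after the a3 row (the remaining rows a2..a0 only
# reach r14), so it is folded away immediately. XORing r15 into r6,
# r5, r3 and initializing r2 with it matches reduction modulo
# x^13 + x^4 + x^3 + x + 1 (the GF(2^13) field polynomial used by
# Classic McEliece): x^15 = x^2 * x^13 == x^6 + x^5 + x^3 + x^2.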
# qhasm: r6 ^= r15
# asm 1: vpxor <r15=reg256#6,<r6=reg256#10,<r6=reg256#10
# asm 2: vpxor <r15=%ymm5,<r6=%ymm9,<r6=%ymm9
vpxor %ymm5,%ymm9,%ymm9
# qhasm: r5 ^= r15
# asm 1: vpxor <r15=reg256#6,<r5=reg256#9,<r5=reg256#9
# asm 2: vpxor <r15=%ymm5,<r5=%ymm8,<r5=%ymm8
vpxor %ymm5,%ymm8,%ymm8
# qhasm: r3 ^= r15
# asm 1: vpxor <r15=reg256#6,<r3=reg256#7,<r3=reg256#7
# asm 2: vpxor <r15=%ymm5,<r3=%ymm6,<r3=%ymm6
vpxor %ymm5,%ymm6,%ymm6
# qhasm: r2 = r15
# asm 1: vmovapd <r15=reg256#6,>r2=reg256#6
# asm 2: vmovapd <r15=%ymm5,>r2=%ymm5
vmovapd %ymm5,%ymm5
# qhasm: a2 = mem256[ input_1 + 64 ]
# asm 1: vmovupd 64(<input_1=int64#2),>a2=reg256#15
# asm 2: vmovupd 64(<input_1=%rsi),>a2=%ymm14
vmovupd 64(%rsi),%ymm14
# qhasm: r = a2 & b0
# asm 1: vpand <a2=reg256#15,<b0=reg256#1,>r=reg256#16
# asm 2: vpand <a2=%ymm14,<b0=%ymm0,>r=%ymm15
vpand %ymm14,%ymm0,%ymm15
# qhasm: r2 ^= r
# asm 1: vpxor <r=reg256#16,<r2=reg256#6,<r2=reg256#6
# asm 2: vpxor <r=%ymm15,<r2=%ymm5,<r2=%ymm5
vpxor %ymm15,%ymm5,%ymm5
# qhasm: r = a2 & mem256[input_2 + 32]
# asm 1: vpand 32(<input_2=int64#3),<a2=reg256#15,>r=reg256#16
# asm 2: vpand 32(<input_2=%rdx),<a2=%ymm14,>r=%ymm15
vpand 32(%rdx),%ymm14,%ymm15
# qhasm: r3 ^= r
# asm 1: vpxor <r=reg256#16,<r3=reg256#7,<r3=reg256#7
# asm 2: vpxor <r=%ymm15,<r3=%ymm6,<r3=%ymm6
vpxor %ymm15,%ymm6,%ymm6
# qhasm: r = a2 & mem256[input_2 + 64]
# asm 1: vpand 64(<input_2=int64#3),<a2=reg256#15,>r=reg256#16
# asm 2: vpand 64(<input_2=%rdx),<a2=%ymm14,>r=%ymm15
vpand 64(%rdx),%ymm14,%ymm15
# qhasm: r4 ^= r
# asm 1: vpxor <r=reg256#16,<r4=reg256#8,<r4=reg256#8
# asm 2: vpxor <r=%ymm15,<r4=%ymm7,<r4=%ymm7
vpxor %ymm15,%ymm7,%ymm7
# qhasm: r = a2 & mem256[input_2 + 96]
# asm 1: vpand 96(<input_2=int64#3),<a2=reg256#15,>r=reg256#16
# asm 2: vpand 96(<input_2=%rdx),<a2=%ymm14,>r=%ymm15
vpand 96(%rdx),%ymm14,%ymm15
# qhasm: r5 ^= r
# asm 1: vpxor <r=reg256#16,<r5=reg256#9,<r5=reg256#9
# asm 2: vpxor <r=%ymm15,<r5=%ymm8,<r5=%ymm8
vpxor %ymm15,%ymm8,%ymm8
# qhasm: r = a2 & mem256[input_2 + 128]
# asm 1: vpand 128(<input_2=int64#3),<a2=reg256#15,>r=reg256#16
# asm 2: vpand 128(<input_2=%rdx),<a2=%ymm14,>r=%ymm15
vpand 128(%rdx),%ymm14,%ymm15
# qhasm: r6 ^= r
# asm 1: vpxor <r=reg256#16,<r6=reg256#10,<r6=reg256#10
# asm 2: vpxor <r=%ymm15,<r6=%ymm9,<r6=%ymm9
vpxor %ymm15,%ymm9,%ymm9
# qhasm: r = a2 & mem256[input_2 + 160]
# asm 1: vpand 160(<input_2=int64#3),<a2=reg256#15,>r=reg256#16
# asm 2: vpand 160(<input_2=%rdx),<a2=%ymm14,>r=%ymm15
vpand 160(%rdx),%ymm14,%ymm15
# qhasm: r7 ^= r
# asm 1: vpxor <r=reg256#16,<r7=reg256#11,<r7=reg256#11
# asm 2: vpxor <r=%ymm15,<r7=%ymm10,<r7=%ymm10
vpxor %ymm15,%ymm10,%ymm10
# qhasm: r = a2 & mem256[input_2 + 192]
# asm 1: vpand 192(<input_2=int64#3),<a2=reg256#15,>r=reg256#16
# asm 2: vpand 192(<input_2=%rdx),<a2=%ymm14,>r=%ymm15
vpand 192(%rdx),%ymm14,%ymm15
# qhasm: r8 ^= r
# asm 1: vpxor <r=reg256#16,<r8=reg256#12,<r8=reg256#12
# asm 2: vpxor <r=%ymm15,<r8=%ymm11,<r8=%ymm11
vpxor %ymm15,%ymm11,%ymm11
# qhasm: r = a2 & mem256[input_2 + 224]
# asm 1: vpand 224(<input_2=int64#3),<a2=reg256#15,>r=reg256#16
# asm 2: vpand 224(<input_2=%rdx),<a2=%ymm14,>r=%ymm15
vpand 224(%rdx),%ymm14,%ymm15
# qhasm: r9 ^= r
# asm 1: vpxor <r=reg256#16,<r9=reg256#13,<r9=reg256#13
# asm 2: vpxor <r=%ymm15,<r9=%ymm12,<r9=%ymm12
vpxor %ymm15,%ymm12,%ymm12
# qhasm: r = a2 & mem256[input_2 + 256]
# asm 1: vpand 256(<input_2=int64#3),<a2=reg256#15,>r=reg256#16
# asm 2: vpand 256(<input_2=%rdx),<a2=%ymm14,>r=%ymm15
vpand 256(%rdx),%ymm14,%ymm15
# qhasm: r10 ^= r
# asm 1: vpxor <r=reg256#16,<r10=reg256#14,<r10=reg256#14
# asm 2: vpxor <r=%ymm15,<r10=%ymm13,<r10=%ymm13
vpxor %ymm15,%ymm13,%ymm13
# qhasm: r = a2 & mem256[input_2 + 288]
# asm 1: vpand 288(<input_2=int64#3),<a2=reg256#15,>r=reg256#16
# asm 2: vpand 288(<input_2=%rdx),<a2=%ymm14,>r=%ymm15
vpand 288(%rdx),%ymm14,%ymm15
# qhasm: r11 ^= r
# asm 1: vpxor <r=reg256#16,<r11=reg256#2,<r11=reg256#2
# asm 2: vpxor <r=%ymm15,<r11=%ymm1,<r11=%ymm1
vpxor %ymm15,%ymm1,%ymm1
# qhasm: r = a2 & mem256[input_2 + 320]
# asm 1: vpand 320(<input_2=int64#3),<a2=reg256#15,>r=reg256#16
# asm 2: vpand 320(<input_2=%rdx),<a2=%ymm14,>r=%ymm15
vpand 320(%rdx),%ymm14,%ymm15
# qhasm: r12 ^= r
# asm 1: vpxor <r=reg256#16,<r12=reg256#3,<r12=reg256#3
# asm 2: vpxor <r=%ymm15,<r12=%ymm2,<r12=%ymm2
vpxor %ymm15,%ymm2,%ymm2
# qhasm: r = a2 & mem256[input_2 + 352]
# asm 1: vpand 352(<input_2=int64#3),<a2=reg256#15,>r=reg256#16
# asm 2: vpand 352(<input_2=%rdx),<a2=%ymm14,>r=%ymm15
vpand 352(%rdx),%ymm14,%ymm15
# qhasm: r13 ^= r
# asm 1: vpxor <r=reg256#16,<r13=reg256#4,<r13=reg256#4
# asm 2: vpxor <r=%ymm15,<r13=%ymm3,<r13=%ymm3
vpxor %ymm15,%ymm3,%ymm3
# qhasm: r = a2 & mem256[input_2 + 384]
# asm 1: vpand 384(<input_2=int64#3),<a2=reg256#15,>r=reg256#15
# asm 2: vpand 384(<input_2=%rdx),<a2=%ymm14,>r=%ymm14
vpand 384(%rdx),%ymm14,%ymm14
# qhasm: r14 ^= r
# asm 1: vpxor <r=reg256#15,<r14=reg256#5,<r14=reg256#5
# asm 2: vpxor <r=%ymm14,<r14=%ymm4,<r14=%ymm4
vpxor %ymm14,%ymm4,%ymm4
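# Same reduction for r14, which is complete after the a2 row:
# x^14 = x * x^13 == x^5 + x^4 + x^2 + x, so r14 is XORed into
# r5, r4, r2 and copied into the fresh accumulator r1.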
# qhasm: r5 ^= r14
# asm 1: vpxor <r14=reg256#5,<r5=reg256#9,<r5=reg256#9
# asm 2: vpxor <r14=%ymm4,<r5=%ymm8,<r5=%ymm8
vpxor %ymm4,%ymm8,%ymm8
# qhasm: r4 ^= r14
# asm 1: vpxor <r14=reg256#5,<r4=reg256#8,<r4=reg256#8
# asm 2: vpxor <r14=%ymm4,<r4=%ymm7,<r4=%ymm7
vpxor %ymm4,%ymm7,%ymm7
# qhasm: r2 ^= r14
# asm 1: vpxor <r14=reg256#5,<r2=reg256#6,<r2=reg256#6
# asm 2: vpxor <r14=%ymm4,<r2=%ymm5,<r2=%ymm5
vpxor %ymm4,%ymm5,%ymm5
# qhasm: r1 = r14
# asm 1: vmovapd <r14=reg256#5,>r1=reg256#5
# asm 2: vmovapd <r14=%ymm4,>r1=%ymm4
vmovapd %ymm4,%ymm4
# qhasm: a1 = mem256[ input_1 + 32 ]
# asm 1: vmovupd 32(<input_1=int64#2),>a1=reg256#15
# asm 2: vmovupd 32(<input_1=%rsi),>a1=%ymm14
vmovupd 32(%rsi),%ymm14
# qhasm: r = a1 & b0
# asm 1: vpand <a1=reg256#15,<b0=reg256#1,>r=reg256#16
# asm 2: vpand <a1=%ymm14,<b0=%ymm0,>r=%ymm15
vpand %ymm14,%ymm0,%ymm15
# qhasm: r1 ^= r
# asm 1: vpxor <r=reg256#16,<r1=reg256#5,<r1=reg256#5
# asm 2: vpxor <r=%ymm15,<r1=%ymm4,<r1=%ymm4
vpxor %ymm15,%ymm4,%ymm4
# qhasm: r = a1 & mem256[input_2 + 32]
# asm 1: vpand 32(<input_2=int64#3),<a1=reg256#15,>r=reg256#16
# asm 2: vpand 32(<input_2=%rdx),<a1=%ymm14,>r=%ymm15
vpand 32(%rdx),%ymm14,%ymm15
# qhasm: r2 ^= r
# asm 1: vpxor <r=reg256#16,<r2=reg256#6,<r2=reg256#6
# asm 2: vpxor <r=%ymm15,<r2=%ymm5,<r2=%ymm5
vpxor %ymm15,%ymm5,%ymm5
# qhasm: r = a1 & mem256[input_2 + 64]
# asm 1: vpand 64(<input_2=int64#3),<a1=reg256#15,>r=reg256#16
# asm 2: vpand 64(<input_2=%rdx),<a1=%ymm14,>r=%ymm15
vpand 64(%rdx),%ymm14,%ymm15
# qhasm: r3 ^= r
# asm 1: vpxor <r=reg256#16,<r3=reg256#7,<r3=reg256#7
# asm 2: vpxor <r=%ymm15,<r3=%ymm6,<r3=%ymm6
vpxor %ymm15,%ymm6,%ymm6
# qhasm: r = a1 & mem256[input_2 + 96]
# asm 1: vpand 96(<input_2=int64#3),<a1=reg256#15,>r=reg256#16
# asm 2: vpand 96(<input_2=%rdx),<a1=%ymm14,>r=%ymm15
vpand 96(%rdx),%ymm14,%ymm15
# qhasm: r4 ^= r
# asm 1: vpxor <r=reg256#16,<r4=reg256#8,<r4=reg256#8
# asm 2: vpxor <r=%ymm15,<r4=%ymm7,<r4=%ymm7
vpxor %ymm15,%ymm7,%ymm7
# qhasm: r = a1 & mem256[input_2 + 128]
# asm 1: vpand 128(<input_2=int64#3),<a1=reg256#15,>r=reg256#16
# asm 2: vpand 128(<input_2=%rdx),<a1=%ymm14,>r=%ymm15
vpand 128(%rdx),%ymm14,%ymm15
# qhasm: r5 ^= r
# asm 1: vpxor <r=reg256#16,<r5=reg256#9,<r5=reg256#9
# asm 2: vpxor <r=%ymm15,<r5=%ymm8,<r5=%ymm8
vpxor %ymm15,%ymm8,%ymm8
# qhasm: r = a1 & mem256[input_2 + 160]
# asm 1: vpand 160(<input_2=int64#3),<a1=reg256#15,>r=reg256#16
# asm 2: vpand 160(<input_2=%rdx),<a1=%ymm14,>r=%ymm15
vpand 160(%rdx),%ymm14,%ymm15
# qhasm: r6 ^= r
# asm 1: vpxor <r=reg256#16,<r6=reg256#10,<r6=reg256#10
# asm 2: vpxor <r=%ymm15,<r6=%ymm9,<r6=%ymm9
vpxor %ymm15,%ymm9,%ymm9
# qhasm: r = a1 & mem256[input_2 + 192]
# asm 1: vpand 192(<input_2=int64#3),<a1=reg256#15,>r=reg256#16
# asm 2: vpand 192(<input_2=%rdx),<a1=%ymm14,>r=%ymm15
vpand 192(%rdx),%ymm14,%ymm15
# qhasm: r7 ^= r
# asm 1: vpxor <r=reg256#16,<r7=reg256#11,<r7=reg256#11
# asm 2: vpxor <r=%ymm15,<r7=%ymm10,<r7=%ymm10
vpxor %ymm15,%ymm10,%ymm10
# qhasm: r = a1 & mem256[input_2 + 224]
# asm 1: vpand 224(<input_2=int64#3),<a1=reg256#15,>r=reg256#16
# asm 2: vpand 224(<input_2=%rdx),<a1=%ymm14,>r=%ymm15
vpand 224(%rdx),%ymm14,%ymm15
# qhasm: r8 ^= r
# asm 1: vpxor <r=reg256#16,<r8=reg256#12,<r8=reg256#12
# asm 2: vpxor <r=%ymm15,<r8=%ymm11,<r8=%ymm11
vpxor %ymm15,%ymm11,%ymm11
# qhasm: r = a1 & mem256[input_2 + 256]
# asm 1: vpand 256(<input_2=int64#3),<a1=reg256#15,>r=reg256#16
# asm 2: vpand 256(<input_2=%rdx),<a1=%ymm14,>r=%ymm15
vpand 256(%rdx),%ymm14,%ymm15
# qhasm: r9 ^= r
# asm 1: vpxor <r=reg256#16,<r9=reg256#13,<r9=reg256#13
# asm 2: vpxor <r=%ymm15,<r9=%ymm12,<r9=%ymm12
vpxor %ymm15,%ymm12,%ymm12
# qhasm: r = a1 & mem256[input_2 + 288]
# asm 1: vpand 288(<input_2=int64#3),<a1=reg256#15,>r=reg256#16
# asm 2: vpand 288(<input_2=%rdx),<a1=%ymm14,>r=%ymm15
vpand 288(%rdx),%ymm14,%ymm15
# qhasm: r10 ^= r
# asm 1: vpxor <r=reg256#16,<r10=reg256#14,<r10=reg256#14
# asm 2: vpxor <r=%ymm15,<r10=%ymm13,<r10=%ymm13
vpxor %ymm15,%ymm13,%ymm13
# qhasm: r = a1 & mem256[input_2 + 320]
# asm 1: vpand 320(<input_2=int64#3),<a1=reg256#15,>r=reg256#16
# asm 2: vpand 320(<input_2=%rdx),<a1=%ymm14,>r=%ymm15
vpand 320(%rdx),%ymm14,%ymm15
# qhasm: r11 ^= r
# asm 1: vpxor <r=reg256#16,<r11=reg256#2,<r11=reg256#2
# asm 2: vpxor <r=%ymm15,<r11=%ymm1,<r11=%ymm1
vpxor %ymm15,%ymm1,%ymm1
# qhasm: r = a1 & mem256[input_2 + 352]
# asm 1: vpand 352(<input_2=int64#3),<a1=reg256#15,>r=reg256#16
# asm 2: vpand 352(<input_2=%rdx),<a1=%ymm14,>r=%ymm15
vpand 352(%rdx),%ymm14,%ymm15
# qhasm: r12 ^= r
# asm 1: vpxor <r=reg256#16,<r12=reg256#3,<r12=reg256#3
# asm 2: vpxor <r=%ymm15,<r12=%ymm2,<r12=%ymm2
vpxor %ymm15,%ymm2,%ymm2
# qhasm: r = a1 & mem256[input_2 + 384]
# asm 1: vpand 384(<input_2=int64#3),<a1=reg256#15,>r=reg256#15
# asm 2: vpand 384(<input_2=%rdx),<a1=%ymm14,>r=%ymm14
vpand 384(%rdx),%ymm14,%ymm14
# qhasm: r13 ^= r
# asm 1: vpxor <r=reg256#15,<r13=reg256#4,<r13=reg256#4
# asm 2: vpxor <r=%ymm14,<r13=%ymm3,<r13=%ymm3
vpxor %ymm14,%ymm3,%ymm3
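# Last reduction step: r13 is complete after the a1 row, and
# x^13 == x^4 + x^3 + x + 1, so r13 is XORed into r4, r3, r1 and
# copied into r0. The a0 row below only touches r0..r12, which
# need no further folding.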
# qhasm: r4 ^= r13
# asm 1: vpxor <r13=reg256#4,<r4=reg256#8,<r4=reg256#8
# asm 2: vpxor <r13=%ymm3,<r4=%ymm7,<r4=%ymm7
vpxor %ymm3,%ymm7,%ymm7
# qhasm: r3 ^= r13
# asm 1: vpxor <r13=reg256#4,<r3=reg256#7,<r3=reg256#7
# asm 2: vpxor <r13=%ymm3,<r3=%ymm6,<r3=%ymm6
vpxor %ymm3,%ymm6,%ymm6
# qhasm: r1 ^= r13
# asm 1: vpxor <r13=reg256#4,<r1=reg256#5,<r1=reg256#5
# asm 2: vpxor <r13=%ymm3,<r1=%ymm4,<r1=%ymm4
vpxor %ymm3,%ymm4,%ymm4
# qhasm: r0 = r13
# asm 1: vmovapd <r13=reg256#4,>r0=reg256#4
# asm 2: vmovapd <r13=%ymm3,>r0=%ymm3
vmovapd %ymm3,%ymm3
# qhasm: a0 = mem256[ input_1 + 0 ]
# asm 1: vmovupd 0(<input_1=int64#2),>a0=reg256#15
# asm 2: vmovupd 0(<input_1=%rsi),>a0=%ymm14
vmovupd 0(%rsi),%ymm14
# qhasm: r = a0 & b0
# asm 1: vpand <a0=reg256#15,<b0=reg256#1,>r=reg256#1
# asm 2: vpand <a0=%ymm14,<b0=%ymm0,>r=%ymm0
vpand %ymm14,%ymm0,%ymm0
# qhasm: r0 ^= r
# asm 1: vpxor <r=reg256#1,<r0=reg256#4,<r0=reg256#4
# asm 2: vpxor <r=%ymm0,<r0=%ymm3,<r0=%ymm3
vpxor %ymm0,%ymm3,%ymm3
# qhasm: r = a0 & mem256[input_2 + 32]
# asm 1: vpand 32(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 32(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 32(%rdx),%ymm14,%ymm0
# qhasm: r1 ^= r
# asm 1: vpxor <r=reg256#1,<r1=reg256#5,<r1=reg256#5
# asm 2: vpxor <r=%ymm0,<r1=%ymm4,<r1=%ymm4
vpxor %ymm0,%ymm4,%ymm4
# qhasm: r = a0 & mem256[input_2 + 64]
# asm 1: vpand 64(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 64(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 64(%rdx),%ymm14,%ymm0
# qhasm: r2 ^= r
# asm 1: vpxor <r=reg256#1,<r2=reg256#6,<r2=reg256#6
# asm 2: vpxor <r=%ymm0,<r2=%ymm5,<r2=%ymm5
vpxor %ymm0,%ymm5,%ymm5
# qhasm: r = a0 & mem256[input_2 + 96]
# asm 1: vpand 96(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 96(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 96(%rdx),%ymm14,%ymm0
# qhasm: r3 ^= r
# asm 1: vpxor <r=reg256#1,<r3=reg256#7,<r3=reg256#7
# asm 2: vpxor <r=%ymm0,<r3=%ymm6,<r3=%ymm6
vpxor %ymm0,%ymm6,%ymm6
# qhasm: r = a0 & mem256[input_2 + 128]
# asm 1: vpand 128(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 128(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 128(%rdx),%ymm14,%ymm0
# qhasm: r4 ^= r
# asm 1: vpxor <r=reg256#1,<r4=reg256#8,<r4=reg256#8
# asm 2: vpxor <r=%ymm0,<r4=%ymm7,<r4=%ymm7
vpxor %ymm0,%ymm7,%ymm7
# qhasm: r = a0 & mem256[input_2 + 160]
# asm 1: vpand 160(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 160(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 160(%rdx),%ymm14,%ymm0
# qhasm: r5 ^= r
# asm 1: vpxor <r=reg256#1,<r5=reg256#9,<r5=reg256#9
# asm 2: vpxor <r=%ymm0,<r5=%ymm8,<r5=%ymm8
vpxor %ymm0,%ymm8,%ymm8
# qhasm: r = a0 & mem256[input_2 + 192]
# asm 1: vpand 192(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 192(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 192(%rdx),%ymm14,%ymm0
# qhasm: r6 ^= r
# asm 1: vpxor <r=reg256#1,<r6=reg256#10,<r6=reg256#10
# asm 2: vpxor <r=%ymm0,<r6=%ymm9,<r6=%ymm9
vpxor %ymm0,%ymm9,%ymm9
# qhasm: r = a0 & mem256[input_2 + 224]
# asm 1: vpand 224(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 224(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 224(%rdx),%ymm14,%ymm0
# qhasm: r7 ^= r
# asm 1: vpxor <r=reg256#1,<r7=reg256#11,<r7=reg256#11
# asm 2: vpxor <r=%ymm0,<r7=%ymm10,<r7=%ymm10
vpxor %ymm0,%ymm10,%ymm10
# qhasm: r = a0 & mem256[input_2 + 256]
# asm 1: vpand 256(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 256(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 256(%rdx),%ymm14,%ymm0
# qhasm: r8 ^= r
# asm 1: vpxor <r=reg256#1,<r8=reg256#12,<r8=reg256#12
# asm 2: vpxor <r=%ymm0,<r8=%ymm11,<r8=%ymm11
vpxor %ymm0,%ymm11,%ymm11
# qhasm: r = a0 & mem256[input_2 + 288]
# asm 1: vpand 288(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 288(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 288(%rdx),%ymm14,%ymm0
# qhasm: r9 ^= r
# asm 1: vpxor <r=reg256#1,<r9=reg256#13,<r9=reg256#13
# asm 2: vpxor <r=%ymm0,<r9=%ymm12,<r9=%ymm12
vpxor %ymm0,%ymm12,%ymm12
# qhasm: r = a0 & mem256[input_2 + 320]
# asm 1: vpand 320(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 320(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 320(%rdx),%ymm14,%ymm0
# qhasm: r10 ^= r
# asm 1: vpxor <r=reg256#1,<r10=reg256#14,<r10=reg256#14
# asm 2: vpxor <r=%ymm0,<r10=%ymm13,<r10=%ymm13
vpxor %ymm0,%ymm13,%ymm13
# qhasm: r = a0 & mem256[input_2 + 352]
# asm 1: vpand 352(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 352(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 352(%rdx),%ymm14,%ymm0
# qhasm: r11 ^= r
# asm 1: vpxor <r=reg256#1,<r11=reg256#2,<r11=reg256#2
# asm 2: vpxor <r=%ymm0,<r11=%ymm1,<r11=%ymm1
vpxor %ymm0,%ymm1,%ymm1
# qhasm: r = a0 & mem256[input_2 + 384]
# asm 1: vpand 384(<input_2=int64#3),<a0=reg256#15,>r=reg256#1
# asm 2: vpand 384(<input_2=%rdx),<a0=%ymm14,>r=%ymm0
vpand 384(%rdx),%ymm14,%ymm0
# qhasm: r12 ^= r
# asm 1: vpxor <r=reg256#1,<r12=reg256#3,<r12=reg256#3
# asm 2: vpxor <r=%ymm0,<r12=%ymm2,<r12=%ymm2
vpxor %ymm0,%ymm2,%ymm2
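# All 13 reduced coefficient vectors are final; store r12..r0 to the
# output buffer at input_0 (%rdi), highest coefficient first.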
# qhasm: mem256[ input_0 + 384 ] = r12
# asm 1: vmovupd <r12=reg256#3,384(<input_0=int64#1)
# asm 2: vmovupd <r12=%ymm2,384(<input_0=%rdi)
vmovupd %ymm2,384(%rdi)
# qhasm: mem256[ input_0 + 352 ] = r11
# asm 1: vmovupd <r11=reg256#2,352(<input_0=int64#1)
# asm 2: vmovupd <r11=%ymm1,352(<input_0=%rdi)
vmovupd %ymm1,352(%rdi)
# qhasm: mem256[ input_0 + 320 ] = r10
# asm 1: vmovupd <r10=reg256#14,320(<input_0=int64#1)
# asm 2: vmovupd <r10=%ymm13,320(<input_0=%rdi)
vmovupd %ymm13,320(%rdi)
# qhasm: mem256[ input_0 + 288 ] = r9
# asm 1: vmovupd <r9=reg256#13,288(<input_0=int64#1)
# asm 2: vmovupd <r9=%ymm12,288(<input_0=%rdi)
vmovupd %ymm12,288(%rdi)
# qhasm: mem256[ input_0 + 256 ] = r8
# asm 1: vmovupd <r8=reg256#12,256(<input_0=int64#1)
# asm 2: vmovupd <r8=%ymm11,256(<input_0=%rdi)
vmovupd %ymm11,256(%rdi)
# qhasm: mem256[ input_0 + 224 ] = r7
# asm 1: vmovupd <r7=reg256#11,224(<input_0=int64#1)
# asm 2: vmovupd <r7=%ymm10,224(<input_0=%rdi)
vmovupd %ymm10,224(%rdi)
# qhasm: mem256[ input_0 + 192 ] = r6
# asm 1: vmovupd <r6=reg256#10,192(<input_0=int64#1)
# asm 2: vmovupd <r6=%ymm9,192(<input_0=%rdi)
vmovupd %ymm9,192(%rdi)
# qhasm: mem256[ input_0 + 160 ] = r5
# asm 1: vmovupd <r5=reg256#9,160(<input_0=int64#1)
# asm 2: vmovupd <r5=%ymm8,160(<input_0=%rdi)
vmovupd %ymm8,160(%rdi)
# qhasm: mem256[ input_0 + 128 ] = r4
# asm 1: vmovupd <r4=reg256#8,128(<input_0=int64#1)
# asm 2: vmovupd <r4=%ymm7,128(<input_0=%rdi)
vmovupd %ymm7,128(%rdi)
# qhasm: mem256[ input_0 + 96 ] = r3
# asm 1: vmovupd <r3=reg256#7,96(<input_0=int64#1)
# asm 2: vmovupd <r3=%ymm6,96(<input_0=%rdi)
vmovupd %ymm6,96(%rdi)
# qhasm: mem256[ input_0 + 64 ] = r2
# asm 1: vmovupd <r2=reg256#6,64(<input_0=int64#1)
# asm 2: vmovupd <r2=%ymm5,64(<input_0=%rdi)
vmovupd %ymm5,64(%rdi)
# qhasm: mem256[ input_0 + 32 ] = r1
# asm 1: vmovupd <r1=reg256#5,32(<input_0=int64#1)
# asm 2: vmovupd <r1=%ymm4,32(<input_0=%rdi)
vmovupd %ymm4,32(%rdi)
# qhasm: mem256[ input_0 + 0 ] = r0
# asm 1: vmovupd <r0=reg256#4,0(<input_0=int64#1)
# asm 2: vmovupd <r0=%ymm3,0(<input_0=%rdi)
vmovupd %ymm3,0(%rdi)
# qhasm: return
add %r11,%rsp
ret