VirtualBox

source: vbox/trunk/src/libs/openssl-1.1.1f/crypto/genasm-elf/aesni-x86.S@ 83531

Last change on this file since 83531 was 83531, checked in by vboxsync, 5 years ago

setting svn:sync-process=export for openssl-1.1.1f, all files except tests

File size: 64.2 KB
Line-numbered listing follows (the leading numbers are part of the Trac export, not of the assembly source).
1.text
# void aesni_encrypt(const unsigned char *in, unsigned char *out, const AES_KEY *key)
# cdecl: 4(%esp)=in, 8(%esp)=out, 12(%esp)=key schedule.
# Encrypts one 16-byte block. The round count is read from byte offset 240
# of the key schedule and used as the loop counter.
# The .byte sequences encode AES-NI instructions for old assemblers:
#   102,15,56,220,209 = aesenc %xmm1,%xmm2
#   102,15,56,221,209 = aesenclast %xmm1,%xmm2
# NOTE(review): every line carries a Trac listing number fused to the text
# ("7 movl ..."); strip those numbers before assembling.
2.globl aesni_encrypt
3.type aesni_encrypt,@function
4.align 16
5aesni_encrypt:
6.L_aesni_encrypt_begin:
7 movl 4(%esp),%eax
8 movl 12(%esp),%edx
9 movups (%eax),%xmm2
10 movl 240(%edx),%ecx
11 movl 8(%esp),%eax
12 movups (%edx),%xmm0
13 movups 16(%edx),%xmm1
14 leal 32(%edx),%edx
# whiten with round key 0, then loop one aesenc per remaining round key
15 xorps %xmm0,%xmm2
16.L000enc1_loop_1:
17.byte 102,15,56,220,209
18 decl %ecx
19 movups (%edx),%xmm1
20 leal 16(%edx),%edx
21 jnz .L000enc1_loop_1
22.byte 102,15,56,221,209
# scrub key material from registers before returning
23 pxor %xmm0,%xmm0
24 pxor %xmm1,%xmm1
25 movups %xmm2,(%eax)
26 pxor %xmm2,%xmm2
27 ret
28.size aesni_encrypt,.-.L_aesni_encrypt_begin
# void aesni_decrypt(const unsigned char *in, unsigned char *out, const AES_KEY *key)
# cdecl: 4(%esp)=in, 8(%esp)=out, 12(%esp)=key schedule (rounds at offset 240).
# Mirror image of aesni_encrypt using the decrypt instructions:
#   102,15,56,222,209 = aesdec %xmm1,%xmm2
#   102,15,56,223,209 = aesdeclast %xmm1,%xmm2
29.globl aesni_decrypt
30.type aesni_decrypt,@function
31.align 16
32aesni_decrypt:
33.L_aesni_decrypt_begin:
34 movl 4(%esp),%eax
35 movl 12(%esp),%edx
36 movups (%eax),%xmm2
37 movl 240(%edx),%ecx
38 movl 8(%esp),%eax
39 movups (%edx),%xmm0
40 movups 16(%edx),%xmm1
41 leal 32(%edx),%edx
# whiten with round key 0, then one aesdec per remaining round key
42 xorps %xmm0,%xmm2
43.L001dec1_loop_2:
44.byte 102,15,56,222,209
45 decl %ecx
46 movups (%edx),%xmm1
47 leal 16(%edx),%edx
48 jnz .L001dec1_loop_2
49.byte 102,15,56,223,209
# scrub key material from registers before returning
50 pxor %xmm0,%xmm0
51 pxor %xmm1,%xmm1
52 movups %xmm2,(%eax)
53 pxor %xmm2,%xmm2
54 ret
55.size aesni_decrypt,.-.L_aesni_decrypt_begin
# _aesni_encrypt2 — internal helper: encrypt two blocks (xmm2, xmm3) in parallel.
# In:  %edx = key schedule, %ecx = round count, xmm2/xmm3 = plaintext blocks.
# Out: xmm2/xmm3 = ciphertext. Clobbers %ecx, %edx, xmm0, xmm1.
# %ecx is scaled to bytes (rounds*16), %edx pointed past the schedule, and the
# loop walks the round keys with a negative index, two rounds per iteration.
56.type _aesni_encrypt2,@function
57.align 16
58_aesni_encrypt2:
59 movups (%edx),%xmm0
60 shll $4,%ecx
61 movups 16(%edx),%xmm1
62 xorps %xmm0,%xmm2
63 pxor %xmm0,%xmm3
64 movups 32(%edx),%xmm0
65 leal 32(%edx,%ecx,1),%edx
66 negl %ecx
67 addl $16,%ecx
68.L002enc2_loop:
# aesenc %xmm1,%xmm2 / aesenc %xmm1,%xmm3
69.byte 102,15,56,220,209
70.byte 102,15,56,220,217
71 movups (%edx,%ecx,1),%xmm1
72 addl $32,%ecx
# aesenc %xmm0,%xmm2 / aesenc %xmm0,%xmm3
73.byte 102,15,56,220,208
74.byte 102,15,56,220,216
75 movups -16(%edx,%ecx,1),%xmm0
76 jnz .L002enc2_loop
77.byte 102,15,56,220,209
78.byte 102,15,56,220,217
# aesenclast with the final round key
79.byte 102,15,56,221,208
80.byte 102,15,56,221,216
81 ret
82.size _aesni_encrypt2,.-_aesni_encrypt2
# _aesni_decrypt2 — internal helper: decrypt two blocks (xmm2, xmm3) in parallel.
# In:  %edx = key schedule, %ecx = round count, xmm2/xmm3 = ciphertext blocks.
# Out: xmm2/xmm3 = plaintext. Clobbers %ecx, %edx, xmm0, xmm1.
# Same negative-index round-key walk as _aesni_encrypt2, with aesdec (0xDE)
# and aesdeclast (0xDF) instead of aesenc/aesenclast.
83.type _aesni_decrypt2,@function
84.align 16
85_aesni_decrypt2:
86 movups (%edx),%xmm0
87 shll $4,%ecx
88 movups 16(%edx),%xmm1
89 xorps %xmm0,%xmm2
90 pxor %xmm0,%xmm3
91 movups 32(%edx),%xmm0
92 leal 32(%edx,%ecx,1),%edx
93 negl %ecx
94 addl $16,%ecx
95.L003dec2_loop:
96.byte 102,15,56,222,209
97.byte 102,15,56,222,217
98 movups (%edx,%ecx,1),%xmm1
99 addl $32,%ecx
100.byte 102,15,56,222,208
101.byte 102,15,56,222,216
102 movups -16(%edx,%ecx,1),%xmm0
103 jnz .L003dec2_loop
104.byte 102,15,56,222,209
105.byte 102,15,56,222,217
106.byte 102,15,56,223,208
107.byte 102,15,56,223,216
108 ret
109.size _aesni_decrypt2,.-_aesni_decrypt2
# _aesni_encrypt3 — internal helper: encrypt three blocks (xmm2..xmm4) in parallel.
# In:  %edx = key schedule, %ecx = round count, xmm2..xmm4 = plaintext blocks.
# Out: xmm2..xmm4 = ciphertext. Clobbers %ecx, %edx, xmm0, xmm1.
110.type _aesni_encrypt3,@function
111.align 16
112_aesni_encrypt3:
113 movups (%edx),%xmm0
114 shll $4,%ecx
115 movups 16(%edx),%xmm1
# whiten all three blocks with round key 0
116 xorps %xmm0,%xmm2
117 pxor %xmm0,%xmm3
118 pxor %xmm0,%xmm4
119 movups 32(%edx),%xmm0
120 leal 32(%edx,%ecx,1),%edx
121 negl %ecx
122 addl $16,%ecx
123.L004enc3_loop:
# aesenc %xmm1,%xmm{2,3,4}
124.byte 102,15,56,220,209
125.byte 102,15,56,220,217
126.byte 102,15,56,220,225
127 movups (%edx,%ecx,1),%xmm1
128 addl $32,%ecx
# aesenc %xmm0,%xmm{2,3,4}
129.byte 102,15,56,220,208
130.byte 102,15,56,220,216
131.byte 102,15,56,220,224
132 movups -16(%edx,%ecx,1),%xmm0
133 jnz .L004enc3_loop
134.byte 102,15,56,220,209
135.byte 102,15,56,220,217
136.byte 102,15,56,220,225
# aesenclast with the final round key
137.byte 102,15,56,221,208
138.byte 102,15,56,221,216
139.byte 102,15,56,221,224
140 ret
141.size _aesni_encrypt3,.-_aesni_encrypt3
# _aesni_decrypt3 — internal helper: decrypt three blocks (xmm2..xmm4) in parallel.
# In:  %edx = key schedule, %ecx = round count, xmm2..xmm4 = ciphertext blocks.
# Out: xmm2..xmm4 = plaintext. Clobbers %ecx, %edx, xmm0, xmm1.
142.type _aesni_decrypt3,@function
143.align 16
144_aesni_decrypt3:
145 movups (%edx),%xmm0
146 shll $4,%ecx
147 movups 16(%edx),%xmm1
148 xorps %xmm0,%xmm2
149 pxor %xmm0,%xmm3
150 pxor %xmm0,%xmm4
151 movups 32(%edx),%xmm0
152 leal 32(%edx,%ecx,1),%edx
153 negl %ecx
154 addl $16,%ecx
155.L005dec3_loop:
# aesdec %xmm1,%xmm{2,3,4} then aesdec %xmm0,%xmm{2,3,4}
156.byte 102,15,56,222,209
157.byte 102,15,56,222,217
158.byte 102,15,56,222,225
159 movups (%edx,%ecx,1),%xmm1
160 addl $32,%ecx
161.byte 102,15,56,222,208
162.byte 102,15,56,222,216
163.byte 102,15,56,222,224
164 movups -16(%edx,%ecx,1),%xmm0
165 jnz .L005dec3_loop
166.byte 102,15,56,222,209
167.byte 102,15,56,222,217
168.byte 102,15,56,222,225
# aesdeclast with the final round key
169.byte 102,15,56,223,208
170.byte 102,15,56,223,216
171.byte 102,15,56,223,224
172 ret
173.size _aesni_decrypt3,.-_aesni_decrypt3
# _aesni_encrypt4 — internal helper: encrypt four blocks (xmm2..xmm5) in parallel.
# In:  %edx = key schedule, %ecx = round count, xmm2..xmm5 = plaintext blocks.
# Out: xmm2..xmm5 = ciphertext. Clobbers %ecx, %edx, xmm0, xmm1.
# The .byte 15,31,64,0 is "nopl 0(%eax)" — a 4-byte NOP used as padding.
174.type _aesni_encrypt4,@function
175.align 16
176_aesni_encrypt4:
177 movups (%edx),%xmm0
178 movups 16(%edx),%xmm1
179 shll $4,%ecx
# whiten all four blocks with round key 0
180 xorps %xmm0,%xmm2
181 pxor %xmm0,%xmm3
182 pxor %xmm0,%xmm4
183 pxor %xmm0,%xmm5
184 movups 32(%edx),%xmm0
185 leal 32(%edx,%ecx,1),%edx
186 negl %ecx
187.byte 15,31,64,0
188 addl $16,%ecx
189.L006enc4_loop:
# aesenc %xmm1,%xmm{2..5}
190.byte 102,15,56,220,209
191.byte 102,15,56,220,217
192.byte 102,15,56,220,225
193.byte 102,15,56,220,233
194 movups (%edx,%ecx,1),%xmm1
195 addl $32,%ecx
# aesenc %xmm0,%xmm{2..5}
196.byte 102,15,56,220,208
197.byte 102,15,56,220,216
198.byte 102,15,56,220,224
199.byte 102,15,56,220,232
200 movups -16(%edx,%ecx,1),%xmm0
201 jnz .L006enc4_loop
202.byte 102,15,56,220,209
203.byte 102,15,56,220,217
204.byte 102,15,56,220,225
205.byte 102,15,56,220,233
# aesenclast with the final round key
206.byte 102,15,56,221,208
207.byte 102,15,56,221,216
208.byte 102,15,56,221,224
209.byte 102,15,56,221,232
210 ret
211.size _aesni_encrypt4,.-_aesni_encrypt4
# _aesni_decrypt4 — internal helper: decrypt four blocks (xmm2..xmm5) in parallel.
# In:  %edx = key schedule, %ecx = round count, xmm2..xmm5 = ciphertext blocks.
# Out: xmm2..xmm5 = plaintext. Clobbers %ecx, %edx, xmm0, xmm1.
# The .byte 15,31,64,0 is "nopl 0(%eax)" — a 4-byte NOP used as padding.
212.type _aesni_decrypt4,@function
213.align 16
214_aesni_decrypt4:
215 movups (%edx),%xmm0
216 movups 16(%edx),%xmm1
217 shll $4,%ecx
218 xorps %xmm0,%xmm2
219 pxor %xmm0,%xmm3
220 pxor %xmm0,%xmm4
221 pxor %xmm0,%xmm5
222 movups 32(%edx),%xmm0
223 leal 32(%edx,%ecx,1),%edx
224 negl %ecx
225.byte 15,31,64,0
226 addl $16,%ecx
227.L007dec4_loop:
# aesdec %xmm1,%xmm{2..5} then aesdec %xmm0,%xmm{2..5}
228.byte 102,15,56,222,209
229.byte 102,15,56,222,217
230.byte 102,15,56,222,225
231.byte 102,15,56,222,233
232 movups (%edx,%ecx,1),%xmm1
233 addl $32,%ecx
234.byte 102,15,56,222,208
235.byte 102,15,56,222,216
236.byte 102,15,56,222,224
237.byte 102,15,56,222,232
238 movups -16(%edx,%ecx,1),%xmm0
239 jnz .L007dec4_loop
240.byte 102,15,56,222,209
241.byte 102,15,56,222,217
242.byte 102,15,56,222,225
243.byte 102,15,56,222,233
# aesdeclast with the final round key
244.byte 102,15,56,223,208
245.byte 102,15,56,223,216
246.byte 102,15,56,223,224
247.byte 102,15,56,223,232
248 ret
249.size _aesni_decrypt4,.-_aesni_decrypt4
# _aesni_encrypt6 — internal helper: encrypt six blocks (xmm2..xmm7) in parallel.
# In:  %edx = key schedule, %ecx = round count, xmm2..xmm7 = plaintext blocks.
# Out: xmm2..xmm7 = ciphertext. Clobbers %ecx, %edx, xmm0, xmm1.
# The first round for xmm2..xmm4 is interleaved with the key-whitening of
# xmm5..xmm7, then execution joins the main loop at .L008_aesni_encrypt6_inner.
# .L_aesni_encrypt6_enter is a secondary entry point used by callers (CTR/XTS
# code below) that perform the first round themselves before calling in.
250.type _aesni_encrypt6,@function
251.align 16
252_aesni_encrypt6:
253 movups (%edx),%xmm0
254 shll $4,%ecx
255 movups 16(%edx),%xmm1
256 xorps %xmm0,%xmm2
257 pxor %xmm0,%xmm3
258 pxor %xmm0,%xmm4
259.byte 102,15,56,220,209
260 pxor %xmm0,%xmm5
261 pxor %xmm0,%xmm6
262.byte 102,15,56,220,217
263 leal 32(%edx,%ecx,1),%edx
264 negl %ecx
265.byte 102,15,56,220,225
266 pxor %xmm0,%xmm7
267 movups (%edx,%ecx,1),%xmm0
268 addl $16,%ecx
269 jmp .L008_aesni_encrypt6_inner
270.align 16
271.L009enc6_loop:
272.byte 102,15,56,220,209
273.byte 102,15,56,220,217
274.byte 102,15,56,220,225
275.L008_aesni_encrypt6_inner:
276.byte 102,15,56,220,233
277.byte 102,15,56,220,241
278.byte 102,15,56,220,249
279.L_aesni_encrypt6_enter:
280 movups (%edx,%ecx,1),%xmm1
281 addl $32,%ecx
# aesenc %xmm0,%xmm{2..7}
282.byte 102,15,56,220,208
283.byte 102,15,56,220,216
284.byte 102,15,56,220,224
285.byte 102,15,56,220,232
286.byte 102,15,56,220,240
287.byte 102,15,56,220,248
288 movups -16(%edx,%ecx,1),%xmm0
289 jnz .L009enc6_loop
290.byte 102,15,56,220,209
291.byte 102,15,56,220,217
292.byte 102,15,56,220,225
293.byte 102,15,56,220,233
294.byte 102,15,56,220,241
295.byte 102,15,56,220,249
# aesenclast with the final round key
296.byte 102,15,56,221,208
297.byte 102,15,56,221,216
298.byte 102,15,56,221,224
299.byte 102,15,56,221,232
300.byte 102,15,56,221,240
301.byte 102,15,56,221,248
302 ret
303.size _aesni_encrypt6,.-_aesni_encrypt6
# _aesni_decrypt6 — internal helper: decrypt six blocks (xmm2..xmm7) in parallel.
# In:  %edx = key schedule, %ecx = round count, xmm2..xmm7 = ciphertext blocks.
# Out: xmm2..xmm7 = plaintext. Clobbers %ecx, %edx, xmm0, xmm1.
# Structure mirrors _aesni_encrypt6 (staggered first round, secondary entry
# point .L_aesni_decrypt6_enter) with aesdec/aesdeclast instead of aesenc.
304.type _aesni_decrypt6,@function
305.align 16
306_aesni_decrypt6:
307 movups (%edx),%xmm0
308 shll $4,%ecx
309 movups 16(%edx),%xmm1
310 xorps %xmm0,%xmm2
311 pxor %xmm0,%xmm3
312 pxor %xmm0,%xmm4
313.byte 102,15,56,222,209
314 pxor %xmm0,%xmm5
315 pxor %xmm0,%xmm6
316.byte 102,15,56,222,217
317 leal 32(%edx,%ecx,1),%edx
318 negl %ecx
319.byte 102,15,56,222,225
320 pxor %xmm0,%xmm7
321 movups (%edx,%ecx,1),%xmm0
322 addl $16,%ecx
323 jmp .L010_aesni_decrypt6_inner
324.align 16
325.L011dec6_loop:
326.byte 102,15,56,222,209
327.byte 102,15,56,222,217
328.byte 102,15,56,222,225
329.L010_aesni_decrypt6_inner:
330.byte 102,15,56,222,233
331.byte 102,15,56,222,241
332.byte 102,15,56,222,249
333.L_aesni_decrypt6_enter:
334 movups (%edx,%ecx,1),%xmm1
335 addl $32,%ecx
# aesdec %xmm0,%xmm{2..7}
336.byte 102,15,56,222,208
337.byte 102,15,56,222,216
338.byte 102,15,56,222,224
339.byte 102,15,56,222,232
340.byte 102,15,56,222,240
341.byte 102,15,56,222,248
342 movups -16(%edx,%ecx,1),%xmm0
343 jnz .L011dec6_loop
344.byte 102,15,56,222,209
345.byte 102,15,56,222,217
346.byte 102,15,56,222,225
347.byte 102,15,56,222,233
348.byte 102,15,56,222,241
349.byte 102,15,56,222,249
# aesdeclast with the final round key
350.byte 102,15,56,223,208
351.byte 102,15,56,223,216
352.byte 102,15,56,223,224
353.byte 102,15,56,223,232
354.byte 102,15,56,223,240
355.byte 102,15,56,223,248
356 ret
357.size _aesni_decrypt6,.-_aesni_decrypt6
# void aesni_ecb_encrypt(const unsigned char *in, unsigned char *out,
#                        size_t length, const AES_KEY *key, int enc)
# After the four pushes: 20(%esp)=in, 24(%esp)=out, 28(%esp)=length,
# 32(%esp)=key, 36(%esp)=enc (nonzero = encrypt, zero = decrypt).
# length is truncated to a multiple of 16; zero length returns immediately.
# Main path processes 6 blocks per iteration via _aesni_{en,de}crypt6 with
# tail handling for 1..5 blocks (%ebp/%ebx preserve key/rounds across calls).
# All xmm registers are cleared before returning to avoid leaking key data.
358.globl aesni_ecb_encrypt
359.type aesni_ecb_encrypt,@function
360.align 16
361aesni_ecb_encrypt:
362.L_aesni_ecb_encrypt_begin:
363 pushl %ebp
364 pushl %ebx
365 pushl %esi
366 pushl %edi
367 movl 20(%esp),%esi
368 movl 24(%esp),%edi
369 movl 28(%esp),%eax
370 movl 32(%esp),%edx
371 movl 36(%esp),%ebx
372 andl $-16,%eax
373 jz .L012ecb_ret
374 movl 240(%edx),%ecx
375 testl %ebx,%ebx
376 jz .L013ecb_decrypt
# ---- encrypt path: %ebp=key, %ebx=rounds saved across helper calls ----
377 movl %edx,%ebp
378 movl %ecx,%ebx
379 cmpl $96,%eax
380 jb .L014ecb_enc_tail
381 movdqu (%esi),%xmm2
382 movdqu 16(%esi),%xmm3
383 movdqu 32(%esi),%xmm4
384 movdqu 48(%esi),%xmm5
385 movdqu 64(%esi),%xmm6
386 movdqu 80(%esi),%xmm7
387 leal 96(%esi),%esi
388 subl $96,%eax
389 jmp .L015ecb_enc_loop6_enter
390.align 16
391.L016ecb_enc_loop6:
# store previous 6 results while loading the next 6 inputs
392 movups %xmm2,(%edi)
393 movdqu (%esi),%xmm2
394 movups %xmm3,16(%edi)
395 movdqu 16(%esi),%xmm3
396 movups %xmm4,32(%edi)
397 movdqu 32(%esi),%xmm4
398 movups %xmm5,48(%edi)
399 movdqu 48(%esi),%xmm5
400 movups %xmm6,64(%edi)
401 movdqu 64(%esi),%xmm6
402 movups %xmm7,80(%edi)
403 leal 96(%edi),%edi
404 movdqu 80(%esi),%xmm7
405 leal 96(%esi),%esi
406.L015ecb_enc_loop6_enter:
407 call _aesni_encrypt6
# restore key/rounds clobbered by the helper
408 movl %ebp,%edx
409 movl %ebx,%ecx
410 subl $96,%eax
411 jnc .L016ecb_enc_loop6
412 movups %xmm2,(%edi)
413 movups %xmm3,16(%edi)
414 movups %xmm4,32(%edi)
415 movups %xmm5,48(%edi)
416 movups %xmm6,64(%edi)
417 movups %xmm7,80(%edi)
418 leal 96(%edi),%edi
419 addl $96,%eax
420 jz .L012ecb_ret
# ---- encrypt tail: 1..5 remaining blocks ----
421.L014ecb_enc_tail:
422 movups (%esi),%xmm2
423 cmpl $32,%eax
424 jb .L017ecb_enc_one
425 movups 16(%esi),%xmm3
426 je .L018ecb_enc_two
427 movups 32(%esi),%xmm4
428 cmpl $64,%eax
429 jb .L019ecb_enc_three
430 movups 48(%esi),%xmm5
431 je .L020ecb_enc_four
# five blocks: run the 6-wide helper with xmm7 zeroed
432 movups 64(%esi),%xmm6
433 xorps %xmm7,%xmm7
434 call _aesni_encrypt6
435 movups %xmm2,(%edi)
436 movups %xmm3,16(%edi)
437 movups %xmm4,32(%edi)
438 movups %xmm5,48(%edi)
439 movups %xmm6,64(%edi)
440 jmp .L012ecb_ret
441.align 16
442.L017ecb_enc_one:
# single block: inline aesenc loop (same shape as aesni_encrypt)
443 movups (%edx),%xmm0
444 movups 16(%edx),%xmm1
445 leal 32(%edx),%edx
446 xorps %xmm0,%xmm2
447.L021enc1_loop_3:
448.byte 102,15,56,220,209
449 decl %ecx
450 movups (%edx),%xmm1
451 leal 16(%edx),%edx
452 jnz .L021enc1_loop_3
453.byte 102,15,56,221,209
454 movups %xmm2,(%edi)
455 jmp .L012ecb_ret
456.align 16
457.L018ecb_enc_two:
458 call _aesni_encrypt2
459 movups %xmm2,(%edi)
460 movups %xmm3,16(%edi)
461 jmp .L012ecb_ret
462.align 16
463.L019ecb_enc_three:
464 call _aesni_encrypt3
465 movups %xmm2,(%edi)
466 movups %xmm3,16(%edi)
467 movups %xmm4,32(%edi)
468 jmp .L012ecb_ret
469.align 16
470.L020ecb_enc_four:
471 call _aesni_encrypt4
472 movups %xmm2,(%edi)
473 movups %xmm3,16(%edi)
474 movups %xmm4,32(%edi)
475 movups %xmm5,48(%edi)
476 jmp .L012ecb_ret
477.align 16
# ---- decrypt path: identical structure using the decrypt helpers ----
478.L013ecb_decrypt:
479 movl %edx,%ebp
480 movl %ecx,%ebx
481 cmpl $96,%eax
482 jb .L022ecb_dec_tail
483 movdqu (%esi),%xmm2
484 movdqu 16(%esi),%xmm3
485 movdqu 32(%esi),%xmm4
486 movdqu 48(%esi),%xmm5
487 movdqu 64(%esi),%xmm6
488 movdqu 80(%esi),%xmm7
489 leal 96(%esi),%esi
490 subl $96,%eax
491 jmp .L023ecb_dec_loop6_enter
492.align 16
493.L024ecb_dec_loop6:
494 movups %xmm2,(%edi)
495 movdqu (%esi),%xmm2
496 movups %xmm3,16(%edi)
497 movdqu 16(%esi),%xmm3
498 movups %xmm4,32(%edi)
499 movdqu 32(%esi),%xmm4
500 movups %xmm5,48(%edi)
501 movdqu 48(%esi),%xmm5
502 movups %xmm6,64(%edi)
503 movdqu 64(%esi),%xmm6
504 movups %xmm7,80(%edi)
505 leal 96(%edi),%edi
506 movdqu 80(%esi),%xmm7
507 leal 96(%esi),%esi
508.L023ecb_dec_loop6_enter:
509 call _aesni_decrypt6
510 movl %ebp,%edx
511 movl %ebx,%ecx
512 subl $96,%eax
513 jnc .L024ecb_dec_loop6
514 movups %xmm2,(%edi)
515 movups %xmm3,16(%edi)
516 movups %xmm4,32(%edi)
517 movups %xmm5,48(%edi)
518 movups %xmm6,64(%edi)
519 movups %xmm7,80(%edi)
520 leal 96(%edi),%edi
521 addl $96,%eax
522 jz .L012ecb_ret
# ---- decrypt tail: 1..5 remaining blocks ----
523.L022ecb_dec_tail:
524 movups (%esi),%xmm2
525 cmpl $32,%eax
526 jb .L025ecb_dec_one
527 movups 16(%esi),%xmm3
528 je .L026ecb_dec_two
529 movups 32(%esi),%xmm4
530 cmpl $64,%eax
531 jb .L027ecb_dec_three
532 movups 48(%esi),%xmm5
533 je .L028ecb_dec_four
534 movups 64(%esi),%xmm6
535 xorps %xmm7,%xmm7
536 call _aesni_decrypt6
537 movups %xmm2,(%edi)
538 movups %xmm3,16(%edi)
539 movups %xmm4,32(%edi)
540 movups %xmm5,48(%edi)
541 movups %xmm6,64(%edi)
542 jmp .L012ecb_ret
543.align 16
544.L025ecb_dec_one:
545 movups (%edx),%xmm0
546 movups 16(%edx),%xmm1
547 leal 32(%edx),%edx
548 xorps %xmm0,%xmm2
549.L029dec1_loop_4:
550.byte 102,15,56,222,209
551 decl %ecx
552 movups (%edx),%xmm1
553 leal 16(%edx),%edx
554 jnz .L029dec1_loop_4
555.byte 102,15,56,223,209
556 movups %xmm2,(%edi)
557 jmp .L012ecb_ret
558.align 16
559.L026ecb_dec_two:
560 call _aesni_decrypt2
561 movups %xmm2,(%edi)
562 movups %xmm3,16(%edi)
563 jmp .L012ecb_ret
564.align 16
565.L027ecb_dec_three:
566 call _aesni_decrypt3
567 movups %xmm2,(%edi)
568 movups %xmm3,16(%edi)
569 movups %xmm4,32(%edi)
570 jmp .L012ecb_ret
571.align 16
572.L028ecb_dec_four:
573 call _aesni_decrypt4
574 movups %xmm2,(%edi)
575 movups %xmm3,16(%edi)
576 movups %xmm4,32(%edi)
577 movups %xmm5,48(%edi)
# ---- common exit: scrub xmm state, restore callee-saved registers ----
578.L012ecb_ret:
579 pxor %xmm0,%xmm0
580 pxor %xmm1,%xmm1
581 pxor %xmm2,%xmm2
582 pxor %xmm3,%xmm3
583 pxor %xmm4,%xmm4
584 pxor %xmm5,%xmm5
585 pxor %xmm6,%xmm6
586 pxor %xmm7,%xmm7
587 popl %edi
588 popl %esi
589 popl %ebx
590 popl %ebp
591 ret
592.size aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin
# void aesni_ccm64_encrypt_blocks(const unsigned char *in, unsigned char *out,
#                                 size_t blocks, const AES_KEY *key,
#                                 const unsigned char *ivec, unsigned char *cmac)
# After the four pushes: 20(%esp)=in, 24(%esp)=out, 28(%esp)=blocks,
# 32(%esp)=key, 36(%esp)=ivec, 40(%esp)=cmac.
# CCM with a 64-bit counter field: each iteration encrypts the counter (xmm2)
# and xors the CMAC accumulator (xmm3) with the plaintext, running both AES
# computations in lock-step two-wide through the round loop.
# A scratch frame is built on an aligned stack; the 4 dwords stored at (%esp)
# form the pshufb byte-swap mask (0x000102030405...0c0f0e0d), and 16(%esp)
# holds the constant 1 used to increment the counter via paddq.
# .byte 102,15,56,0,253 = pshufb %xmm5,%xmm7 (byte-swap the counter block).
593.globl aesni_ccm64_encrypt_blocks
594.type aesni_ccm64_encrypt_blocks,@function
595.align 16
596aesni_ccm64_encrypt_blocks:
597.L_aesni_ccm64_encrypt_blocks_begin:
598 pushl %ebp
599 pushl %ebx
600 pushl %esi
601 pushl %edi
602 movl 20(%esp),%esi
603 movl 24(%esp),%edi
604 movl 28(%esp),%eax
605 movl 32(%esp),%edx
606 movl 36(%esp),%ebx
607 movl 40(%esp),%ecx
# build an aligned scratch frame; original %esp saved at 48(%esp)
608 movl %esp,%ebp
609 subl $60,%esp
610 andl $-16,%esp
611 movl %ebp,48(%esp)
612 movdqu (%ebx),%xmm7
613 movdqu (%ecx),%xmm3
614 movl 240(%edx),%ecx
# store the pshufb byte-swap mask at (%esp)
615 movl $202182159,(%esp)
616 movl $134810123,4(%esp)
617 movl $67438087,8(%esp)
618 movl $66051,12(%esp)
# store the 128-bit constant 1 at 16(%esp) (counter increment)
619 movl $1,%ebx
620 xorl %ebp,%ebp
621 movl %ebx,16(%esp)
622 movl %ebp,20(%esp)
623 movl %ebp,24(%esp)
624 movl %ebp,28(%esp)
625 shll $4,%ecx
626 movl $16,%ebx
627 leal (%edx),%ebp
628 movdqa (%esp),%xmm5
629 movdqa %xmm7,%xmm2
630 leal 32(%edx,%ecx,1),%edx
631 subl %ecx,%ebx
632.byte 102,15,56,0,253
633.L030ccm64_enc_outer:
# per block: whiten counter (xmm2) and cmac^plaintext (xmm3), then run both
# through the rounds two-wide (same pattern as _aesni_encrypt2)
634 movups (%ebp),%xmm0
635 movl %ebx,%ecx
636 movups (%esi),%xmm6
637 xorps %xmm0,%xmm2
638 movups 16(%ebp),%xmm1
639 xorps %xmm6,%xmm0
640 xorps %xmm0,%xmm3
641 movups 32(%ebp),%xmm0
642.L031ccm64_enc2_loop:
643.byte 102,15,56,220,209
644.byte 102,15,56,220,217
645 movups (%edx,%ecx,1),%xmm1
646 addl $32,%ecx
647.byte 102,15,56,220,208
648.byte 102,15,56,220,216
649 movups -16(%edx,%ecx,1),%xmm0
650 jnz .L031ccm64_enc2_loop
651.byte 102,15,56,220,209
652.byte 102,15,56,220,217
# bump the (byte-swapped) counter while the last rounds finish
653 paddq 16(%esp),%xmm7
654 decl %eax
655.byte 102,15,56,221,208
656.byte 102,15,56,221,216
657 leal 16(%esi),%esi
# ciphertext = keystream ^ plaintext
658 xorps %xmm2,%xmm6
659 movdqa %xmm7,%xmm2
660 movups %xmm6,(%edi)
661.byte 102,15,56,0,213
662 leal 16(%edi),%edi
663 jnz .L030ccm64_enc_outer
# restore %esp, write back the CMAC, scrub xmm state
664 movl 48(%esp),%esp
665 movl 40(%esp),%edi
666 movups %xmm3,(%edi)
667 pxor %xmm0,%xmm0
668 pxor %xmm1,%xmm1
669 pxor %xmm2,%xmm2
670 pxor %xmm3,%xmm3
671 pxor %xmm4,%xmm4
672 pxor %xmm5,%xmm5
673 pxor %xmm6,%xmm6
674 pxor %xmm7,%xmm7
675 popl %edi
676 popl %esi
677 popl %ebx
678 popl %ebp
679 ret
680.size aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin
# void aesni_ccm64_decrypt_blocks(const unsigned char *in, unsigned char *out,
#                                 size_t blocks, const AES_KEY *key,
#                                 const unsigned char *ivec, unsigned char *cmac)
# Same stack layout and constants as aesni_ccm64_encrypt_blocks.
# Decrypt order differs from encrypt: the keystream block is produced first,
# the plaintext recovered (out = in ^ keystream), and only then folded into
# the CMAC — so the per-iteration loop encrypts next-counter and cmac^pt in
# parallel, and .L034ccm64_dec_break finishes the final CMAC block alone.
681.globl aesni_ccm64_decrypt_blocks
682.type aesni_ccm64_decrypt_blocks,@function
683.align 16
684aesni_ccm64_decrypt_blocks:
685.L_aesni_ccm64_decrypt_blocks_begin:
686 pushl %ebp
687 pushl %ebx
688 pushl %esi
689 pushl %edi
690 movl 20(%esp),%esi
691 movl 24(%esp),%edi
692 movl 28(%esp),%eax
693 movl 32(%esp),%edx
694 movl 36(%esp),%ebx
695 movl 40(%esp),%ecx
# aligned scratch frame; original %esp saved at 48(%esp)
696 movl %esp,%ebp
697 subl $60,%esp
698 andl $-16,%esp
699 movl %ebp,48(%esp)
700 movdqu (%ebx),%xmm7
701 movdqu (%ecx),%xmm3
702 movl 240(%edx),%ecx
# pshufb byte-swap mask at (%esp), 128-bit constant 1 at 16(%esp)
703 movl $202182159,(%esp)
704 movl $134810123,4(%esp)
705 movl $67438087,8(%esp)
706 movl $66051,12(%esp)
707 movl $1,%ebx
708 xorl %ebp,%ebp
709 movl %ebx,16(%esp)
710 movl %ebp,20(%esp)
711 movl %ebp,24(%esp)
712 movl %ebp,28(%esp)
713 movdqa (%esp),%xmm5
714 movdqa %xmm7,%xmm2
715 movl %edx,%ebp
716 movl %ecx,%ebx
717.byte 102,15,56,0,253
# encrypt the first counter block (single-stream aesenc loop)
718 movups (%edx),%xmm0
719 movups 16(%edx),%xmm1
720 leal 32(%edx),%edx
721 xorps %xmm0,%xmm2
722.L032enc1_loop_5:
723.byte 102,15,56,220,209
724 decl %ecx
725 movups (%edx),%xmm1
726 leal 16(%edx),%edx
727 jnz .L032enc1_loop_5
728.byte 102,15,56,221,209
729 shll $4,%ebx
730 movl $16,%ecx
731 movups (%esi),%xmm6
732 paddq 16(%esp),%xmm7
733 leal 16(%esi),%esi
734 subl %ebx,%ecx
735 leal 32(%ebp,%ebx,1),%edx
736 movl %ecx,%ebx
737 jmp .L033ccm64_dec_outer
738.align 16
739.L033ccm64_dec_outer:
# plaintext = ciphertext ^ keystream; write it out, advance counter
740 xorps %xmm2,%xmm6
741 movdqa %xmm7,%xmm2
742 movups %xmm6,(%edi)
743 leal 16(%edi),%edi
744.byte 102,15,56,0,213
745 subl $1,%eax
746 jz .L034ccm64_dec_break
# run next-counter (xmm2) and cmac^plaintext (xmm3) two-wide
747 movups (%ebp),%xmm0
748 movl %ebx,%ecx
749 movups 16(%ebp),%xmm1
750 xorps %xmm0,%xmm6
751 xorps %xmm0,%xmm2
752 xorps %xmm6,%xmm3
753 movups 32(%ebp),%xmm0
754.L035ccm64_dec2_loop:
755.byte 102,15,56,220,209
756.byte 102,15,56,220,217
757 movups (%edx,%ecx,1),%xmm1
758 addl $32,%ecx
759.byte 102,15,56,220,208
760.byte 102,15,56,220,216
761 movups -16(%edx,%ecx,1),%xmm0
762 jnz .L035ccm64_dec2_loop
763 movups (%esi),%xmm6
764 paddq 16(%esp),%xmm7
765.byte 102,15,56,220,209
766.byte 102,15,56,220,217
767.byte 102,15,56,221,208
768.byte 102,15,56,221,216
769 leal 16(%esi),%esi
770 jmp .L033ccm64_dec_outer
771.align 16
772.L034ccm64_dec_break:
# fold the final plaintext block into the CMAC (single-stream loop on xmm3)
773 movl 240(%ebp),%ecx
774 movl %ebp,%edx
775 movups (%edx),%xmm0
776 movups 16(%edx),%xmm1
777 xorps %xmm0,%xmm6
778 leal 32(%edx),%edx
779 xorps %xmm6,%xmm3
780.L036enc1_loop_6:
781.byte 102,15,56,220,217
782 decl %ecx
783 movups (%edx),%xmm1
784 leal 16(%edx),%edx
785 jnz .L036enc1_loop_6
786.byte 102,15,56,221,217
# restore %esp, write back the CMAC, scrub xmm state
787 movl 48(%esp),%esp
788 movl 40(%esp),%edi
789 movups %xmm3,(%edi)
790 pxor %xmm0,%xmm0
791 pxor %xmm1,%xmm1
792 pxor %xmm2,%xmm2
793 pxor %xmm3,%xmm3
794 pxor %xmm4,%xmm4
795 pxor %xmm5,%xmm5
796 pxor %xmm6,%xmm6
797 pxor %xmm7,%xmm7
798 popl %edi
799 popl %esi
800 popl %ebx
801 popl %ebp
802 ret
803.size aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin
# void aesni_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
#                                 size_t blocks, const AES_KEY *key,
#                                 const unsigned char *ivec)
# After the four pushes: 20(%esp)=in, 24(%esp)=out, 28(%esp)=blocks,
# 32(%esp)=key, 36(%esp)=ivec. CTR mode with a 32-bit big-endian counter
# in the last dword of the IV; six keystream blocks are generated per
# iteration of the main loop via the _aesni_encrypt6 machinery.
# Notable .byte encodings:
#   102,15,58,22,251,3 = pextrd $3,%xmm7,%ebx   (pull counter out of IV)
#   102,15,58,34,2xx,n = pinsrd  (build counter vectors xmm0/xmm1)
#   102,15,56,0,194/202 = pshufb %xmm2,%xmm0 / %xmm2,%xmm1 (byte-swap)
# Scratch frame: byte-swap mask at (%esp), counter increments at 16(%esp),
# key^IV block at 32(%esp), counter state at 48/64(%esp), saved %esp at 80(%esp).
804.globl aesni_ctr32_encrypt_blocks
805.type aesni_ctr32_encrypt_blocks,@function
806.align 16
807aesni_ctr32_encrypt_blocks:
808.L_aesni_ctr32_encrypt_blocks_begin:
809 pushl %ebp
810 pushl %ebx
811 pushl %esi
812 pushl %edi
813 movl 20(%esp),%esi
814 movl 24(%esp),%edi
815 movl 28(%esp),%eax
816 movl 32(%esp),%edx
817 movl 36(%esp),%ebx
# aligned scratch frame; original %esp saved at 80(%esp)
818 movl %esp,%ebp
819 subl $88,%esp
820 andl $-16,%esp
821 movl %ebp,80(%esp)
822 cmpl $1,%eax
823 je .L037ctr32_one_shortcut
824 movdqu (%ebx),%xmm7
# byte-swap mask at (%esp); per-lane counter increment 6 at 16(%esp)
825 movl $202182159,(%esp)
826 movl $134810123,4(%esp)
827 movl $67438087,8(%esp)
828 movl $66051,12(%esp)
829 movl $6,%ecx
830 xorl %ebp,%ebp
831 movl %ecx,16(%esp)
832 movl %ecx,20(%esp)
833 movl %ecx,24(%esp)
834 movl %ebp,28(%esp)
# extract the 32-bit counter, build counters n..n+2 in xmm0, n+3..n+5 in xmm1
835.byte 102,15,58,22,251,3
836.byte 102,15,58,34,253,3
837 movl 240(%edx),%ecx
838 bswap %ebx
839 pxor %xmm0,%xmm0
840 pxor %xmm1,%xmm1
841 movdqa (%esp),%xmm2
842.byte 102,15,58,34,195,0
843 leal 3(%ebx),%ebp
844.byte 102,15,58,34,205,0
845 incl %ebx
846.byte 102,15,58,34,195,1
847 incl %ebp
848.byte 102,15,58,34,205,1
849 incl %ebx
850.byte 102,15,58,34,195,2
851 incl %ebp
852.byte 102,15,58,34,205,2
853 movdqa %xmm0,48(%esp)
854.byte 102,15,56,0,194
855 movdqu (%edx),%xmm6
856 movdqa %xmm1,64(%esp)
857.byte 102,15,56,0,202
858 pshufd $192,%xmm0,%xmm2
859 pshufd $128,%xmm0,%xmm3
860 cmpl $6,%eax
861 jb .L038ctr32_tail
# precompute round-key-0 ^ IV at 32(%esp); %ebp=key, %ebx=16-rounds*16
862 pxor %xmm6,%xmm7
863 shll $4,%ecx
864 movl $16,%ebx
865 movdqa %xmm7,32(%esp)
866 movl %edx,%ebp
867 subl %ecx,%ebx
868 leal 32(%edx,%ecx,1),%edx
869 subl $6,%eax
870 jmp .L039ctr32_loop6
871.align 16
872.L039ctr32_loop6:
# expand the two counter vectors into six whitened blocks xmm2..xmm7,
# start round 1 inline, then jump into the shared 6-wide round loop
873 pshufd $64,%xmm0,%xmm4
874 movdqa 32(%esp),%xmm0
875 pshufd $192,%xmm1,%xmm5
876 pxor %xmm0,%xmm2
877 pshufd $128,%xmm1,%xmm6
878 pxor %xmm0,%xmm3
879 pshufd $64,%xmm1,%xmm7
880 movups 16(%ebp),%xmm1
881 pxor %xmm0,%xmm4
882 pxor %xmm0,%xmm5
883.byte 102,15,56,220,209
884 pxor %xmm0,%xmm6
885 pxor %xmm0,%xmm7
886.byte 102,15,56,220,217
887 movups 32(%ebp),%xmm0
888 movl %ebx,%ecx
889.byte 102,15,56,220,225
890.byte 102,15,56,220,233
891.byte 102,15,56,220,241
892.byte 102,15,56,220,249
893 call .L_aesni_encrypt6_enter
# xor keystream with input, store output, advance counters for next pass
894 movups (%esi),%xmm1
895 movups 16(%esi),%xmm0
896 xorps %xmm1,%xmm2
897 movups 32(%esi),%xmm1
898 xorps %xmm0,%xmm3
899 movups %xmm2,(%edi)
900 movdqa 16(%esp),%xmm0
901 xorps %xmm1,%xmm4
902 movdqa 64(%esp),%xmm1
903 movups %xmm3,16(%edi)
904 movups %xmm4,32(%edi)
905 paddd %xmm0,%xmm1
906 paddd 48(%esp),%xmm0
907 movdqa (%esp),%xmm2
908 movups 48(%esi),%xmm3
909 movups 64(%esi),%xmm4
910 xorps %xmm3,%xmm5
911 movups 80(%esi),%xmm3
912 leal 96(%esi),%esi
913 movdqa %xmm0,48(%esp)
914.byte 102,15,56,0,194
915 xorps %xmm4,%xmm6
916 movups %xmm5,48(%edi)
917 xorps %xmm3,%xmm7
918 movdqa %xmm1,64(%esp)
919.byte 102,15,56,0,202
920 movups %xmm6,64(%edi)
921 pshufd $192,%xmm0,%xmm2
922 movups %xmm7,80(%edi)
923 leal 96(%edi),%edi
924 pshufd $128,%xmm0,%xmm3
925 subl $6,%eax
926 jnc .L039ctr32_loop6
927 addl $6,%eax
928 jz .L040ctr32_ret
# undo the key^IV trick before handling the tail
929 movdqu (%ebp),%xmm7
930 movl %ebp,%edx
931 pxor 32(%esp),%xmm7
932 movl 240(%ebp),%ecx
# ---- tail: 1..5 remaining blocks ----
933.L038ctr32_tail:
934 por %xmm7,%xmm2
935 cmpl $2,%eax
936 jb .L041ctr32_one
937 pshufd $64,%xmm0,%xmm4
938 por %xmm7,%xmm3
939 je .L042ctr32_two
940 pshufd $192,%xmm1,%xmm5
941 por %xmm7,%xmm4
942 cmpl $4,%eax
943 jb .L043ctr32_three
944 pshufd $128,%xmm1,%xmm6
945 por %xmm7,%xmm5
946 je .L044ctr32_four
947 por %xmm7,%xmm6
948 call _aesni_encrypt6
949 movups (%esi),%xmm1
950 movups 16(%esi),%xmm0
951 xorps %xmm1,%xmm2
952 movups 32(%esi),%xmm1
953 xorps %xmm0,%xmm3
954 movups 48(%esi),%xmm0
955 xorps %xmm1,%xmm4
956 movups 64(%esi),%xmm1
957 xorps %xmm0,%xmm5
958 movups %xmm2,(%edi)
959 xorps %xmm1,%xmm6
960 movups %xmm3,16(%edi)
961 movups %xmm4,32(%edi)
962 movups %xmm5,48(%edi)
963 movups %xmm6,64(%edi)
964 jmp .L040ctr32_ret
965.align 16
966.L037ctr32_one_shortcut:
# single block: encrypt the IV directly, no counter bookkeeping needed
967 movups (%ebx),%xmm2
968 movl 240(%edx),%ecx
969.L041ctr32_one:
970 movups (%edx),%xmm0
971 movups 16(%edx),%xmm1
972 leal 32(%edx),%edx
973 xorps %xmm0,%xmm2
974.L045enc1_loop_7:
975.byte 102,15,56,220,209
976 decl %ecx
977 movups (%edx),%xmm1
978 leal 16(%edx),%edx
979 jnz .L045enc1_loop_7
980.byte 102,15,56,221,209
981 movups (%esi),%xmm6
982 xorps %xmm2,%xmm6
983 movups %xmm6,(%edi)
984 jmp .L040ctr32_ret
985.align 16
986.L042ctr32_two:
987 call _aesni_encrypt2
988 movups (%esi),%xmm5
989 movups 16(%esi),%xmm6
990 xorps %xmm5,%xmm2
991 xorps %xmm6,%xmm3
992 movups %xmm2,(%edi)
993 movups %xmm3,16(%edi)
994 jmp .L040ctr32_ret
995.align 16
996.L043ctr32_three:
997 call _aesni_encrypt3
998 movups (%esi),%xmm5
999 movups 16(%esi),%xmm6
1000 xorps %xmm5,%xmm2
1001 movups 32(%esi),%xmm7
1002 xorps %xmm6,%xmm3
1003 movups %xmm2,(%edi)
1004 xorps %xmm7,%xmm4
1005 movups %xmm3,16(%edi)
1006 movups %xmm4,32(%edi)
1007 jmp .L040ctr32_ret
1008.align 16
1009.L044ctr32_four:
1010 call _aesni_encrypt4
1011 movups (%esi),%xmm6
1012 movups 16(%esi),%xmm7
1013 movups 32(%esi),%xmm1
1014 xorps %xmm6,%xmm2
1015 movups 48(%esi),%xmm0
1016 xorps %xmm7,%xmm3
1017 movups %xmm2,(%edi)
1018 xorps %xmm1,%xmm4
1019 movups %xmm3,16(%edi)
1020 xorps %xmm0,%xmm5
1021 movups %xmm4,32(%edi)
1022 movups %xmm5,48(%edi)
# ---- exit: scrub registers AND the key-bearing stack slots, restore %esp ----
1023.L040ctr32_ret:
1024 pxor %xmm0,%xmm0
1025 pxor %xmm1,%xmm1
1026 pxor %xmm2,%xmm2
1027 pxor %xmm3,%xmm3
1028 pxor %xmm4,%xmm4
1029 movdqa %xmm0,32(%esp)
1030 pxor %xmm5,%xmm5
1031 movdqa %xmm0,48(%esp)
1032 pxor %xmm6,%xmm6
1033 movdqa %xmm0,64(%esp)
1034 pxor %xmm7,%xmm7
1035 movl 80(%esp),%esp
1036 popl %edi
1037 popl %esi
1038 popl %ebx
1039 popl %ebp
1040 ret
1041.size aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin
# void aesni_xts_encrypt(const unsigned char *in, unsigned char *out,
#                        size_t length, const AES_KEY *key1,
#                        const AES_KEY *key2, const unsigned char iv[16])
# After the four pushes: 20(%esp)=in, 24(%esp)=out, 28(%esp)=length,
# 32(%esp)=key1 (data key), 36(%esp)=key2 (tweak key), 40(%esp)=iv.
# XTS-AES encryption: first the IV is encrypted with key2 to form the initial
# tweak (xmm1); then data is processed 6 blocks at a time, each block xored
# with its tweak before and after encryption with key1.
# The tweak is advanced by doubling in GF(2^128): the pshufd/pcmpgtd/pand/
# paddq/pxor sequences multiply by x using the reduction constant 0x87,
# which is stored as dword 135 at 96(%esp).
# Scratch frame: tweaks 0-5 at (%esp)..80(%esp), constant at 96(%esp),
# residual length at 112(%esp), saved %esp at 116(%esp).
# A trailing partial block is handled by ciphertext stealing
# (.L057xts_enc_steal + final single-block encryption).
1042.globl aesni_xts_encrypt
1043.type aesni_xts_encrypt,@function
1044.align 16
1045aesni_xts_encrypt:
1046.L_aesni_xts_encrypt_begin:
1047 pushl %ebp
1048 pushl %ebx
1049 pushl %esi
1050 pushl %edi
1051 movl 36(%esp),%edx
1052 movl 40(%esp),%esi
1053 movl 240(%edx),%ecx
# encrypt the IV with key2 to obtain the initial tweak in xmm2
1054 movups (%esi),%xmm2
1055 movups (%edx),%xmm0
1056 movups 16(%edx),%xmm1
1057 leal 32(%edx),%edx
1058 xorps %xmm0,%xmm2
1059.L046enc1_loop_8:
1060.byte 102,15,56,220,209
1061 decl %ecx
1062 movups (%edx),%xmm1
1063 leal 16(%edx),%edx
1064 jnz .L046enc1_loop_8
1065.byte 102,15,56,221,209
1066 movl 20(%esp),%esi
1067 movl 24(%esp),%edi
1068 movl 28(%esp),%eax
1069 movl 32(%esp),%edx
# aligned scratch frame; saved %esp at 116(%esp)
1070 movl %esp,%ebp
1071 subl $120,%esp
1072 movl 240(%edx),%ecx
1073 andl $-16,%esp
# GF(2^128) reduction constant 0x87 at 96(%esp), and the paddq carry pattern
1074 movl $135,96(%esp)
1075 movl $0,100(%esp)
1076 movl $1,104(%esp)
1077 movl $0,108(%esp)
1078 movl %eax,112(%esp)
1079 movl %ebp,116(%esp)
1080 movdqa %xmm2,%xmm1
1081 pxor %xmm0,%xmm0
1082 movdqa 96(%esp),%xmm3
1083 pcmpgtd %xmm1,%xmm0
1084 andl $-16,%eax
1085 movl %edx,%ebp
1086 movl %ecx,%ebx
1087 subl $96,%eax
1088 jc .L047xts_enc_short
1089 shll $4,%ecx
1090 movl $16,%ebx
1091 subl %ecx,%ebx
1092 leal 32(%edx,%ecx,1),%edx
1093 jmp .L048xts_enc_loop6
1094.align 16
1095.L048xts_enc_loop6:
# generate tweaks 0-4 into (%esp)..64(%esp), each time doubling xmm1 in
# GF(2^128) (pcmpgtd extracts the carry, pand applies the 0x87 reduction)
1096 pshufd $19,%xmm0,%xmm2
1097 pxor %xmm0,%xmm0
1098 movdqa %xmm1,(%esp)
1099 paddq %xmm1,%xmm1
1100 pand %xmm3,%xmm2
1101 pcmpgtd %xmm1,%xmm0
1102 pxor %xmm2,%xmm1
1103 pshufd $19,%xmm0,%xmm2
1104 pxor %xmm0,%xmm0
1105 movdqa %xmm1,16(%esp)
1106 paddq %xmm1,%xmm1
1107 pand %xmm3,%xmm2
1108 pcmpgtd %xmm1,%xmm0
1109 pxor %xmm2,%xmm1
1110 pshufd $19,%xmm0,%xmm2
1111 pxor %xmm0,%xmm0
1112 movdqa %xmm1,32(%esp)
1113 paddq %xmm1,%xmm1
1114 pand %xmm3,%xmm2
1115 pcmpgtd %xmm1,%xmm0
1116 pxor %xmm2,%xmm1
1117 pshufd $19,%xmm0,%xmm2
1118 pxor %xmm0,%xmm0
1119 movdqa %xmm1,48(%esp)
1120 paddq %xmm1,%xmm1
1121 pand %xmm3,%xmm2
1122 pcmpgtd %xmm1,%xmm0
1123 pxor %xmm2,%xmm1
1124 pshufd $19,%xmm0,%xmm7
1125 movdqa %xmm1,64(%esp)
1126 paddq %xmm1,%xmm1
# load 6 input blocks, xor each with round key 0 and its tweak, start the
# first rounds inline, then join the shared 6-wide loop
1127 movups (%ebp),%xmm0
1128 pand %xmm3,%xmm7
1129 movups (%esi),%xmm2
1130 pxor %xmm1,%xmm7
1131 movl %ebx,%ecx
1132 movdqu 16(%esi),%xmm3
1133 xorps %xmm0,%xmm2
1134 movdqu 32(%esi),%xmm4
1135 pxor %xmm0,%xmm3
1136 movdqu 48(%esi),%xmm5
1137 pxor %xmm0,%xmm4
1138 movdqu 64(%esi),%xmm6
1139 pxor %xmm0,%xmm5
1140 movdqu 80(%esi),%xmm1
1141 pxor %xmm0,%xmm6
1142 leal 96(%esi),%esi
1143 pxor (%esp),%xmm2
1144 movdqa %xmm7,80(%esp)
1145 pxor %xmm1,%xmm7
1146 movups 16(%ebp),%xmm1
1147 pxor 16(%esp),%xmm3
1148 pxor 32(%esp),%xmm4
1149.byte 102,15,56,220,209
1150 pxor 48(%esp),%xmm5
1151 pxor 64(%esp),%xmm6
1152.byte 102,15,56,220,217
1153 pxor %xmm0,%xmm7
1154 movups 32(%ebp),%xmm0
1155.byte 102,15,56,220,225
1156.byte 102,15,56,220,233
1157.byte 102,15,56,220,241
1158.byte 102,15,56,220,249
1159 call .L_aesni_encrypt6_enter
# xor results with their tweaks, store, and double the tweak once more
1160 movdqa 80(%esp),%xmm1
1161 pxor %xmm0,%xmm0
1162 xorps (%esp),%xmm2
1163 pcmpgtd %xmm1,%xmm0
1164 xorps 16(%esp),%xmm3
1165 movups %xmm2,(%edi)
1166 xorps 32(%esp),%xmm4
1167 movups %xmm3,16(%edi)
1168 xorps 48(%esp),%xmm5
1169 movups %xmm4,32(%edi)
1170 xorps 64(%esp),%xmm6
1171 movups %xmm5,48(%edi)
1172 xorps %xmm1,%xmm7
1173 movups %xmm6,64(%edi)
1174 pshufd $19,%xmm0,%xmm2
1175 movups %xmm7,80(%edi)
1176 leal 96(%edi),%edi
1177 movdqa 96(%esp),%xmm3
1178 pxor %xmm0,%xmm0
1179 paddq %xmm1,%xmm1
1180 pand %xmm3,%xmm2
1181 pcmpgtd %xmm1,%xmm0
1182 pxor %xmm2,%xmm1
1183 subl $96,%eax
1184 jnc .L048xts_enc_loop6
1185 movl 240(%ebp),%ecx
1186 movl %ebp,%edx
1187 movl %ecx,%ebx
# ---- short input / tail: fewer than 6 whole blocks remain ----
1188.L047xts_enc_short:
1189 addl $96,%eax
1190 jz .L049xts_enc_done6x
1191 movdqa %xmm1,%xmm5
1192 cmpl $32,%eax
1193 jb .L050xts_enc_one
1194 pshufd $19,%xmm0,%xmm2
1195 pxor %xmm0,%xmm0
1196 paddq %xmm1,%xmm1
1197 pand %xmm3,%xmm2
1198 pcmpgtd %xmm1,%xmm0
1199 pxor %xmm2,%xmm1
1200 je .L051xts_enc_two
1201 pshufd $19,%xmm0,%xmm2
1202 pxor %xmm0,%xmm0
1203 movdqa %xmm1,%xmm6
1204 paddq %xmm1,%xmm1
1205 pand %xmm3,%xmm2
1206 pcmpgtd %xmm1,%xmm0
1207 pxor %xmm2,%xmm1
1208 cmpl $64,%eax
1209 jb .L052xts_enc_three
1210 pshufd $19,%xmm0,%xmm2
1211 pxor %xmm0,%xmm0
1212 movdqa %xmm1,%xmm7
1213 paddq %xmm1,%xmm1
1214 pand %xmm3,%xmm2
1215 pcmpgtd %xmm1,%xmm0
1216 pxor %xmm2,%xmm1
1217 movdqa %xmm5,(%esp)
1218 movdqa %xmm6,16(%esp)
1219 je .L053xts_enc_four
# five blocks
1220 movdqa %xmm7,32(%esp)
1221 pshufd $19,%xmm0,%xmm7
1222 movdqa %xmm1,48(%esp)
1223 paddq %xmm1,%xmm1
1224 pand %xmm3,%xmm7
1225 pxor %xmm1,%xmm7
1226 movdqu (%esi),%xmm2
1227 movdqu 16(%esi),%xmm3
1228 movdqu 32(%esi),%xmm4
1229 pxor (%esp),%xmm2
1230 movdqu 48(%esi),%xmm5
1231 pxor 16(%esp),%xmm3
1232 movdqu 64(%esi),%xmm6
1233 pxor 32(%esp),%xmm4
1234 leal 80(%esi),%esi
1235 pxor 48(%esp),%xmm5
1236 movdqa %xmm7,64(%esp)
1237 pxor %xmm7,%xmm6
1238 call _aesni_encrypt6
1239 movaps 64(%esp),%xmm1
1240 xorps (%esp),%xmm2
1241 xorps 16(%esp),%xmm3
1242 xorps 32(%esp),%xmm4
1243 movups %xmm2,(%edi)
1244 xorps 48(%esp),%xmm5
1245 movups %xmm3,16(%edi)
1246 xorps %xmm1,%xmm6
1247 movups %xmm4,32(%edi)
1248 movups %xmm5,48(%edi)
1249 movups %xmm6,64(%edi)
1250 leal 80(%edi),%edi
1251 jmp .L054xts_enc_done
1252.align 16
1253.L050xts_enc_one:
# one block: tweak in xmm5, inline single-stream encryption
1254 movups (%esi),%xmm2
1255 leal 16(%esi),%esi
1256 xorps %xmm5,%xmm2
1257 movups (%edx),%xmm0
1258 movups 16(%edx),%xmm1
1259 leal 32(%edx),%edx
1260 xorps %xmm0,%xmm2
1261.L055enc1_loop_9:
1262.byte 102,15,56,220,209
1263 decl %ecx
1264 movups (%edx),%xmm1
1265 leal 16(%edx),%edx
1266 jnz .L055enc1_loop_9
1267.byte 102,15,56,221,209
1268 xorps %xmm5,%xmm2
1269 movups %xmm2,(%edi)
1270 leal 16(%edi),%edi
1271 movdqa %xmm5,%xmm1
1272 jmp .L054xts_enc_done
1273.align 16
1274.L051xts_enc_two:
1275 movaps %xmm1,%xmm6
1276 movups (%esi),%xmm2
1277 movups 16(%esi),%xmm3
1278 leal 32(%esi),%esi
1279 xorps %xmm5,%xmm2
1280 xorps %xmm6,%xmm3
1281 call _aesni_encrypt2
1282 xorps %xmm5,%xmm2
1283 xorps %xmm6,%xmm3
1284 movups %xmm2,(%edi)
1285 movups %xmm3,16(%edi)
1286 leal 32(%edi),%edi
1287 movdqa %xmm6,%xmm1
1288 jmp .L054xts_enc_done
1289.align 16
1290.L052xts_enc_three:
1291 movaps %xmm1,%xmm7
1292 movups (%esi),%xmm2
1293 movups 16(%esi),%xmm3
1294 movups 32(%esi),%xmm4
1295 leal 48(%esi),%esi
1296 xorps %xmm5,%xmm2
1297 xorps %xmm6,%xmm3
1298 xorps %xmm7,%xmm4
1299 call _aesni_encrypt3
1300 xorps %xmm5,%xmm2
1301 xorps %xmm6,%xmm3
1302 xorps %xmm7,%xmm4
1303 movups %xmm2,(%edi)
1304 movups %xmm3,16(%edi)
1305 movups %xmm4,32(%edi)
1306 leal 48(%edi),%edi
1307 movdqa %xmm7,%xmm1
1308 jmp .L054xts_enc_done
1309.align 16
1310.L053xts_enc_four:
1311 movaps %xmm1,%xmm6
1312 movups (%esi),%xmm2
1313 movups 16(%esi),%xmm3
1314 movups 32(%esi),%xmm4
1315 xorps (%esp),%xmm2
1316 movups 48(%esi),%xmm5
1317 leal 64(%esi),%esi
1318 xorps 16(%esp),%xmm3
1319 xorps %xmm7,%xmm4
1320 xorps %xmm6,%xmm5
1321 call _aesni_encrypt4
1322 xorps (%esp),%xmm2
1323 xorps 16(%esp),%xmm3
1324 xorps %xmm7,%xmm4
1325 movups %xmm2,(%edi)
1326 xorps %xmm6,%xmm5
1327 movups %xmm3,16(%edi)
1328 movups %xmm4,32(%edi)
1329 movups %xmm5,48(%edi)
1330 leal 64(%edi),%edi
1331 movdqa %xmm6,%xmm1
1332 jmp .L054xts_enc_done
1333.align 16
1334.L049xts_enc_done6x:
# length was a multiple of 96: only a sub-block residue may remain
1335 movl 112(%esp),%eax
1336 andl $15,%eax
1337 jz .L056xts_enc_ret
1338 movdqa %xmm1,%xmm5
1339 movl %eax,112(%esp)
1340 jmp .L057xts_enc_steal
1341.align 16
1342.L054xts_enc_done:
# compute the tweak for the stolen block if a partial block remains
1343 movl 112(%esp),%eax
1344 pxor %xmm0,%xmm0
1345 andl $15,%eax
1346 jz .L056xts_enc_ret
1347 pcmpgtd %xmm1,%xmm0
1348 movl %eax,112(%esp)
1349 pshufd $19,%xmm0,%xmm5
1350 paddq %xmm1,%xmm1
1351 pand 96(%esp),%xmm5
1352 pxor %xmm1,%xmm5
# ---- ciphertext stealing: swap the last partial block with the tail of the
# previous ciphertext block, then re-encrypt that block ----
1353.L057xts_enc_steal:
1354 movzbl (%esi),%ecx
1355 movzbl -16(%edi),%edx
1356 leal 1(%esi),%esi
1357 movb %cl,-16(%edi)
1358 movb %dl,(%edi)
1359 leal 1(%edi),%edi
1360 subl $1,%eax
1361 jnz .L057xts_enc_steal
1362 subl 112(%esp),%edi
1363 movl %ebp,%edx
1364 movl %ebx,%ecx
1365 movups -16(%edi),%xmm2
1366 xorps %xmm5,%xmm2
1367 movups (%edx),%xmm0
1368 movups 16(%edx),%xmm1
1369 leal 32(%edx),%edx
1370 xorps %xmm0,%xmm2
1371.L058enc1_loop_10:
1372.byte 102,15,56,220,209
1373 decl %ecx
1374 movups (%edx),%xmm1
1375 leal 16(%edx),%edx
1376 jnz .L058enc1_loop_10
1377.byte 102,15,56,221,209
1378 xorps %xmm5,%xmm2
1379 movups %xmm2,-16(%edi)
# ---- exit: scrub registers and tweak-bearing stack slots, restore %esp ----
1380.L056xts_enc_ret:
1381 pxor %xmm0,%xmm0
1382 pxor %xmm1,%xmm1
1383 pxor %xmm2,%xmm2
1384 movdqa %xmm0,(%esp)
1385 pxor %xmm3,%xmm3
1386 movdqa %xmm0,16(%esp)
1387 pxor %xmm4,%xmm4
1388 movdqa %xmm0,32(%esp)
1389 pxor %xmm5,%xmm5
1390 movdqa %xmm0,48(%esp)
1391 pxor %xmm6,%xmm6
1392 movdqa %xmm0,64(%esp)
1393 pxor %xmm7,%xmm7
1394 movdqa %xmm0,80(%esp)
1395 movl 116(%esp),%esp
1396 popl %edi
1397 popl %esi
1398 popl %ebx
1399 popl %ebp
1400 ret
1401.size aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin
#----------------------------------------------------------------------
# aesni_xts_decrypt -- AES-XTS (IEEE P1619) decryption, i386 + AES-NI.
# Generated ELF/AT&T assembly from OpenSSL's aesni-x86.pl; the decimal
# run-in on each line is a line-number artifact of the viewer this file
# was captured from, not assembler input.
#
# C-level signature (per OpenSSL's aesni interface -- confirm at caller):
#   void aesni_xts_decrypt(const unsigned char *inp, unsigned char *out,
#                          size_t len, const AES_KEY *key1,
#                          const AES_KEY *key2, const unsigned char iv[16]);
# cdecl: after the four register pushes the arguments sit at
#   20(%esp)=inp 24(%esp)=out 28(%esp)=len 32(%esp)=key1
#   36(%esp)=key2 40(%esp)=iv
#
# Aligned scratch frame (120 bytes, 16-byte aligned):
#   0..80(%esp)  up to six pre-computed tweaks (T, T*x, ... T*x^5)
#   96(%esp)     GF(2^128) carry constant, qwords {0x87, 1}
#   112(%esp)    byte count used to locate the partial tail block
#   116(%esp)    caller's %esp (restored on exit)
#----------------------------------------------------------------------
1402.globl aesni_xts_decrypt
1403.type aesni_xts_decrypt,@function
1404.align 16
1405aesni_xts_decrypt:
1406.L_aesni_xts_decrypt_begin:
1407 pushl %ebp
1408 pushl %ebx
1409 pushl %esi
1410 pushl %edi
# Encrypt the IV with key2 to obtain the initial tweak in %xmm2.
1411 movl 36(%esp),%edx
1412 movl 40(%esp),%esi
1413 movl 240(%edx),%ecx
1414 movups (%esi),%xmm2
1415 movups (%edx),%xmm0
1416 movups 16(%edx),%xmm1
1417 leal 32(%edx),%edx
1418 xorps %xmm0,%xmm2
1419.L059enc1_loop_11:
1420.byte 102,15,56,220,209 # aesenc %xmm1,%xmm2
1421 decl %ecx
1422 movups (%edx),%xmm1
1423 leal 16(%edx),%edx
1424 jnz .L059enc1_loop_11
1425.byte 102,15,56,221,209 # aesenclast %xmm1,%xmm2
# Load the remaining arguments and build the aligned scratch frame.
1426 movl 20(%esp),%esi
1427 movl 24(%esp),%edi
1428 movl 28(%esp),%eax
1429 movl 32(%esp),%edx
1430 movl %esp,%ebp
1431 subl $120,%esp
1432 andl $-16,%esp
# If len is not a multiple of 16, hold back one full block for the
# ciphertext-stealing tail (%ebx = 16 when stealing is needed, else 0).
1433 xorl %ebx,%ebx
1434 testl $15,%eax
1435 setnz %bl
1436 shll $4,%ebx
1437 subl %ebx,%eax
1438 movl $135,96(%esp)
1439 movl $0,100(%esp)
1440 movl $1,104(%esp)
1441 movl $0,108(%esp)
1442 movl %eax,112(%esp)
1443 movl %ebp,116(%esp)
1444 movl 240(%edx),%ecx
1445 movl %edx,%ebp
1446 movl %ecx,%ebx
# %xmm1 = current tweak; %xmm3 = carry constant; %xmm0 = sign mask
# used by the pcmpgtd/pshufd/pand/paddq multiply-by-x sequence below.
1447 movdqa %xmm2,%xmm1
1448 pxor %xmm0,%xmm0
1449 movdqa 96(%esp),%xmm3
1450 pcmpgtd %xmm1,%xmm0
1451 andl $-16,%eax
1452 subl $96,%eax
1453 jc .L060xts_dec_short
1454 shll $4,%ecx
1455 movl $16,%ebx
1456 subl %ecx,%ebx
1457 leal 32(%edx,%ecx,1),%edx
1458 jmp .L061xts_dec_loop6
1459.align 16
# Main loop: 6 blocks per iteration.  Each tweak update multiplies the
# previous tweak by x in GF(2^128); the six tweaks are parked at
# (%esp)..80(%esp) and re-used after the parallel decryption.
1460.L061xts_dec_loop6:
1461 pshufd $19,%xmm0,%xmm2
1462 pxor %xmm0,%xmm0
1463 movdqa %xmm1,(%esp)
1464 paddq %xmm1,%xmm1
1465 pand %xmm3,%xmm2
1466 pcmpgtd %xmm1,%xmm0
1467 pxor %xmm2,%xmm1
1468 pshufd $19,%xmm0,%xmm2
1469 pxor %xmm0,%xmm0
1470 movdqa %xmm1,16(%esp)
1471 paddq %xmm1,%xmm1
1472 pand %xmm3,%xmm2
1473 pcmpgtd %xmm1,%xmm0
1474 pxor %xmm2,%xmm1
1475 pshufd $19,%xmm0,%xmm2
1476 pxor %xmm0,%xmm0
1477 movdqa %xmm1,32(%esp)
1478 paddq %xmm1,%xmm1
1479 pand %xmm3,%xmm2
1480 pcmpgtd %xmm1,%xmm0
1481 pxor %xmm2,%xmm1
1482 pshufd $19,%xmm0,%xmm2
1483 pxor %xmm0,%xmm0
1484 movdqa %xmm1,48(%esp)
1485 paddq %xmm1,%xmm1
1486 pand %xmm3,%xmm2
1487 pcmpgtd %xmm1,%xmm0
1488 pxor %xmm2,%xmm1
1489 pshufd $19,%xmm0,%xmm7
1490 movdqa %xmm1,64(%esp)
1491 paddq %xmm1,%xmm1
1492 movups (%ebp),%xmm0
1493 pand %xmm3,%xmm7
# Load six ciphertext blocks, XOR each with its tweak and round key 0.
1494 movups (%esi),%xmm2
1495 pxor %xmm1,%xmm7
1496 movl %ebx,%ecx
1497 movdqu 16(%esi),%xmm3
1498 xorps %xmm0,%xmm2
1499 movdqu 32(%esi),%xmm4
1500 pxor %xmm0,%xmm3
1501 movdqu 48(%esi),%xmm5
1502 pxor %xmm0,%xmm4
1503 movdqu 64(%esi),%xmm6
1504 pxor %xmm0,%xmm5
1505 movdqu 80(%esi),%xmm1
1506 pxor %xmm0,%xmm6
1507 leal 96(%esi),%esi
1508 pxor (%esp),%xmm2
1509 movdqa %xmm7,80(%esp)
1510 pxor %xmm1,%xmm7
1511 movups 16(%ebp),%xmm1
1512 pxor 16(%esp),%xmm3
1513 pxor 32(%esp),%xmm4
1514.byte 102,15,56,222,209 # aesdec %xmm1,%xmm2
1515 pxor 48(%esp),%xmm5
1516 pxor 64(%esp),%xmm6
1517.byte 102,15,56,222,217 # aesdec %xmm1,%xmm3
1518 pxor %xmm0,%xmm7
1519 movups 32(%ebp),%xmm0
1520.byte 102,15,56,222,225 # aesdec %xmm1,%xmm4
1521.byte 102,15,56,222,233 # aesdec %xmm1,%xmm5
1522.byte 102,15,56,222,241 # aesdec %xmm1,%xmm6
1523.byte 102,15,56,222,249 # aesdec %xmm1,%xmm7
1524 call .L_aesni_decrypt6_enter
# Un-tweak the six results, store them, and derive the next tweak.
1525 movdqa 80(%esp),%xmm1
1526 pxor %xmm0,%xmm0
1527 xorps (%esp),%xmm2
1528 pcmpgtd %xmm1,%xmm0
1529 xorps 16(%esp),%xmm3
1530 movups %xmm2,(%edi)
1531 xorps 32(%esp),%xmm4
1532 movups %xmm3,16(%edi)
1533 xorps 48(%esp),%xmm5
1534 movups %xmm4,32(%edi)
1535 xorps 64(%esp),%xmm6
1536 movups %xmm5,48(%edi)
1537 xorps %xmm1,%xmm7
1538 movups %xmm6,64(%edi)
1539 pshufd $19,%xmm0,%xmm2
1540 movups %xmm7,80(%edi)
1541 leal 96(%edi),%edi
1542 movdqa 96(%esp),%xmm3
1543 pxor %xmm0,%xmm0
1544 paddq %xmm1,%xmm1
1545 pand %xmm3,%xmm2
1546 pcmpgtd %xmm1,%xmm0
1547 pxor %xmm2,%xmm1
1548 subl $96,%eax
1549 jnc .L061xts_dec_loop6
1550 movl 240(%ebp),%ecx
1551 movl %ebp,%edx
1552 movl %ecx,%ebx
# Tail dispatch: fewer than six whole blocks remain.  Pre-compute the
# needed tweaks (%xmm5..%xmm7, spilling to the frame for 4/5 blocks)
# and branch to the matching 1..5-block handler.
1553.L060xts_dec_short:
1554 addl $96,%eax
1555 jz .L062xts_dec_done6x
1556 movdqa %xmm1,%xmm5
1557 cmpl $32,%eax
1558 jb .L063xts_dec_one
1559 pshufd $19,%xmm0,%xmm2
1560 pxor %xmm0,%xmm0
1561 paddq %xmm1,%xmm1
1562 pand %xmm3,%xmm2
1563 pcmpgtd %xmm1,%xmm0
1564 pxor %xmm2,%xmm1
1565 je .L064xts_dec_two
1566 pshufd $19,%xmm0,%xmm2
1567 pxor %xmm0,%xmm0
1568 movdqa %xmm1,%xmm6
1569 paddq %xmm1,%xmm1
1570 pand %xmm3,%xmm2
1571 pcmpgtd %xmm1,%xmm0
1572 pxor %xmm2,%xmm1
1573 cmpl $64,%eax
1574 jb .L065xts_dec_three
1575 pshufd $19,%xmm0,%xmm2
1576 pxor %xmm0,%xmm0
1577 movdqa %xmm1,%xmm7
1578 paddq %xmm1,%xmm1
1579 pand %xmm3,%xmm2
1580 pcmpgtd %xmm1,%xmm0
1581 pxor %xmm2,%xmm1
1582 movdqa %xmm5,(%esp)
1583 movdqa %xmm6,16(%esp)
1584 je .L066xts_dec_four
# Five-block tail.
1585 movdqa %xmm7,32(%esp)
1586 pshufd $19,%xmm0,%xmm7
1587 movdqa %xmm1,48(%esp)
1588 paddq %xmm1,%xmm1
1589 pand %xmm3,%xmm7
1590 pxor %xmm1,%xmm7
1591 movdqu (%esi),%xmm2
1592 movdqu 16(%esi),%xmm3
1593 movdqu 32(%esi),%xmm4
1594 pxor (%esp),%xmm2
1595 movdqu 48(%esi),%xmm5
1596 pxor 16(%esp),%xmm3
1597 movdqu 64(%esi),%xmm6
1598 pxor 32(%esp),%xmm4
1599 leal 80(%esi),%esi
1600 pxor 48(%esp),%xmm5
1601 movdqa %xmm7,64(%esp)
1602 pxor %xmm7,%xmm6
1603 call _aesni_decrypt6
1604 movaps 64(%esp),%xmm1
1605 xorps (%esp),%xmm2
1606 xorps 16(%esp),%xmm3
1607 xorps 32(%esp),%xmm4
1608 movups %xmm2,(%edi)
1609 xorps 48(%esp),%xmm5
1610 movups %xmm3,16(%edi)
1611 xorps %xmm1,%xmm6
1612 movups %xmm4,32(%edi)
1613 movups %xmm5,48(%edi)
1614 movups %xmm6,64(%edi)
1615 leal 80(%edi),%edi
1616 jmp .L067xts_dec_done
1617.align 16
# Single-block tail: inline one-block AES decryption (tweak in %xmm5).
1618.L063xts_dec_one:
1619 movups (%esi),%xmm2
1620 leal 16(%esi),%esi
1621 xorps %xmm5,%xmm2
1622 movups (%edx),%xmm0
1623 movups 16(%edx),%xmm1
1624 leal 32(%edx),%edx
1625 xorps %xmm0,%xmm2
1626.L068dec1_loop_12:
1627.byte 102,15,56,222,209 # aesdec %xmm1,%xmm2
1628 decl %ecx
1629 movups (%edx),%xmm1
1630 leal 16(%edx),%edx
1631 jnz .L068dec1_loop_12
1632.byte 102,15,56,223,209 # aesdeclast %xmm1,%xmm2
1633 xorps %xmm5,%xmm2
1634 movups %xmm2,(%edi)
1635 leal 16(%edi),%edi
1636 movdqa %xmm5,%xmm1
1637 jmp .L067xts_dec_done
1638.align 16
1639.L064xts_dec_two:
1640 movaps %xmm1,%xmm6
1641 movups (%esi),%xmm2
1642 movups 16(%esi),%xmm3
1643 leal 32(%esi),%esi
1644 xorps %xmm5,%xmm2
1645 xorps %xmm6,%xmm3
1646 call _aesni_decrypt2
1647 xorps %xmm5,%xmm2
1648 xorps %xmm6,%xmm3
1649 movups %xmm2,(%edi)
1650 movups %xmm3,16(%edi)
1651 leal 32(%edi),%edi
1652 movdqa %xmm6,%xmm1
1653 jmp .L067xts_dec_done
1654.align 16
1655.L065xts_dec_three:
1656 movaps %xmm1,%xmm7
1657 movups (%esi),%xmm2
1658 movups 16(%esi),%xmm3
1659 movups 32(%esi),%xmm4
1660 leal 48(%esi),%esi
1661 xorps %xmm5,%xmm2
1662 xorps %xmm6,%xmm3
1663 xorps %xmm7,%xmm4
1664 call _aesni_decrypt3
1665 xorps %xmm5,%xmm2
1666 xorps %xmm6,%xmm3
1667 xorps %xmm7,%xmm4
1668 movups %xmm2,(%edi)
1669 movups %xmm3,16(%edi)
1670 movups %xmm4,32(%edi)
1671 leal 48(%edi),%edi
1672 movdqa %xmm7,%xmm1
1673 jmp .L067xts_dec_done
1674.align 16
1675.L066xts_dec_four:
1676 movaps %xmm1,%xmm6
1677 movups (%esi),%xmm2
1678 movups 16(%esi),%xmm3
1679 movups 32(%esi),%xmm4
1680 xorps (%esp),%xmm2
1681 movups 48(%esi),%xmm5
1682 leal 64(%esi),%esi
1683 xorps 16(%esp),%xmm3
1684 xorps %xmm7,%xmm4
1685 xorps %xmm6,%xmm5
1686 call _aesni_decrypt4
1687 xorps (%esp),%xmm2
1688 xorps 16(%esp),%xmm3
1689 xorps %xmm7,%xmm4
1690 movups %xmm2,(%edi)
1691 xorps %xmm6,%xmm5
1692 movups %xmm3,16(%edi)
1693 movups %xmm4,32(%edi)
1694 movups %xmm5,48(%edi)
1695 leal 64(%edi),%edi
1696 movdqa %xmm6,%xmm1
1697 jmp .L067xts_dec_done
1698.align 16
# Length was an exact multiple of 96: check only for a stolen tail.
1699.L062xts_dec_done6x:
1700 movl 112(%esp),%eax
1701 andl $15,%eax
1702 jz .L069xts_dec_ret
1703 movl %eax,112(%esp)
1704 jmp .L070xts_dec_only_one_more
1705.align 16
1706.L067xts_dec_done:
1707 movl 112(%esp),%eax
1708 pxor %xmm0,%xmm0
1709 andl $15,%eax
1710 jz .L069xts_dec_ret
# Partial tail present: advance the tweak once more before stealing.
1711 pcmpgtd %xmm1,%xmm0
1712 movl %eax,112(%esp)
1713 pshufd $19,%xmm0,%xmm2
1714 pxor %xmm0,%xmm0
1715 movdqa 96(%esp),%xmm3
1716 paddq %xmm1,%xmm1
1717 pand %xmm3,%xmm2
1718 pcmpgtd %xmm1,%xmm0
1719 pxor %xmm2,%xmm1
# Ciphertext stealing: decrypt the held-back block with the *next*
# tweak (%xmm5), then swap tail bytes and re-decrypt with the current
# tweak (%xmm6), per the XTS-AES stealing procedure.
1720.L070xts_dec_only_one_more:
1721 pshufd $19,%xmm0,%xmm5
1722 movdqa %xmm1,%xmm6
1723 paddq %xmm1,%xmm1
1724 pand %xmm3,%xmm5
1725 pxor %xmm1,%xmm5
1726 movl %ebp,%edx
1727 movl %ebx,%ecx
1728 movups (%esi),%xmm2
1729 xorps %xmm5,%xmm2
1730 movups (%edx),%xmm0
1731 movups 16(%edx),%xmm1
1732 leal 32(%edx),%edx
1733 xorps %xmm0,%xmm2
1734.L071dec1_loop_13:
1735.byte 102,15,56,222,209 # aesdec %xmm1,%xmm2
1736 decl %ecx
1737 movups (%edx),%xmm1
1738 leal 16(%edx),%edx
1739 jnz .L071dec1_loop_13
1740.byte 102,15,56,223,209 # aesdeclast %xmm1,%xmm2
1741 xorps %xmm5,%xmm2
1742 movups %xmm2,(%edi)
1743.L072xts_dec_steal:
1744 movzbl 16(%esi),%ecx
1745 movzbl (%edi),%edx
1746 leal 1(%esi),%esi
1747 movb %cl,(%edi)
1748 movb %dl,16(%edi)
1749 leal 1(%edi),%edi
1750 subl $1,%eax
1751 jnz .L072xts_dec_steal
1752 subl 112(%esp),%edi
1753 movl %ebp,%edx
1754 movl %ebx,%ecx
1755 movups (%edi),%xmm2
1756 xorps %xmm6,%xmm2
1757 movups (%edx),%xmm0
1758 movups 16(%edx),%xmm1
1759 leal 32(%edx),%edx
1760 xorps %xmm0,%xmm2
1761.L073dec1_loop_14:
1762.byte 102,15,56,222,209 # aesdec %xmm1,%xmm2
1763 decl %ecx
1764 movups (%edx),%xmm1
1765 leal 16(%edx),%edx
1766 jnz .L073dec1_loop_14
1767.byte 102,15,56,223,209 # aesdeclast %xmm1,%xmm2
1768 xorps %xmm6,%xmm2
1769 movups %xmm2,(%edi)
# Scrub key/tweak material from XMM registers and the stack frame,
# restore the caller's %esp and return.
1770.L069xts_dec_ret:
1771 pxor %xmm0,%xmm0
1772 pxor %xmm1,%xmm1
1773 pxor %xmm2,%xmm2
1774 movdqa %xmm0,(%esp)
1775 pxor %xmm3,%xmm3
1776 movdqa %xmm0,16(%esp)
1777 pxor %xmm4,%xmm4
1778 movdqa %xmm0,32(%esp)
1779 pxor %xmm5,%xmm5
1780 movdqa %xmm0,48(%esp)
1781 pxor %xmm6,%xmm6
1782 movdqa %xmm0,64(%esp)
1783 pxor %xmm7,%xmm7
1784 movdqa %xmm0,80(%esp)
1785 movl 116(%esp),%esp
1786 popl %edi
1787 popl %esi
1788 popl %ebx
1789 popl %ebp
1790 ret
1791.size aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin
#----------------------------------------------------------------------
# aesni_ocb_encrypt -- OCB-mode bulk encryption, i386 + AES-NI.
# C-level signature (per OpenSSL's aesni interface -- confirm at caller):
#   void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
#                          size_t blocks, const AES_KEY *key,
#                          unsigned int start_block_num,
#                          unsigned char offset[16],
#                          const unsigned char L_[][16],
#                          unsigned char checksum[16]);
# cdecl: after the four pushes the args sit at 20..48(%esp) in the
# order above.  %xmm0 holds the running offset, %xmm1 the running
# checksum of the plaintext; both are written back on exit.
# L_ is indexed by ntz(block number), computed with bsfl.
#
# Aligned scratch frame (132 bytes):
#   0..80(%esp)   six per-block offsets for the 6-wide main loop
#   96(%esp)      checksum spill slot
#   112(%esp)     key pointer       116(%esp)  16 - 16*rounds bias used
#                                              to rebase round-key loads
#   120(%esp)     out - in delta    124(%esp)  input watermark (in+16*blocks-96)
#   128(%esp)     caller's %esp
#----------------------------------------------------------------------
1792.globl aesni_ocb_encrypt
1793.type aesni_ocb_encrypt,@function
1794.align 16
1795aesni_ocb_encrypt:
1796.L_aesni_ocb_encrypt_begin:
1797 pushl %ebp
1798 pushl %ebx
1799 pushl %esi
1800 pushl %edi
1801 movl 40(%esp),%ecx
1802 movl 48(%esp),%ebx
1803 movl 20(%esp),%esi
1804 movl 24(%esp),%edi
1805 movl 28(%esp),%eax
1806 movl 32(%esp),%edx
1807 movdqu (%ecx),%xmm0
1808 movl 36(%esp),%ebp
1809 movdqu (%ebx),%xmm1
1810 movl 44(%esp),%ebx
1811 movl %esp,%ecx
1812 subl $132,%esp
1813 andl $-16,%esp
1814 subl %esi,%edi
1815 shll $4,%eax
1816 leal -96(%esi,%eax,1),%eax
1817 movl %edi,120(%esp)
1818 movl %eax,124(%esp)
1819 movl %ecx,128(%esp)
1820 movl 240(%edx),%ecx
# If the incoming block counter is even, process one block on its own
# so the 6-wide main loop always starts on an odd counter.
1821 testl $1,%ebp
1822 jnz .L074odd
1823 bsfl %ebp,%eax
1824 addl $1,%ebp
1825 shll $4,%eax
1826 movdqu (%ebx,%eax,1),%xmm7
1827 movl %edx,%eax
1828 movdqu (%esi),%xmm2
1829 leal 16(%esi),%esi
1830 pxor %xmm0,%xmm7
1831 pxor %xmm2,%xmm1
1832 pxor %xmm7,%xmm2
1833 movdqa %xmm1,%xmm6
1834 movups (%edx),%xmm0
1835 movups 16(%edx),%xmm1
1836 leal 32(%edx),%edx
1837 xorps %xmm0,%xmm2
1838.L075enc1_loop_15:
1839.byte 102,15,56,220,209 # aesenc %xmm1,%xmm2
1840 decl %ecx
1841 movups (%edx),%xmm1
1842 leal 16(%edx),%edx
1843 jnz .L075enc1_loop_15
1844.byte 102,15,56,221,209 # aesenclast %xmm1,%xmm2
1845 xorps %xmm7,%xmm2
1846 movdqa %xmm7,%xmm0
1847 movdqa %xmm6,%xmm1
1848 movups %xmm2,-16(%edi,%esi,1)
1849 movl 240(%eax),%ecx
1850 movl %eax,%edx
1851 movl 124(%esp),%eax
1852.L074odd:
1853 shll $4,%ecx
1854 movl $16,%edi
1855 subl %ecx,%edi
1856 movl %edx,112(%esp)
1857 leal 32(%edx,%ecx,1),%edx
1858 movl %edi,116(%esp)
1859 cmpl %eax,%esi
1860 ja .L076short
1861 jmp .L077grandloop
1862.align 32
# Main loop: six blocks per iteration.  Offsets are chained
# (offset_i = offset_{i-1} XOR L[ntz(i)]); for odd i, L[ntz(i)] = L[0]
# = the table base, so only three bsfl lookups are needed.
1863.L077grandloop:
1864 leal 1(%ebp),%ecx
1865 leal 3(%ebp),%eax
1866 leal 5(%ebp),%edi
1867 addl $6,%ebp
1868 bsfl %ecx,%ecx
1869 bsfl %eax,%eax
1870 bsfl %edi,%edi
1871 shll $4,%ecx
1872 shll $4,%eax
1873 shll $4,%edi
1874 movdqu (%ebx),%xmm2
1875 movdqu (%ebx,%ecx,1),%xmm3
1876 movl 116(%esp),%ecx
1877 movdqa %xmm2,%xmm4
1878 movdqu (%ebx,%eax,1),%xmm5
1879 movdqa %xmm2,%xmm6
1880 movdqu (%ebx,%edi,1),%xmm7
1881 pxor %xmm0,%xmm2
1882 pxor %xmm2,%xmm3
1883 movdqa %xmm2,(%esp)
1884 pxor %xmm3,%xmm4
1885 movdqa %xmm3,16(%esp)
1886 pxor %xmm4,%xmm5
1887 movdqa %xmm4,32(%esp)
1888 pxor %xmm5,%xmm6
1889 movdqa %xmm5,48(%esp)
1890 pxor %xmm6,%xmm7
1891 movdqa %xmm6,64(%esp)
1892 movdqa %xmm7,80(%esp)
1893 movups -48(%edx,%ecx,1),%xmm0
# Load six plaintext blocks; fold each into the checksum (%xmm1) and
# whiten with round key 0 and its per-block offset.
1894 movdqu (%esi),%xmm2
1895 movdqu 16(%esi),%xmm3
1896 movdqu 32(%esi),%xmm4
1897 movdqu 48(%esi),%xmm5
1898 movdqu 64(%esi),%xmm6
1899 movdqu 80(%esi),%xmm7
1900 leal 96(%esi),%esi
1901 pxor %xmm2,%xmm1
1902 pxor %xmm0,%xmm2
1903 pxor %xmm3,%xmm1
1904 pxor %xmm0,%xmm3
1905 pxor %xmm4,%xmm1
1906 pxor %xmm0,%xmm4
1907 pxor %xmm5,%xmm1
1908 pxor %xmm0,%xmm5
1909 pxor %xmm6,%xmm1
1910 pxor %xmm0,%xmm6
1911 pxor %xmm7,%xmm1
1912 pxor %xmm0,%xmm7
1913 movdqa %xmm1,96(%esp)
1914 movups -32(%edx,%ecx,1),%xmm1
1915 pxor (%esp),%xmm2
1916 pxor 16(%esp),%xmm3
1917 pxor 32(%esp),%xmm4
1918 pxor 48(%esp),%xmm5
1919 pxor 64(%esp),%xmm6
1920 pxor 80(%esp),%xmm7
1921 movups -16(%edx,%ecx,1),%xmm0
1922.byte 102,15,56,220,209 # aesenc %xmm1,%xmm2
1923.byte 102,15,56,220,217 # aesenc %xmm1,%xmm3
1924.byte 102,15,56,220,225 # aesenc %xmm1,%xmm4
1925.byte 102,15,56,220,233 # aesenc %xmm1,%xmm5
1926.byte 102,15,56,220,241 # aesenc %xmm1,%xmm6
1927.byte 102,15,56,220,249 # aesenc %xmm1,%xmm7
1928 movl 120(%esp),%edi
1929 movl 124(%esp),%eax
1930 call .L_aesni_encrypt6_enter
# Un-whiten with the saved offsets and store relative to the input
# pointer (out = in + delta held in %edi).
1931 movdqa 80(%esp),%xmm0
1932 pxor (%esp),%xmm2
1933 pxor 16(%esp),%xmm3
1934 pxor 32(%esp),%xmm4
1935 pxor 48(%esp),%xmm5
1936 pxor 64(%esp),%xmm6
1937 pxor %xmm0,%xmm7
1938 movdqa 96(%esp),%xmm1
1939 movdqu %xmm2,-96(%edi,%esi,1)
1940 movdqu %xmm3,-80(%edi,%esi,1)
1941 movdqu %xmm4,-64(%edi,%esi,1)
1942 movdqu %xmm5,-48(%edi,%esi,1)
1943 movdqu %xmm6,-32(%edi,%esi,1)
1944 movdqu %xmm7,-16(%edi,%esi,1)
1945 cmpl %eax,%esi
1946 jb .L077grandloop
# Tail dispatch on the remaining 0..5 blocks.
1947.L076short:
1948 addl $96,%eax
1949 subl %esi,%eax
1950 jz .L078done
1951 cmpl $32,%eax
1952 jb .L079one
1953 je .L080two
1954 cmpl $64,%eax
1955 jb .L081three
1956 je .L082four
# Five-block tail (same scheme as the main loop, %xmm7 zeroed).
1957 leal 1(%ebp),%ecx
1958 leal 3(%ebp),%eax
1959 bsfl %ecx,%ecx
1960 bsfl %eax,%eax
1961 shll $4,%ecx
1962 shll $4,%eax
1963 movdqu (%ebx),%xmm2
1964 movdqu (%ebx,%ecx,1),%xmm3
1965 movl 116(%esp),%ecx
1966 movdqa %xmm2,%xmm4
1967 movdqu (%ebx,%eax,1),%xmm5
1968 movdqa %xmm2,%xmm6
1969 pxor %xmm0,%xmm2
1970 pxor %xmm2,%xmm3
1971 movdqa %xmm2,(%esp)
1972 pxor %xmm3,%xmm4
1973 movdqa %xmm3,16(%esp)
1974 pxor %xmm4,%xmm5
1975 movdqa %xmm4,32(%esp)
1976 pxor %xmm5,%xmm6
1977 movdqa %xmm5,48(%esp)
1978 pxor %xmm6,%xmm7
1979 movdqa %xmm6,64(%esp)
1980 movups -48(%edx,%ecx,1),%xmm0
1981 movdqu (%esi),%xmm2
1982 movdqu 16(%esi),%xmm3
1983 movdqu 32(%esi),%xmm4
1984 movdqu 48(%esi),%xmm5
1985 movdqu 64(%esi),%xmm6
1986 pxor %xmm7,%xmm7
1987 pxor %xmm2,%xmm1
1988 pxor %xmm0,%xmm2
1989 pxor %xmm3,%xmm1
1990 pxor %xmm0,%xmm3
1991 pxor %xmm4,%xmm1
1992 pxor %xmm0,%xmm4
1993 pxor %xmm5,%xmm1
1994 pxor %xmm0,%xmm5
1995 pxor %xmm6,%xmm1
1996 pxor %xmm0,%xmm6
1997 movdqa %xmm1,96(%esp)
1998 movups -32(%edx,%ecx,1),%xmm1
1999 pxor (%esp),%xmm2
2000 pxor 16(%esp),%xmm3
2001 pxor 32(%esp),%xmm4
2002 pxor 48(%esp),%xmm5
2003 pxor 64(%esp),%xmm6
2004 movups -16(%edx,%ecx,1),%xmm0
2005.byte 102,15,56,220,209 # aesenc %xmm1,%xmm2
2006.byte 102,15,56,220,217 # aesenc %xmm1,%xmm3
2007.byte 102,15,56,220,225 # aesenc %xmm1,%xmm4
2008.byte 102,15,56,220,233 # aesenc %xmm1,%xmm5
2009.byte 102,15,56,220,241 # aesenc %xmm1,%xmm6
2010.byte 102,15,56,220,249 # aesenc %xmm1,%xmm7
2011 movl 120(%esp),%edi
2012 call .L_aesni_encrypt6_enter
2013 movdqa 64(%esp),%xmm0
2014 pxor (%esp),%xmm2
2015 pxor 16(%esp),%xmm3
2016 pxor 32(%esp),%xmm4
2017 pxor 48(%esp),%xmm5
2018 pxor %xmm0,%xmm6
2019 movdqa 96(%esp),%xmm1
2020 movdqu %xmm2,(%edi,%esi,1)
2021 movdqu %xmm3,16(%edi,%esi,1)
2022 movdqu %xmm4,32(%edi,%esi,1)
2023 movdqu %xmm5,48(%edi,%esi,1)
2024 movdqu %xmm6,64(%edi,%esi,1)
2025 jmp .L078done
2026.align 16
# One-block tail: inline single-block encryption, offset = L[0] base.
2027.L079one:
2028 movdqu (%ebx),%xmm7
2029 movl 112(%esp),%edx
2030 movdqu (%esi),%xmm2
2031 movl 240(%edx),%ecx
2032 pxor %xmm0,%xmm7
2033 pxor %xmm2,%xmm1
2034 pxor %xmm7,%xmm2
2035 movdqa %xmm1,%xmm6
2036 movl 120(%esp),%edi
2037 movups (%edx),%xmm0
2038 movups 16(%edx),%xmm1
2039 leal 32(%edx),%edx
2040 xorps %xmm0,%xmm2
2041.L083enc1_loop_16:
2042.byte 102,15,56,220,209 # aesenc %xmm1,%xmm2
2043 decl %ecx
2044 movups (%edx),%xmm1
2045 leal 16(%edx),%edx
2046 jnz .L083enc1_loop_16
2047.byte 102,15,56,221,209 # aesenclast %xmm1,%xmm2
2048 xorps %xmm7,%xmm2
2049 movdqa %xmm7,%xmm0
2050 movdqa %xmm6,%xmm1
2051 movups %xmm2,(%edi,%esi,1)
2052 jmp .L078done
2053.align 16
2054.L080two:
2055 leal 1(%ebp),%ecx
2056 movl 112(%esp),%edx
2057 bsfl %ecx,%ecx
2058 shll $4,%ecx
2059 movdqu (%ebx),%xmm6
2060 movdqu (%ebx,%ecx,1),%xmm7
2061 movdqu (%esi),%xmm2
2062 movdqu 16(%esi),%xmm3
2063 movl 240(%edx),%ecx
2064 pxor %xmm0,%xmm6
2065 pxor %xmm6,%xmm7
2066 pxor %xmm2,%xmm1
2067 pxor %xmm6,%xmm2
2068 pxor %xmm3,%xmm1
2069 pxor %xmm7,%xmm3
2070 movdqa %xmm1,%xmm5
2071 movl 120(%esp),%edi
2072 call _aesni_encrypt2
2073 xorps %xmm6,%xmm2
2074 xorps %xmm7,%xmm3
2075 movdqa %xmm7,%xmm0
2076 movdqa %xmm5,%xmm1
2077 movups %xmm2,(%edi,%esi,1)
2078 movups %xmm3,16(%edi,%esi,1)
2079 jmp .L078done
2080.align 16
2081.L081three:
2082 leal 1(%ebp),%ecx
2083 movl 112(%esp),%edx
2084 bsfl %ecx,%ecx
2085 shll $4,%ecx
2086 movdqu (%ebx),%xmm5
2087 movdqu (%ebx,%ecx,1),%xmm6
2088 movdqa %xmm5,%xmm7
2089 movdqu (%esi),%xmm2
2090 movdqu 16(%esi),%xmm3
2091 movdqu 32(%esi),%xmm4
2092 movl 240(%edx),%ecx
2093 pxor %xmm0,%xmm5
2094 pxor %xmm5,%xmm6
2095 pxor %xmm6,%xmm7
2096 pxor %xmm2,%xmm1
2097 pxor %xmm5,%xmm2
2098 pxor %xmm3,%xmm1
2099 pxor %xmm6,%xmm3
2100 pxor %xmm4,%xmm1
2101 pxor %xmm7,%xmm4
2102 movdqa %xmm1,96(%esp)
2103 movl 120(%esp),%edi
2104 call _aesni_encrypt3
2105 xorps %xmm5,%xmm2
2106 xorps %xmm6,%xmm3
2107 xorps %xmm7,%xmm4
2108 movdqa %xmm7,%xmm0
2109 movdqa 96(%esp),%xmm1
2110 movups %xmm2,(%edi,%esi,1)
2111 movups %xmm3,16(%edi,%esi,1)
2112 movups %xmm4,32(%edi,%esi,1)
2113 jmp .L078done
2114.align 16
2115.L082four:
2116 leal 1(%ebp),%ecx
2117 leal 3(%ebp),%eax
2118 bsfl %ecx,%ecx
2119 bsfl %eax,%eax
2120 movl 112(%esp),%edx
2121 shll $4,%ecx
2122 shll $4,%eax
2123 movdqu (%ebx),%xmm4
2124 movdqu (%ebx,%ecx,1),%xmm5
2125 movdqa %xmm4,%xmm6
2126 movdqu (%ebx,%eax,1),%xmm7
2127 pxor %xmm0,%xmm4
2128 movdqu (%esi),%xmm2
2129 pxor %xmm4,%xmm5
2130 movdqu 16(%esi),%xmm3
2131 pxor %xmm5,%xmm6
2132 movdqa %xmm4,(%esp)
2133 pxor %xmm6,%xmm7
2134 movdqa %xmm5,16(%esp)
2135 movdqu 32(%esi),%xmm4
2136 movdqu 48(%esi),%xmm5
2137 movl 240(%edx),%ecx
2138 pxor %xmm2,%xmm1
2139 pxor (%esp),%xmm2
2140 pxor %xmm3,%xmm1
2141 pxor 16(%esp),%xmm3
2142 pxor %xmm4,%xmm1
2143 pxor %xmm6,%xmm4
2144 pxor %xmm5,%xmm1
2145 pxor %xmm7,%xmm5
2146 movdqa %xmm1,96(%esp)
2147 movl 120(%esp),%edi
2148 call _aesni_encrypt4
2149 xorps (%esp),%xmm2
2150 xorps 16(%esp),%xmm3
2151 xorps %xmm6,%xmm4
2152 movups %xmm2,(%edi,%esi,1)
2153 xorps %xmm7,%xmm5
2154 movups %xmm3,16(%edi,%esi,1)
2155 movdqa %xmm7,%xmm0
2156 movups %xmm4,32(%edi,%esi,1)
2157 movdqa 96(%esp),%xmm1
2158 movups %xmm5,48(%edi,%esi,1)
# Epilogue: scrub the scratch frame, restore %esp, then write the
# final offset (%xmm0) and checksum (%xmm1) back to the caller.
2159.L078done:
2160 movl 128(%esp),%edx
2161 pxor %xmm2,%xmm2
2162 pxor %xmm3,%xmm3
2163 movdqa %xmm2,(%esp)
2164 pxor %xmm4,%xmm4
2165 movdqa %xmm2,16(%esp)
2166 pxor %xmm5,%xmm5
2167 movdqa %xmm2,32(%esp)
2168 pxor %xmm6,%xmm6
2169 movdqa %xmm2,48(%esp)
2170 pxor %xmm7,%xmm7
2171 movdqa %xmm2,64(%esp)
2172 movdqa %xmm2,80(%esp)
2173 movdqa %xmm2,96(%esp)
2174 leal (%edx),%esp
2175 movl 40(%esp),%ecx
2176 movl 48(%esp),%ebx
2177 movdqu %xmm0,(%ecx)
2178 pxor %xmm0,%xmm0
2179 movdqu %xmm1,(%ebx)
2180 pxor %xmm1,%xmm1
2181 popl %edi
2182 popl %esi
2183 popl %ebx
2184 popl %ebp
2185 ret
2186.size aesni_ocb_encrypt,.-.L_aesni_ocb_encrypt_begin
#----------------------------------------------------------------------
# aesni_ocb_decrypt -- OCB-mode bulk decryption, i386 + AES-NI.
# Mirror of aesni_ocb_decrypt's encrypt sibling in this file: same
# argument layout and frame, but uses AESDEC and folds the *decrypted*
# plaintext into the checksum (after the cipher), not the input.
# C-level signature (per OpenSSL's aesni interface -- confirm at caller):
#   void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
#                          size_t blocks, const AES_KEY *key,
#                          unsigned int start_block_num,
#                          unsigned char offset[16],
#                          const unsigned char L_[][16],
#                          unsigned char checksum[16]);
# cdecl: args at 20..48(%esp) after the four pushes.  %xmm0 = running
# offset, %xmm1 = running checksum; both written back on exit.
#
# Aligned scratch frame (132 bytes):
#   0..80(%esp)   six per-block offsets    96(%esp)   checksum spill
#   112(%esp)     key pointer              116(%esp)  16 - 16*rounds bias
#   120(%esp)     out - in delta           124(%esp)  in + 16*blocks - 96
#   128(%esp)     caller's %esp
#----------------------------------------------------------------------
2187.globl aesni_ocb_decrypt
2188.type aesni_ocb_decrypt,@function
2189.align 16
2190aesni_ocb_decrypt:
2191.L_aesni_ocb_decrypt_begin:
2192 pushl %ebp
2193 pushl %ebx
2194 pushl %esi
2195 pushl %edi
2196 movl 40(%esp),%ecx
2197 movl 48(%esp),%ebx
2198 movl 20(%esp),%esi
2199 movl 24(%esp),%edi
2200 movl 28(%esp),%eax
2201 movl 32(%esp),%edx
2202 movdqu (%ecx),%xmm0
2203 movl 36(%esp),%ebp
2204 movdqu (%ebx),%xmm1
2205 movl 44(%esp),%ebx
2206 movl %esp,%ecx
2207 subl $132,%esp
2208 andl $-16,%esp
2209 subl %esi,%edi
2210 shll $4,%eax
2211 leal -96(%esi,%eax,1),%eax
2212 movl %edi,120(%esp)
2213 movl %eax,124(%esp)
2214 movl %ecx,128(%esp)
2215 movl 240(%edx),%ecx
# If the incoming block counter is even, process one block on its own
# so the 6-wide main loop always starts on an odd counter.
2216 testl $1,%ebp
2217 jnz .L084odd
2218 bsfl %ebp,%eax
2219 addl $1,%ebp
2220 shll $4,%eax
2221 movdqu (%ebx,%eax,1),%xmm7
2222 movl %edx,%eax
2223 movdqu (%esi),%xmm2
2224 leal 16(%esi),%esi
2225 pxor %xmm0,%xmm7
2226 pxor %xmm7,%xmm2
2227 movdqa %xmm1,%xmm6
2228 movups (%edx),%xmm0
2229 movups 16(%edx),%xmm1
2230 leal 32(%edx),%edx
2231 xorps %xmm0,%xmm2
2232.L085dec1_loop_17:
2233.byte 102,15,56,222,209 # aesdec %xmm1,%xmm2
2234 decl %ecx
2235 movups (%edx),%xmm1
2236 leal 16(%edx),%edx
2237 jnz .L085dec1_loop_17
2238.byte 102,15,56,223,209 # aesdeclast %xmm1,%xmm2
2239 xorps %xmm7,%xmm2
2240 movaps %xmm6,%xmm1
2241 movdqa %xmm7,%xmm0
2241 xorps %xmm2,%xmm1
2243 movups %xmm2,-16(%edi,%esi,1)
2244 movl 240(%eax),%ecx
2245 movl %eax,%edx
2246 movl 124(%esp),%eax
2247.L084odd:
2248 shll $4,%ecx
2249 movl $16,%edi
2250 subl %ecx,%edi
2251 movl %edx,112(%esp)
2252 leal 32(%edx,%ecx,1),%edx
2253 movl %edi,116(%esp)
2254 cmpl %eax,%esi
2255 ja .L086short
2256 jmp .L087grandloop
2257.align 32
# Main loop: six blocks per iteration; offsets chained exactly as in
# the encrypt path (offset_i = offset_{i-1} XOR L[ntz(i)]).
2258.L087grandloop:
2259 leal 1(%ebp),%ecx
2260 leal 3(%ebp),%eax
2261 leal 5(%ebp),%edi
2262 addl $6,%ebp
2263 bsfl %ecx,%ecx
2264 bsfl %eax,%eax
2265 bsfl %edi,%edi
2266 shll $4,%ecx
2267 shll $4,%eax
2268 shll $4,%edi
2269 movdqu (%ebx),%xmm2
2270 movdqu (%ebx,%ecx,1),%xmm3
2271 movl 116(%esp),%ecx
2272 movdqa %xmm2,%xmm4
2273 movdqu (%ebx,%eax,1),%xmm5
2274 movdqa %xmm2,%xmm6
2275 movdqu (%ebx,%edi,1),%xmm7
2276 pxor %xmm0,%xmm2
2277 pxor %xmm2,%xmm3
2278 movdqa %xmm2,(%esp)
2279 pxor %xmm3,%xmm4
2280 movdqa %xmm3,16(%esp)
2281 pxor %xmm4,%xmm5
2282 movdqa %xmm4,32(%esp)
2283 pxor %xmm5,%xmm6
2284 movdqa %xmm5,48(%esp)
2285 pxor %xmm6,%xmm7
2286 movdqa %xmm6,64(%esp)
2287 movdqa %xmm7,80(%esp)
2288 movups -48(%edx,%ecx,1),%xmm0
# Load six ciphertext blocks and whiten with round key 0 + offsets;
# the checksum is spilled and updated with plaintext after decryption.
2289 movdqu (%esi),%xmm2
2290 movdqu 16(%esi),%xmm3
2291 movdqu 32(%esi),%xmm4
2292 movdqu 48(%esi),%xmm5
2293 movdqu 64(%esi),%xmm6
2294 movdqu 80(%esi),%xmm7
2295 leal 96(%esi),%esi
2296 movdqa %xmm1,96(%esp)
2297 pxor %xmm0,%xmm2
2298 pxor %xmm0,%xmm3
2299 pxor %xmm0,%xmm4
2300 pxor %xmm0,%xmm5
2301 pxor %xmm0,%xmm6
2302 pxor %xmm0,%xmm7
2303 movups -32(%edx,%ecx,1),%xmm1
2304 pxor (%esp),%xmm2
2305 pxor 16(%esp),%xmm3
2306 pxor 32(%esp),%xmm4
2307 pxor 48(%esp),%xmm5
2308 pxor 64(%esp),%xmm6
2309 pxor 80(%esp),%xmm7
2310 movups -16(%edx,%ecx,1),%xmm0
2311.byte 102,15,56,222,209 # aesdec %xmm1,%xmm2
2312.byte 102,15,56,222,217 # aesdec %xmm1,%xmm3
2313.byte 102,15,56,222,225 # aesdec %xmm1,%xmm4
2314.byte 102,15,56,222,233 # aesdec %xmm1,%xmm5
2315.byte 102,15,56,222,241 # aesdec %xmm1,%xmm6
2316.byte 102,15,56,222,249 # aesdec %xmm1,%xmm7
2317 movl 120(%esp),%edi
2318 movl 124(%esp),%eax
2319 call .L_aesni_decrypt6_enter
# Un-whiten, fold the recovered plaintext into the checksum, store.
2320 movdqa 80(%esp),%xmm0
2321 pxor (%esp),%xmm2
2322 movdqa 96(%esp),%xmm1
2323 pxor 16(%esp),%xmm3
2324 pxor 32(%esp),%xmm4
2325 pxor 48(%esp),%xmm5
2326 pxor 64(%esp),%xmm6
2327 pxor %xmm0,%xmm7
2328 pxor %xmm2,%xmm1
2329 movdqu %xmm2,-96(%edi,%esi,1)
2330 pxor %xmm3,%xmm1
2331 movdqu %xmm3,-80(%edi,%esi,1)
2332 pxor %xmm4,%xmm1
2333 movdqu %xmm4,-64(%edi,%esi,1)
2334 pxor %xmm5,%xmm1
2335 movdqu %xmm5,-48(%edi,%esi,1)
2336 pxor %xmm6,%xmm1
2337 movdqu %xmm6,-32(%edi,%esi,1)
2338 pxor %xmm7,%xmm1
2339 movdqu %xmm7,-16(%edi,%esi,1)
2340 cmpl %eax,%esi
2341 jb .L087grandloop
# Tail dispatch on the remaining 0..5 blocks.
2342.L086short:
2343 addl $96,%eax
2344 subl %esi,%eax
2345 jz .L088done
2346 cmpl $32,%eax
2347 jb .L089one
2348 je .L090two
2349 cmpl $64,%eax
2350 jb .L091three
2351 je .L092four
# Five-block tail (%xmm7 zeroed as the sixth lane).
2352 leal 1(%ebp),%ecx
2353 leal 3(%ebp),%eax
2354 bsfl %ecx,%ecx
2355 bsfl %eax,%eax
2356 shll $4,%ecx
2357 shll $4,%eax
2358 movdqu (%ebx),%xmm2
2359 movdqu (%ebx,%ecx,1),%xmm3
2360 movl 116(%esp),%ecx
2361 movdqa %xmm2,%xmm4
2362 movdqu (%ebx,%eax,1),%xmm5
2363 movdqa %xmm2,%xmm6
2364 pxor %xmm0,%xmm2
2365 pxor %xmm2,%xmm3
2366 movdqa %xmm2,(%esp)
2367 pxor %xmm3,%xmm4
2368 movdqa %xmm3,16(%esp)
2369 pxor %xmm4,%xmm5
2370 movdqa %xmm4,32(%esp)
2371 pxor %xmm5,%xmm6
2372 movdqa %xmm5,48(%esp)
2373 pxor %xmm6,%xmm7
2374 movdqa %xmm6,64(%esp)
2375 movups -48(%edx,%ecx,1),%xmm0
2376 movdqu (%esi),%xmm2
2377 movdqu 16(%esi),%xmm3
2378 movdqu 32(%esi),%xmm4
2379 movdqu 48(%esi),%xmm5
2380 movdqu 64(%esi),%xmm6
2381 pxor %xmm7,%xmm7
2382 movdqa %xmm1,96(%esp)
2383 pxor %xmm0,%xmm2
2384 pxor %xmm0,%xmm3
2385 pxor %xmm0,%xmm4
2386 pxor %xmm0,%xmm5
2387 pxor %xmm0,%xmm6
2388 movups -32(%edx,%ecx,1),%xmm1
2389 pxor (%esp),%xmm2
2390 pxor 16(%esp),%xmm3
2391 pxor 32(%esp),%xmm4
2392 pxor 48(%esp),%xmm5
2393 pxor 64(%esp),%xmm6
2394 movups -16(%edx,%ecx,1),%xmm0
2395.byte 102,15,56,222,209 # aesdec %xmm1,%xmm2
2396.byte 102,15,56,222,217 # aesdec %xmm1,%xmm3
2397.byte 102,15,56,222,225 # aesdec %xmm1,%xmm4
2398.byte 102,15,56,222,233 # aesdec %xmm1,%xmm5
2399.byte 102,15,56,222,241 # aesdec %xmm1,%xmm6
2400.byte 102,15,56,222,249 # aesdec %xmm1,%xmm7
2401 movl 120(%esp),%edi
2402 call .L_aesni_decrypt6_enter
2403 movdqa 64(%esp),%xmm0
2404 pxor (%esp),%xmm2
2405 movdqa 96(%esp),%xmm1
2406 pxor 16(%esp),%xmm3
2407 pxor 32(%esp),%xmm4
2408 pxor 48(%esp),%xmm5
2409 pxor %xmm0,%xmm6
2410 pxor %xmm2,%xmm1
2411 movdqu %xmm2,(%edi,%esi,1)
2412 pxor %xmm3,%xmm1
2413 movdqu %xmm3,16(%edi,%esi,1)
2414 pxor %xmm4,%xmm1
2415 movdqu %xmm4,32(%edi,%esi,1)
2416 pxor %xmm5,%xmm1
2417 movdqu %xmm5,48(%edi,%esi,1)
2418 pxor %xmm6,%xmm1
2419 movdqu %xmm6,64(%edi,%esi,1)
2420 jmp .L088done
2421.align 16
# One-block tail: inline single-block decryption, offset = L[0] base.
2422.L089one:
2423 movdqu (%ebx),%xmm7
2424 movl 112(%esp),%edx
2425 movdqu (%esi),%xmm2
2426 movl 240(%edx),%ecx
2427 pxor %xmm0,%xmm7
2428 pxor %xmm7,%xmm2
2429 movdqa %xmm1,%xmm6
2430 movl 120(%esp),%edi
2431 movups (%edx),%xmm0
2432 movups 16(%edx),%xmm1
2433 leal 32(%edx),%edx
2434 xorps %xmm0,%xmm2
2435.L093dec1_loop_18:
2436.byte 102,15,56,222,209 # aesdec %xmm1,%xmm2
2437 decl %ecx
2438 movups (%edx),%xmm1
2439 leal 16(%edx),%edx
2440 jnz .L093dec1_loop_18
2441.byte 102,15,56,223,209 # aesdeclast %xmm1,%xmm2
2442 xorps %xmm7,%xmm2
2443 movaps %xmm6,%xmm1
2444 movdqa %xmm7,%xmm0
2445 xorps %xmm2,%xmm1
2446 movups %xmm2,(%edi,%esi,1)
2447 jmp .L088done
2448.align 16
2449.L090two:
2450 leal 1(%ebp),%ecx
2451 movl 112(%esp),%edx
2452 bsfl %ecx,%ecx
2453 shll $4,%ecx
2454 movdqu (%ebx),%xmm6
2455 movdqu (%ebx,%ecx,1),%xmm7
2456 movdqu (%esi),%xmm2
2457 movdqu 16(%esi),%xmm3
2458 movl 240(%edx),%ecx
2459 movdqa %xmm1,%xmm5
2460 pxor %xmm0,%xmm6
2461 pxor %xmm6,%xmm7
2462 pxor %xmm6,%xmm2
2463 pxor %xmm7,%xmm3
2464 movl 120(%esp),%edi
2465 call _aesni_decrypt2
2466 xorps %xmm6,%xmm2
2467 xorps %xmm7,%xmm3
2468 movdqa %xmm7,%xmm0
2469 xorps %xmm2,%xmm5
2470 movups %xmm2,(%edi,%esi,1)
2471 xorps %xmm3,%xmm5
2472 movups %xmm3,16(%edi,%esi,1)
2473 movaps %xmm5,%xmm1
2474 jmp .L088done
2475.align 16
2476.L091three:
2477 leal 1(%ebp),%ecx
2478 movl 112(%esp),%edx
2479 bsfl %ecx,%ecx
2480 shll $4,%ecx
2481 movdqu (%ebx),%xmm5
2482 movdqu (%ebx,%ecx,1),%xmm6
2483 movdqa %xmm5,%xmm7
2484 movdqu (%esi),%xmm2
2485 movdqu 16(%esi),%xmm3
2486 movdqu 32(%esi),%xmm4
2487 movl 240(%edx),%ecx
2488 movdqa %xmm1,96(%esp)
2489 pxor %xmm0,%xmm5
2490 pxor %xmm5,%xmm6
2491 pxor %xmm6,%xmm7
2492 pxor %xmm5,%xmm2
2493 pxor %xmm6,%xmm3
2494 pxor %xmm7,%xmm4
2495 movl 120(%esp),%edi
2496 call _aesni_decrypt3
2497 movdqa 96(%esp),%xmm1
2498 xorps %xmm5,%xmm2
2499 xorps %xmm6,%xmm3
2500 xorps %xmm7,%xmm4
2501 movups %xmm2,(%edi,%esi,1)
2502 pxor %xmm2,%xmm1
2503 movdqa %xmm7,%xmm0
2504 movups %xmm3,16(%edi,%esi,1)
2505 pxor %xmm3,%xmm1
2506 movups %xmm4,32(%edi,%esi,1)
2507 pxor %xmm4,%xmm1
2508 jmp .L088done
2509.align 16
2510.L092four:
2511 leal 1(%ebp),%ecx
2512 leal 3(%ebp),%eax
2513 bsfl %ecx,%ecx
2514 bsfl %eax,%eax
2515 movl 112(%esp),%edx
2516 shll $4,%ecx
2517 shll $4,%eax
2518 movdqu (%ebx),%xmm4
2519 movdqu (%ebx,%ecx,1),%xmm5
2520 movdqa %xmm4,%xmm6
2521 movdqu (%ebx,%eax,1),%xmm7
2522 pxor %xmm0,%xmm4
2523 movdqu (%esi),%xmm2
2524 pxor %xmm4,%xmm5
2525 movdqu 16(%esi),%xmm3
2526 pxor %xmm5,%xmm6
2527 movdqa %xmm4,(%esp)
2528 pxor %xmm6,%xmm7
2529 movdqa %xmm5,16(%esp)
2530 movdqu 32(%esi),%xmm4
2531 movdqu 48(%esi),%xmm5
2532 movl 240(%edx),%ecx
2533 movdqa %xmm1,96(%esp)
2534 pxor (%esp),%xmm2
2535 pxor 16(%esp),%xmm3
2536 pxor %xmm6,%xmm4
2537 pxor %xmm7,%xmm5
2538 movl 120(%esp),%edi
2539 call _aesni_decrypt4
2540 movdqa 96(%esp),%xmm1
2541 xorps (%esp),%xmm2
2542 xorps 16(%esp),%xmm3
2543 xorps %xmm6,%xmm4
2544 movups %xmm2,(%edi,%esi,1)
2545 pxor %xmm2,%xmm1
2546 xorps %xmm7,%xmm5
2547 movups %xmm3,16(%edi,%esi,1)
2548 pxor %xmm3,%xmm1
2549 movdqa %xmm7,%xmm0
2550 movups %xmm4,32(%edi,%esi,1)
2551 pxor %xmm4,%xmm1
2552 movups %xmm5,48(%edi,%esi,1)
2553 pxor %xmm5,%xmm1
# Epilogue: scrub the scratch frame, restore %esp, then write the
# final offset (%xmm0) and checksum (%xmm1) back to the caller.
2554.L088done:
2555 movl 128(%esp),%edx
2556 pxor %xmm2,%xmm2
2557 pxor %xmm3,%xmm3
2558 movdqa %xmm2,(%esp)
2559 pxor %xmm4,%xmm4
2560 movdqa %xmm2,16(%esp)
2561 pxor %xmm5,%xmm5
2562 movdqa %xmm2,32(%esp)
2563 pxor %xmm6,%xmm6
2564 movdqa %xmm2,48(%esp)
2565 pxor %xmm7,%xmm7
2566 movdqa %xmm2,64(%esp)
2567 movdqa %xmm2,80(%esp)
2568 movdqa %xmm2,96(%esp)
2569 leal (%edx),%esp
2570 movl 40(%esp),%ecx
2571 movl 48(%esp),%ebx
2572 movdqu %xmm0,(%ecx)
2573 pxor %xmm0,%xmm0
2574 movdqu %xmm1,(%ebx)
2575 pxor %xmm1,%xmm1
2576 popl %edi
2577 popl %esi
2578 popl %ebx
2579 popl %ebp
2580 ret
2581.size aesni_ocb_decrypt,.-.L_aesni_ocb_decrypt_begin
2582.globl aesni_cbc_encrypt
2583.type aesni_cbc_encrypt,@function
2584.align 16
2585aesni_cbc_encrypt:
2586.L_aesni_cbc_encrypt_begin:
2587 pushl %ebp
2588 pushl %ebx
2589 pushl %esi
2590 pushl %edi
2591 movl 20(%esp),%esi
2592 movl %esp,%ebx
2593 movl 24(%esp),%edi
2594 subl $24,%ebx
2595 movl 28(%esp),%eax
2596 andl $-16,%ebx
2597 movl 32(%esp),%edx
2598 movl 36(%esp),%ebp
2599 testl %eax,%eax
2600 jz .L094cbc_abort
2601 cmpl $0,40(%esp)
2602 xchgl %esp,%ebx
2603 movups (%ebp),%xmm7
2604 movl 240(%edx),%ecx
2605 movl %edx,%ebp
2606 movl %ebx,16(%esp)
2607 movl %ecx,%ebx
2608 je .L095cbc_decrypt
2609 movaps %xmm7,%xmm2
2610 cmpl $16,%eax
2611 jb .L096cbc_enc_tail
2612 subl $16,%eax
2613 jmp .L097cbc_enc_loop
2614.align 16
2615.L097cbc_enc_loop:
2616 movups (%esi),%xmm7
2617 leal 16(%esi),%esi
2618 movups (%edx),%xmm0
2619 movups 16(%edx),%xmm1
2620 xorps %xmm0,%xmm7
2621 leal 32(%edx),%edx
2622 xorps %xmm7,%xmm2
2623.L098enc1_loop_19:
2624.byte 102,15,56,220,209
2625 decl %ecx
2626 movups (%edx),%xmm1
2627 leal 16(%edx),%edx
2628 jnz .L098enc1_loop_19
2629.byte 102,15,56,221,209
2630 movl %ebx,%ecx
2631 movl %ebp,%edx
2632 movups %xmm2,(%edi)
2633 leal 16(%edi),%edi
2634 subl $16,%eax
2635 jnc .L097cbc_enc_loop
2636 addl $16,%eax
2637 jnz .L096cbc_enc_tail
2638 movaps %xmm2,%xmm7
2639 pxor %xmm2,%xmm2
2640 jmp .L099cbc_ret
2641.L096cbc_enc_tail:
2642 movl %eax,%ecx
2643.long 2767451785
2644 movl $16,%ecx
2645 subl %eax,%ecx
2646 xorl %eax,%eax
2647.long 2868115081
2648 leal -16(%edi),%edi
2649 movl %ebx,%ecx
2650 movl %edi,%esi
2651 movl %ebp,%edx
2652 jmp .L097cbc_enc_loop
2653.align 16
2654.L095cbc_decrypt:
2655 cmpl $80,%eax
2656 jbe .L100cbc_dec_tail
2657 movaps %xmm7,(%esp)
2658 subl $80,%eax
2659 jmp .L101cbc_dec_loop6_enter
2660.align 16
2661.L102cbc_dec_loop6:
2662 movaps %xmm0,(%esp)
2663 movups %xmm7,(%edi)
2664 leal 16(%edi),%edi
2665.L101cbc_dec_loop6_enter:
2666 movdqu (%esi),%xmm2
2667 movdqu 16(%esi),%xmm3
2668 movdqu 32(%esi),%xmm4
2669 movdqu 48(%esi),%xmm5
2670 movdqu 64(%esi),%xmm6
2671 movdqu 80(%esi),%xmm7
2672 call _aesni_decrypt6
2673 movups (%esi),%xmm1
2674 movups 16(%esi),%xmm0
2675 xorps (%esp),%xmm2
2676 xorps %xmm1,%xmm3
2677 movups 32(%esi),%xmm1
2678 xorps %xmm0,%xmm4
2679 movups 48(%esi),%xmm0
2680 xorps %xmm1,%xmm5
2681 movups 64(%esi),%xmm1
2682 xorps %xmm0,%xmm6
2683 movups 80(%esi),%xmm0
2684 xorps %xmm1,%xmm7
2685 movups %xmm2,(%edi)
2686 movups %xmm3,16(%edi)
2687 leal 96(%esi),%esi
2688 movups %xmm4,32(%edi)
2689 movl %ebx,%ecx
2690 movups %xmm5,48(%edi)
2691 movl %ebp,%edx
2692 movups %xmm6,64(%edi)
2693 leal 80(%edi),%edi
2694 subl $96,%eax
2695 ja .L102cbc_dec_loop6
2696 movaps %xmm7,%xmm2
2697 movaps %xmm0,%xmm7
2698 addl $80,%eax
2699 jle .L103cbc_dec_clear_tail_collected
2700 movups %xmm2,(%edi)
2701 leal 16(%edi),%edi
2702.L100cbc_dec_tail:
2703 movups (%esi),%xmm2
2704 movaps %xmm2,%xmm6
2705 cmpl $16,%eax
2706 jbe .L104cbc_dec_one
2707 movups 16(%esi),%xmm3
2708 movaps %xmm3,%xmm5
2709 cmpl $32,%eax
2710 jbe .L105cbc_dec_two
2711 movups 32(%esi),%xmm4
2712 cmpl $48,%eax
2713 jbe .L106cbc_dec_three
2714 movups 48(%esi),%xmm5
2715 cmpl $64,%eax
2716 jbe .L107cbc_dec_four
2717 movups 64(%esi),%xmm6
2718 movaps %xmm7,(%esp)
2719 movups (%esi),%xmm2
2720 xorps %xmm7,%xmm7
2721 call _aesni_decrypt6
2722 movups (%esi),%xmm1
2723 movups 16(%esi),%xmm0
2724 xorps (%esp),%xmm2
2725 xorps %xmm1,%xmm3
2726 movups 32(%esi),%xmm1
2727 xorps %xmm0,%xmm4
2728 movups 48(%esi),%xmm0
2729 xorps %xmm1,%xmm5
2730 movups 64(%esi),%xmm7
2731 xorps %xmm0,%xmm6
2732 movups %xmm2,(%edi)
2733 movups %xmm3,16(%edi)
2734 pxor %xmm3,%xmm3
2735 movups %xmm4,32(%edi)
2736 pxor %xmm4,%xmm4
2737 movups %xmm5,48(%edi)
2738 pxor %xmm5,%xmm5
2739 leal 64(%edi),%edi
2740 movaps %xmm6,%xmm2
2741 pxor %xmm6,%xmm6
2742 subl $80,%eax
2743 jmp .L108cbc_dec_tail_collected
2744.align 16
2745.L104cbc_dec_one:
2746 movups (%edx),%xmm0
2747 movups 16(%edx),%xmm1
2748 leal 32(%edx),%edx
2749 xorps %xmm0,%xmm2
2750.L109dec1_loop_20:
2751.byte 102,15,56,222,209
2752 decl %ecx
2753 movups (%edx),%xmm1
2754 leal 16(%edx),%edx
2755 jnz .L109dec1_loop_20
2756.byte 102,15,56,223,209
2757 xorps %xmm7,%xmm2
2758 movaps %xmm6,%xmm7
2759 subl $16,%eax
2760 jmp .L108cbc_dec_tail_collected
2761.align 16
2762.L105cbc_dec_two:
2763 call _aesni_decrypt2
2764 xorps %xmm7,%xmm2
2765 xorps %xmm6,%xmm3
2766 movups %xmm2,(%edi)
2767 movaps %xmm3,%xmm2
2768 pxor %xmm3,%xmm3
2769 leal 16(%edi),%edi
2770 movaps %xmm5,%xmm7
2771 subl $32,%eax
2772 jmp .L108cbc_dec_tail_collected
2773.align 16
2774.L106cbc_dec_three:
2775 call _aesni_decrypt3
2776 xorps %xmm7,%xmm2
2777 xorps %xmm6,%xmm3
2778 xorps %xmm5,%xmm4
2779 movups %xmm2,(%edi)
2780 movaps %xmm4,%xmm2
2781 pxor %xmm4,%xmm4
2782 movups %xmm3,16(%edi)
2783 pxor %xmm3,%xmm3
2784 leal 32(%edi),%edi
2785 movups 32(%esi),%xmm7
2786 subl $48,%eax
2787 jmp .L108cbc_dec_tail_collected
2788.align 16
2789.L107cbc_dec_four:
2790 call _aesni_decrypt4
2791 movups 16(%esi),%xmm1
2792 movups 32(%esi),%xmm0
2793 xorps %xmm7,%xmm2
2794 movups 48(%esi),%xmm7
2795 xorps %xmm6,%xmm3
2796 movups %xmm2,(%edi)
2797 xorps %xmm1,%xmm4
2798 movups %xmm3,16(%edi)
2799 pxor %xmm3,%xmm3
2800 xorps %xmm0,%xmm5
2801 movups %xmm4,32(%edi)
2802 pxor %xmm4,%xmm4
2803 leal 48(%edi),%edi
2804 movaps %xmm5,%xmm2
2805 pxor %xmm5,%xmm5
2806 subl $64,%eax
2807 jmp .L108cbc_dec_tail_collected
2808.align 16
2809.L103cbc_dec_clear_tail_collected:
2810 pxor %xmm3,%xmm3
2811 pxor %xmm4,%xmm4
2812 pxor %xmm5,%xmm5
2813 pxor %xmm6,%xmm6
2814.L108cbc_dec_tail_collected:
2815 andl $15,%eax
2816 jnz .L110cbc_dec_tail_partial
2817 movups %xmm2,(%edi)
2818 pxor %xmm0,%xmm0
2819 jmp .L099cbc_ret
2820.align 16
2821.L110cbc_dec_tail_partial:
2822 movaps %xmm2,(%esp)
2823 pxor %xmm0,%xmm0
2824 movl $16,%ecx
2825 movl %esp,%esi
2826 subl %eax,%ecx
2827.long 2767451785
2828 movdqa %xmm2,(%esp)
2829.L099cbc_ret:
2830 movl 16(%esp),%esp
2831 movl 36(%esp),%ebp
2832 pxor %xmm2,%xmm2
2833 pxor %xmm1,%xmm1
2834 movups %xmm7,(%ebp)
2835 pxor %xmm7,%xmm7
2836.L094cbc_abort:
2837 popl %edi
2838 popl %esi
2839 popl %ebx
2840 popl %ebp
2841 ret
2842.size aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin
2843.type _aesni_set_encrypt_key,@function
2844.align 16
2845_aesni_set_encrypt_key:
2846 pushl %ebp
2847 pushl %ebx
2848 testl %eax,%eax
2849 jz .L111bad_pointer
2850 testl %edx,%edx
2851 jz .L111bad_pointer
2852 call .L112pic
2853.L112pic:
2854 popl %ebx
2855 leal .Lkey_const-.L112pic(%ebx),%ebx
2856 leal OPENSSL_ia32cap_P-.Lkey_const(%ebx),%ebp
2857 movups (%eax),%xmm0
2858 xorps %xmm4,%xmm4
2859 movl 4(%ebp),%ebp
2860 leal 16(%edx),%edx
2861 andl $268437504,%ebp
2862 cmpl $256,%ecx
2863 je .L11314rounds
2864 cmpl $192,%ecx
2865 je .L11412rounds
2866 cmpl $128,%ecx
2867 jne .L115bad_keybits
2868.align 16
2869.L11610rounds:
2870 cmpl $268435456,%ebp
2871 je .L11710rounds_alt
2872 movl $9,%ecx
2873 movups %xmm0,-16(%edx)
2874.byte 102,15,58,223,200,1
2875 call .L118key_128_cold
2876.byte 102,15,58,223,200,2
2877 call .L119key_128
2878.byte 102,15,58,223,200,4
2879 call .L119key_128
2880.byte 102,15,58,223,200,8
2881 call .L119key_128
2882.byte 102,15,58,223,200,16
2883 call .L119key_128
2884.byte 102,15,58,223,200,32
2885 call .L119key_128
2886.byte 102,15,58,223,200,64
2887 call .L119key_128
2888.byte 102,15,58,223,200,128
2889 call .L119key_128
2890.byte 102,15,58,223,200,27
2891 call .L119key_128
2892.byte 102,15,58,223,200,54
2893 call .L119key_128
2894 movups %xmm0,(%edx)
2895 movl %ecx,80(%edx)
2896 jmp .L120good_key
2897.align 16
2898.L119key_128:
2899 movups %xmm0,(%edx)
2900 leal 16(%edx),%edx
2901.L118key_128_cold:
2902 shufps $16,%xmm0,%xmm4
2903 xorps %xmm4,%xmm0
2904 shufps $140,%xmm0,%xmm4
2905 xorps %xmm4,%xmm0
2906 shufps $255,%xmm1,%xmm1
2907 xorps %xmm1,%xmm0
2908 ret
2909.align 16
2910.L11710rounds_alt:
2911 movdqa (%ebx),%xmm5
2912 movl $8,%ecx
2913 movdqa 32(%ebx),%xmm4
2914 movdqa %xmm0,%xmm2
2915 movdqu %xmm0,-16(%edx)
2916.L121loop_key128:
2917.byte 102,15,56,0,197
2918.byte 102,15,56,221,196
2919 pslld $1,%xmm4
2920 leal 16(%edx),%edx
2921 movdqa %xmm2,%xmm3
2922 pslldq $4,%xmm2
2923 pxor %xmm2,%xmm3
2924 pslldq $4,%xmm2
2925 pxor %xmm2,%xmm3
2926 pslldq $4,%xmm2
2927 pxor %xmm3,%xmm2
2928 pxor %xmm2,%xmm0
2929 movdqu %xmm0,-16(%edx)
2930 movdqa %xmm0,%xmm2
2931 decl %ecx
2932 jnz .L121loop_key128
2933 movdqa 48(%ebx),%xmm4
2934.byte 102,15,56,0,197
2935.byte 102,15,56,221,196
2936 pslld $1,%xmm4
2937 movdqa %xmm2,%xmm3
2938 pslldq $4,%xmm2
2939 pxor %xmm2,%xmm3
2940 pslldq $4,%xmm2
2941 pxor %xmm2,%xmm3
2942 pslldq $4,%xmm2
2943 pxor %xmm3,%xmm2
2944 pxor %xmm2,%xmm0
2945 movdqu %xmm0,(%edx)
2946 movdqa %xmm0,%xmm2
2947.byte 102,15,56,0,197
2948.byte 102,15,56,221,196
2949 movdqa %xmm2,%xmm3
2950 pslldq $4,%xmm2
2951 pxor %xmm2,%xmm3
2952 pslldq $4,%xmm2
2953 pxor %xmm2,%xmm3
2954 pslldq $4,%xmm2
2955 pxor %xmm3,%xmm2
2956 pxor %xmm2,%xmm0
2957 movdqu %xmm0,16(%edx)
2958 movl $9,%ecx
2959 movl %ecx,96(%edx)
2960 jmp .L120good_key
2961.align 16
2962.L11412rounds:
2963 movq 16(%eax),%xmm2
2964 cmpl $268435456,%ebp
2965 je .L12212rounds_alt
2966 movl $11,%ecx
2967 movups %xmm0,-16(%edx)
2968.byte 102,15,58,223,202,1
2969 call .L123key_192a_cold
2970.byte 102,15,58,223,202,2
2971 call .L124key_192b
2972.byte 102,15,58,223,202,4
2973 call .L125key_192a
2974.byte 102,15,58,223,202,8
2975 call .L124key_192b
2976.byte 102,15,58,223,202,16
2977 call .L125key_192a
2978.byte 102,15,58,223,202,32
2979 call .L124key_192b
2980.byte 102,15,58,223,202,64
2981 call .L125key_192a
2982.byte 102,15,58,223,202,128
2983 call .L124key_192b
2984 movups %xmm0,(%edx)
2985 movl %ecx,48(%edx)
2986 jmp .L120good_key
2987.align 16
2988.L125key_192a:
2989 movups %xmm0,(%edx)
2990 leal 16(%edx),%edx
2991.align 16
2992.L123key_192a_cold:
2993 movaps %xmm2,%xmm5
2994.L126key_192b_warm:
2995 shufps $16,%xmm0,%xmm4
2996 movdqa %xmm2,%xmm3
2997 xorps %xmm4,%xmm0
2998 shufps $140,%xmm0,%xmm4
2999 pslldq $4,%xmm3
3000 xorps %xmm4,%xmm0
3001 pshufd $85,%xmm1,%xmm1
3002 pxor %xmm3,%xmm2
3003 pxor %xmm1,%xmm0
3004 pshufd $255,%xmm0,%xmm3
3005 pxor %xmm3,%xmm2
3006 ret
3007.align 16
3008.L124key_192b:
3009 movaps %xmm0,%xmm3
3010 shufps $68,%xmm0,%xmm5
3011 movups %xmm5,(%edx)
3012 shufps $78,%xmm2,%xmm3
3013 movups %xmm3,16(%edx)
3014 leal 32(%edx),%edx
3015 jmp .L126key_192b_warm
3016.align 16
3017.L12212rounds_alt:
3018 movdqa 16(%ebx),%xmm5
3019 movdqa 32(%ebx),%xmm4
3020 movl $8,%ecx
3021 movdqu %xmm0,-16(%edx)
3022.L127loop_key192:
3023 movq %xmm2,(%edx)
3024 movdqa %xmm2,%xmm1
3025.byte 102,15,56,0,213
3026.byte 102,15,56,221,212
3027 pslld $1,%xmm4
3028 leal 24(%edx),%edx
3029 movdqa %xmm0,%xmm3
3030 pslldq $4,%xmm0
3031 pxor %xmm0,%xmm3
3032 pslldq $4,%xmm0
3033 pxor %xmm0,%xmm3
3034 pslldq $4,%xmm0
3035 pxor %xmm3,%xmm0
3036 pshufd $255,%xmm0,%xmm3
3037 pxor %xmm1,%xmm3
3038 pslldq $4,%xmm1
3039 pxor %xmm1,%xmm3
3040 pxor %xmm2,%xmm0
3041 pxor %xmm3,%xmm2
3042 movdqu %xmm0,-16(%edx)
3043 decl %ecx
3044 jnz .L127loop_key192
3045 movl $11,%ecx
3046 movl %ecx,32(%edx)
3047 jmp .L120good_key
3048.align 16
3049.L11314rounds:
3050 movups 16(%eax),%xmm2
3051 leal 16(%edx),%edx
3052 cmpl $268435456,%ebp
3053 je .L12814rounds_alt
3054 movl $13,%ecx
3055 movups %xmm0,-32(%edx)
3056 movups %xmm2,-16(%edx)
3057.byte 102,15,58,223,202,1
3058 call .L129key_256a_cold
3059.byte 102,15,58,223,200,1
3060 call .L130key_256b
3061.byte 102,15,58,223,202,2
3062 call .L131key_256a
3063.byte 102,15,58,223,200,2
3064 call .L130key_256b
3065.byte 102,15,58,223,202,4
3066 call .L131key_256a
3067.byte 102,15,58,223,200,4
3068 call .L130key_256b
3069.byte 102,15,58,223,202,8
3070 call .L131key_256a
3071.byte 102,15,58,223,200,8
3072 call .L130key_256b
3073.byte 102,15,58,223,202,16
3074 call .L131key_256a
3075.byte 102,15,58,223,200,16
3076 call .L130key_256b
3077.byte 102,15,58,223,202,32
3078 call .L131key_256a
3079.byte 102,15,58,223,200,32
3080 call .L130key_256b
3081.byte 102,15,58,223,202,64
3082 call .L131key_256a
3083 movups %xmm0,(%edx)
3084 movl %ecx,16(%edx)
3085 xorl %eax,%eax
3086 jmp .L120good_key
3087.align 16
3088.L131key_256a:
3089 movups %xmm2,(%edx)
3090 leal 16(%edx),%edx
3091.L129key_256a_cold:
3092 shufps $16,%xmm0,%xmm4
3093 xorps %xmm4,%xmm0
3094 shufps $140,%xmm0,%xmm4
3095 xorps %xmm4,%xmm0
3096 shufps $255,%xmm1,%xmm1
3097 xorps %xmm1,%xmm0
3098 ret
3099.align 16
3100.L130key_256b:
3101 movups %xmm0,(%edx)
3102 leal 16(%edx),%edx
3103 shufps $16,%xmm2,%xmm4
3104 xorps %xmm4,%xmm2
3105 shufps $140,%xmm2,%xmm4
3106 xorps %xmm4,%xmm2
3107 shufps $170,%xmm1,%xmm1
3108 xorps %xmm1,%xmm2
3109 ret
3110.align 16
3111.L12814rounds_alt:
3112 movdqa (%ebx),%xmm5
3113 movdqa 32(%ebx),%xmm4
3114 movl $7,%ecx
3115 movdqu %xmm0,-32(%edx)
3116 movdqa %xmm2,%xmm1
3117 movdqu %xmm2,-16(%edx)
3118.L132loop_key256:
3119.byte 102,15,56,0,213
3120.byte 102,15,56,221,212
3121 movdqa %xmm0,%xmm3
3122 pslldq $4,%xmm0
3123 pxor %xmm0,%xmm3
3124 pslldq $4,%xmm0
3125 pxor %xmm0,%xmm3
3126 pslldq $4,%xmm0
3127 pxor %xmm3,%xmm0
3128 pslld $1,%xmm4
3129 pxor %xmm2,%xmm0
3130 movdqu %xmm0,(%edx)
3131 decl %ecx
3132 jz .L133done_key256
3133 pshufd $255,%xmm0,%xmm2
3134 pxor %xmm3,%xmm3
3135.byte 102,15,56,221,211
3136 movdqa %xmm1,%xmm3
3137 pslldq $4,%xmm1
3138 pxor %xmm1,%xmm3
3139 pslldq $4,%xmm1
3140 pxor %xmm1,%xmm3
3141 pslldq $4,%xmm1
3142 pxor %xmm3,%xmm1
3143 pxor %xmm1,%xmm2
3144 movdqu %xmm2,16(%edx)
3145 leal 32(%edx),%edx
3146 movdqa %xmm2,%xmm1
3147 jmp .L132loop_key256
3148.L133done_key256:
3149 movl $13,%ecx
3150 movl %ecx,16(%edx)
3151.L120good_key:
3152 pxor %xmm0,%xmm0
3153 pxor %xmm1,%xmm1
3154 pxor %xmm2,%xmm2
3155 pxor %xmm3,%xmm3
3156 pxor %xmm4,%xmm4
3157 pxor %xmm5,%xmm5
3158 xorl %eax,%eax
3159 popl %ebx
3160 popl %ebp
3161 ret
3162.align 4
3163.L111bad_pointer:
3164 movl $-1,%eax
3165 popl %ebx
3166 popl %ebp
3167 ret
3168.align 4
3169.L115bad_keybits:
3170 pxor %xmm0,%xmm0
3171 movl $-2,%eax
3172 popl %ebx
3173 popl %ebp
3174 ret
3175.size _aesni_set_encrypt_key,.-_aesni_set_encrypt_key
3176.globl aesni_set_encrypt_key
3177.type aesni_set_encrypt_key,@function
3178.align 16
3179aesni_set_encrypt_key:
3180.L_aesni_set_encrypt_key_begin:
3181 movl 4(%esp),%eax
3182 movl 8(%esp),%ecx
3183 movl 12(%esp),%edx
3184 call _aesni_set_encrypt_key
3185 ret
3186.size aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin
3187.globl aesni_set_decrypt_key
3188.type aesni_set_decrypt_key,@function
3189.align 16
3190aesni_set_decrypt_key:
3191.L_aesni_set_decrypt_key_begin:
3192 movl 4(%esp),%eax
3193 movl 8(%esp),%ecx
3194 movl 12(%esp),%edx
3195 call _aesni_set_encrypt_key
3196 movl 12(%esp),%edx
3197 shll $4,%ecx
3198 testl %eax,%eax
3199 jnz .L134dec_key_ret
3200 leal 16(%edx,%ecx,1),%eax
3201 movups (%edx),%xmm0
3202 movups (%eax),%xmm1
3203 movups %xmm0,(%eax)
3204 movups %xmm1,(%edx)
3205 leal 16(%edx),%edx
3206 leal -16(%eax),%eax
3207.L135dec_key_inverse:
3208 movups (%edx),%xmm0
3209 movups (%eax),%xmm1
3210.byte 102,15,56,219,192
3211.byte 102,15,56,219,201
3212 leal 16(%edx),%edx
3213 leal -16(%eax),%eax
3214 movups %xmm0,16(%eax)
3215 movups %xmm1,-16(%edx)
3216 cmpl %edx,%eax
3217 ja .L135dec_key_inverse
3218 movups (%edx),%xmm0
3219.byte 102,15,56,219,192
3220 movups %xmm0,(%edx)
3221 pxor %xmm0,%xmm0
3222 pxor %xmm1,%xmm1
3223 xorl %eax,%eax
3224.L134dec_key_ret:
3225 ret
3226.size aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin
3227.align 64
3228.Lkey_const:
3229.long 202313229,202313229,202313229,202313229
3230.long 67569157,67569157,67569157,67569157
3231.long 1,1,1,1
3232.long 27,27,27,27
3233.byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69
3234.byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
3235.byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
3236.byte 115,108,46,111,114,103,62,0
3237.comm OPENSSL_ia32cap_P,16,4
# Note: See TracBrowser for help on using the repository browser.
# (Trac web-page footer captured with this file; kept as comments so the
#  file remains assemblable.)
# © 2024 Oracle — Support / Privacy / Do Not Sell My Info / Terms of Use /
# Trademark Policy / Automated Access Etiquette