VirtualBox

source: vbox/trunk/src/libs/openssl-3.0.9/crypto/genasm-elf/ghash-x86.S@ 100942

Last change to this file (as of revision 100942) was made in revision 97372, checked in by vboxsync, 2 years ago

libs: Switch to openssl-3.0.7, bugref:10317

File size: 13.3 KB
Line 
1.text
/*
 * void gcm_gmult_4bit_x86(u64 Xi[2], const u128 Htable[16])
 *
 * GHASH single multiplication, Xi <- Xi * H in GF(2^128), using the 4-bit
 * table-lookup method; plain i386 integer code (no MMX).  This is perlasm
 * output from OpenSSL's ghash-x86 module (see file path in the page header).
 * NOTE(review): argument names/types inferred from the OpenSSL origin --
 * confirm against crypto/modes/gcm128.c.
 *
 * Stack args (4 pushes + 84-byte frame => first arg at 104(%esp)):
 *   104(%esp) = Xi      (16-byte hash value, stored big-endian)
 *   108(%esp) = Htable  (16 entries x 16 bytes, precomputed nibble*H)
 * Frame layout: 0(%esp)  = 16-byte working copy of Xi,
 *               16(%esp) = 16 dwords of per-nibble reduction constants.
 */
2.globl gcm_gmult_4bit_x86
3.type gcm_gmult_4bit_x86,@function
4.align 16
5gcm_gmult_4bit_x86:
6.L_gcm_gmult_4bit_x86_begin:
7 #ifdef __CET__
8
9.byte 243,15,30,251 # endbr32 (F3 0F 1E FB): CET/IBT landing pad
10 #endif
11
12 pushl %ebp # save callee-saved registers (i386 cdecl)
13 pushl %ebx
14 pushl %esi
15 pushl %edi
16 subl $84,%esp # local frame: Xi copy + reduction table
17 movl 104(%esp),%edi # %edi = Xi
18 movl 108(%esp),%esi # %esi = Htable
19 movl (%edi),%ebp # load Xi as four little-endian dwords
20 movl 4(%edi),%edx
21 movl 8(%edi),%ecx
22 movl 12(%edi),%ebx
23 movl $0,16(%esp) # build reduction table at 16(%esp), indexed by nibble
24 movl $471859200,20(%esp) # 0x1c200000
25 movl $943718400,24(%esp) # 0x38400000 (and so on for the other entries)
26 movl $610271232,28(%esp)
27 movl $1887436800,32(%esp)
28 movl $1822425088,36(%esp)
29 movl $1220542464,40(%esp)
30 movl $1423966208,44(%esp)
31 movl $3774873600,48(%esp)
32 movl $4246732800,52(%esp)
33 movl $3644850176,56(%esp)
34 movl $3311403008,60(%esp)
35 movl $2441084928,64(%esp)
36 movl $2376073216,68(%esp)
37 movl $2847932416,72(%esp)
38 movl $3051356160,76(%esp)
39 movl %ebp,(%esp) # working copy of Xi at the bottom of the frame
40 movl %edx,4(%esp)
41 movl %ecx,8(%esp)
42 movl %ebx,12(%esp)
43 shrl $20,%ebx # (ebx>>20)&0xf0 = low nibble of Xi byte 15, x16
44 andl $240,%ebx # = byte offset of the Htable row
45 movl 4(%esi,%ebx,1),%ebp # Z = Htable[nibble], four dwords
46 movl (%esi,%ebx,1),%edx
47 movl 12(%esi,%ebx,1),%ecx
48 movl 8(%esi,%ebx,1),%ebx
49 xorl %eax,%eax
50 movl $15,%edi # %edi = byte counter over the Xi copy, 15..0
51 jmp .L000x86_loop
52.align 16
53.L000x86_loop:
54 movb %bl,%al # capture nibble about to be shifted out of Z
55 shrdl $4,%ecx,%ebx # shift 128-bit Z right by 4
56 andb $15,%al # (low->high dword order: ebx,ecx,edx,ebp)
57 shrdl $4,%edx,%ecx
58 shrdl $4,%ebp,%edx
59 shrl $4,%ebp
60 xorl 16(%esp,%eax,4),%ebp # fold reduction constant for that nibble into top word
61 movb (%esp,%edi,1),%al # next byte of the Xi copy
62 andb $240,%al # high nibble x16 = Htable row offset
63 xorl 8(%esi,%eax,1),%ebx # Z ^= Htable[nibble]
64 xorl 12(%esi,%eax,1),%ecx
65 xorl (%esi,%eax,1),%edx
66 xorl 4(%esi,%eax,1),%ebp
67 decl %edi
68 js .L001x86_break # done once the low-nibble pass of byte 0 completes
69 movb %bl,%al # second unrolled half: identical step for the
70 shrdl $4,%ecx,%ebx # low nibble of the next byte
71 andb $15,%al
72 shrdl $4,%edx,%ecx
73 shrdl $4,%ebp,%edx
74 shrl $4,%ebp
75 xorl 16(%esp,%eax,4),%ebp
76 movb (%esp,%edi,1),%al
77 shlb $4,%al # low nibble x16
78 xorl 8(%esi,%eax,1),%ebx
79 xorl 12(%esi,%eax,1),%ecx
80 xorl (%esi,%eax,1),%edx
81 xorl 4(%esi,%eax,1),%ebp
82 jmp .L000x86_loop
83.align 16
84.L001x86_break:
85 bswap %ebx # convert back to Xi's big-endian byte order
86 bswap %ecx
87 bswap %edx
88 bswap %ebp
89 movl 104(%esp),%edi # reload Xi pointer (%edi was the loop counter)
90 movl %ebx,12(%edi) # store the result into Xi
91 movl %ecx,8(%edi)
92 movl %edx,4(%edi)
93 movl %ebp,(%edi)
94 addl $84,%esp
95 popl %edi # restore callee-saved registers and return
96 popl %esi
97 popl %ebx
98 popl %ebp
99 ret
100.size gcm_gmult_4bit_x86,.-.L_gcm_gmult_4bit_x86_begin
/*
 * void gcm_ghash_4bit_x86(u64 Xi[2], const u128 Htable[16],
 *                         const u8 *inp, size_t len)
 *
 * GHASH over a buffer: for each 16-byte input block,
 * Xi <- (Xi ^ block) * H.  Same 4-bit table algorithm and frame layout as
 * gcm_gmult_4bit_x86 above; plain i386 integer code.
 * The loop advances 16 bytes per iteration, so len is assumed to be a
 * multiple of 16 (standard GHASH contract -- confirm against gcm128.c).
 *
 * Stack args (4 pushes + 84-byte frame):
 *   104(%esp)=Xi, 108(%esp)=Htable, 112(%esp)=inp,
 *   116(%esp)=len (immediately replaced by the end pointer inp+len).
 */
101.globl gcm_ghash_4bit_x86
102.type gcm_ghash_4bit_x86,@function
103.align 16
104gcm_ghash_4bit_x86:
105.L_gcm_ghash_4bit_x86_begin:
106 #ifdef __CET__
107
108.byte 243,15,30,251 # endbr32: CET/IBT landing pad
109 #endif
110
111 pushl %ebp # save callee-saved registers
112 pushl %ebx
113 pushl %esi
114 pushl %edi
115 subl $84,%esp # frame: Xi copy + reduction table
116 movl 104(%esp),%ebx # %ebx = Xi
117 movl 108(%esp),%esi # %esi = Htable
118 movl 112(%esp),%edi # %edi = inp
119 movl 116(%esp),%ecx # %ecx = len
120 addl %edi,%ecx # end pointer = inp + len
121 movl %ecx,116(%esp) # reuse the len slot for the end pointer
122 movl (%ebx),%ebp # load Xi as four dwords
123 movl 4(%ebx),%edx
124 movl 8(%ebx),%ecx
125 movl 12(%ebx),%ebx
126 movl $0,16(%esp) # per-nibble reduction table (same as gmult above)
127 movl $471859200,20(%esp)
128 movl $943718400,24(%esp)
129 movl $610271232,28(%esp)
130 movl $1887436800,32(%esp)
131 movl $1822425088,36(%esp)
132 movl $1220542464,40(%esp)
133 movl $1423966208,44(%esp)
134 movl $3774873600,48(%esp)
135 movl $4246732800,52(%esp)
136 movl $3644850176,56(%esp)
137 movl $3311403008,60(%esp)
138 movl $2441084928,64(%esp)
139 movl $2376073216,68(%esp)
140 movl $2847932416,72(%esp)
141 movl $3051356160,76(%esp)
142.align 16
143.L002x86_outer_loop: # per 16-byte block: Xi ^= block, then Xi *= H
144 xorl 12(%edi),%ebx # XOR the input block into Xi
145 xorl 8(%edi),%ecx
146 xorl 4(%edi),%edx
147 xorl (%edi),%ebp
148 movl %ebx,12(%esp) # stash the XORed value as the working copy
149 movl %ecx,8(%esp)
150 movl %edx,4(%esp)
151 movl %ebp,(%esp)
152 shrl $20,%ebx # first index: low nibble of byte 15, x16
153 andl $240,%ebx
154 movl 4(%esi,%ebx,1),%ebp # Z = Htable[nibble]
155 movl (%esi,%ebx,1),%edx
156 movl 12(%esi,%ebx,1),%ecx
157 movl 8(%esi,%ebx,1),%ebx
158 xorl %eax,%eax
159 movl $15,%edi # %edi = byte counter, 15..0
160 jmp .L003x86_loop
161.align 16
162.L003x86_loop: # identical inner step to .L000x86_loop above
163 movb %bl,%al # nibble being shifted out of Z
164 shrdl $4,%ecx,%ebx # 128-bit Z >>= 4 (low->high: ebx,ecx,edx,ebp)
165 andb $15,%al
166 shrdl $4,%edx,%ecx
167 shrdl $4,%ebp,%edx
168 shrl $4,%ebp
169 xorl 16(%esp,%eax,4),%ebp # fold reduction constant into the top word
170 movb (%esp,%edi,1),%al # next byte of the working copy
171 andb $240,%al # high nibble x16
172 xorl 8(%esi,%eax,1),%ebx # Z ^= Htable[nibble]
173 xorl 12(%esi,%eax,1),%ecx
174 xorl (%esi,%eax,1),%edx
175 xorl 4(%esi,%eax,1),%ebp
176 decl %edi
177 js .L004x86_break
178 movb %bl,%al # unrolled second half: low nibble pass
179 shrdl $4,%ecx,%ebx
180 andb $15,%al
181 shrdl $4,%edx,%ecx
182 shrdl $4,%ebp,%edx
183 shrl $4,%ebp
184 xorl 16(%esp,%eax,4),%ebp
185 movb (%esp,%edi,1),%al
186 shlb $4,%al # low nibble x16
187 xorl 8(%esi,%eax,1),%ebx
188 xorl 12(%esi,%eax,1),%ecx
189 xorl (%esi,%eax,1),%edx
190 xorl 4(%esi,%eax,1),%ebp
191 jmp .L003x86_loop
192.align 16
193.L004x86_break:
194 bswap %ebx # back to big-endian byte order
195 bswap %ecx
196 bswap %edx
197 bswap %ebp
198 movl 112(%esp),%edi # advance the input pointer by one block
199 leal 16(%edi),%edi
200 cmpl 116(%esp),%edi # reached the end pointer?
201 movl %edi,112(%esp)
202 jb .L002x86_outer_loop
203 movl 104(%esp),%edi # store the final Xi
204 movl %ebx,12(%edi)
205 movl %ecx,8(%edi)
206 movl %edx,4(%edi)
207 movl %ebp,(%edi)
208 addl $84,%esp
209 popl %edi # restore and return
210 popl %esi
211 popl %ebx
212 popl %ebp
213 ret
214.size gcm_ghash_4bit_x86,.-.L_gcm_ghash_4bit_x86_begin
/*
 * _mmx_gmult_4bit_inner -- private MMX core shared by gcm_gmult_4bit_mmx
 * and gcm_ghash_4bit_mmx: one 4-bit table-driven GF(2^128) multiplication,
 * fully unrolled over the 16 bytes of Xi (two nibble steps per byte).
 *
 * Register-based calling convention (not C-callable):
 *  In:  %esi = Htable (16 x 16-byte entries; high qword at +0, low at +8)
 *       %edi = Xi; bytes 14..0 are read here, the caller preloads the
 *              index derived from Xi byte 15 into %ebx
 *       %eax = &.Lrem_4bit (PIC base resolved by the caller)
 *  Out: result dwords of Xi in %ebp,%edx,%ecx,%ebx (offsets 0,4,8,12),
 *       already byte-swapped for storing.
 *  Uses %mm0-%mm2 (Z kept as mm1:mm0, mm1 = high qword); the CALLER must
 *  execute emms afterwards.
 */
215.type _mmx_gmult_4bit_inner,@function
216.align 16
217_mmx_gmult_4bit_inner:
218 #ifdef __CET__
219
220.byte 243,15,30,251 # endbr32: CET/IBT landing pad
221 #endif
222
223 xorl %ecx,%ecx
224 movl %ebx,%edx # %ebx = caller-supplied index for Xi byte 15
225 movb %dl,%cl
226 shlb $4,%cl # %cl = low-nibble row offset (x16)
227 andl $240,%edx # %edx = high-nibble row offset (x16)
228 movq 8(%esi,%ecx,1),%mm0 # Z = Htable[lo nibble]; mm0 = low qword
229 movq (%esi,%ecx,1),%mm1 # mm1 = high qword
230 movd %mm0,%ebp # remember low dword: nibble for rem_4bit lookup
231 psrlq $4,%mm0 # Z >>= 4 across the mm1:mm0 pair
232 movq %mm1,%mm2
233 psrlq $4,%mm1
234 pxor 8(%esi,%edx,1),%mm0 # Z ^= Htable[hi nibble]
235 movb 14(%edi),%cl # Xi byte 14
236 psllq $60,%mm2 # bits carried from mm1 down into mm0
237 andl $15,%ebp
238 pxor (%esi,%edx,1),%mm1
239 movl %ecx,%edx
240 movd %mm0,%ebx
241 pxor %mm2,%mm0
242 shlb $4,%cl
243 psrlq $4,%mm0
244 movq %mm1,%mm2
245 psrlq $4,%mm1
246 pxor 8(%esi,%ecx,1),%mm0
247 psllq $60,%mm2
248 andl $240,%edx
249 pxor (%eax,%ebp,8),%mm1 # fold rem_4bit[shifted-out nibble] into high half
250 andl $15,%ebx
251 pxor (%esi,%ecx,1),%mm1
252 movd %mm0,%ebp
253 pxor %mm2,%mm0
254 psrlq $4,%mm0
255 movq %mm1,%mm2
256 psrlq $4,%mm1
257 pxor 8(%esi,%edx,1),%mm0
258 movb 13(%edi),%cl # Xi byte 13 (rounds below repeat the same pattern)
259 psllq $60,%mm2
260 pxor (%eax,%ebx,8),%mm1
261 andl $15,%ebp
262 pxor (%esi,%edx,1),%mm1
263 movl %ecx,%edx
264 movd %mm0,%ebx
265 pxor %mm2,%mm0
266 shlb $4,%cl
267 psrlq $4,%mm0
268 movq %mm1,%mm2
269 psrlq $4,%mm1
270 pxor 8(%esi,%ecx,1),%mm0
271 psllq $60,%mm2
272 andl $240,%edx
273 pxor (%eax,%ebp,8),%mm1
274 andl $15,%ebx
275 pxor (%esi,%ecx,1),%mm1
276 movd %mm0,%ebp
277 pxor %mm2,%mm0
278 psrlq $4,%mm0
279 movq %mm1,%mm2
280 psrlq $4,%mm1
281 pxor 8(%esi,%edx,1),%mm0
282 movb 12(%edi),%cl # Xi byte 12
283 psllq $60,%mm2
284 pxor (%eax,%ebx,8),%mm1
285 andl $15,%ebp
286 pxor (%esi,%edx,1),%mm1
287 movl %ecx,%edx
288 movd %mm0,%ebx
289 pxor %mm2,%mm0
290 shlb $4,%cl
291 psrlq $4,%mm0
292 movq %mm1,%mm2
293 psrlq $4,%mm1
294 pxor 8(%esi,%ecx,1),%mm0
295 psllq $60,%mm2
296 andl $240,%edx
297 pxor (%eax,%ebp,8),%mm1
298 andl $15,%ebx
299 pxor (%esi,%ecx,1),%mm1
300 movd %mm0,%ebp
301 pxor %mm2,%mm0
302 psrlq $4,%mm0
303 movq %mm1,%mm2
304 psrlq $4,%mm1
305 pxor 8(%esi,%edx,1),%mm0
306 movb 11(%edi),%cl # Xi byte 11
307 psllq $60,%mm2
308 pxor (%eax,%ebx,8),%mm1
309 andl $15,%ebp
310 pxor (%esi,%edx,1),%mm1
311 movl %ecx,%edx
312 movd %mm0,%ebx
313 pxor %mm2,%mm0
314 shlb $4,%cl
315 psrlq $4,%mm0
316 movq %mm1,%mm2
317 psrlq $4,%mm1
318 pxor 8(%esi,%ecx,1),%mm0
319 psllq $60,%mm2
320 andl $240,%edx
321 pxor (%eax,%ebp,8),%mm1
322 andl $15,%ebx
323 pxor (%esi,%ecx,1),%mm1
324 movd %mm0,%ebp
325 pxor %mm2,%mm0
326 psrlq $4,%mm0
327 movq %mm1,%mm2
328 psrlq $4,%mm1
329 pxor 8(%esi,%edx,1),%mm0
330 movb 10(%edi),%cl # Xi byte 10
331 psllq $60,%mm2
332 pxor (%eax,%ebx,8),%mm1
333 andl $15,%ebp
334 pxor (%esi,%edx,1),%mm1
335 movl %ecx,%edx
336 movd %mm0,%ebx
337 pxor %mm2,%mm0
338 shlb $4,%cl
339 psrlq $4,%mm0
340 movq %mm1,%mm2
341 psrlq $4,%mm1
342 pxor 8(%esi,%ecx,1),%mm0
343 psllq $60,%mm2
344 andl $240,%edx
345 pxor (%eax,%ebp,8),%mm1
346 andl $15,%ebx
347 pxor (%esi,%ecx,1),%mm1
348 movd %mm0,%ebp
349 pxor %mm2,%mm0
350 psrlq $4,%mm0
351 movq %mm1,%mm2
352 psrlq $4,%mm1
353 pxor 8(%esi,%edx,1),%mm0
354 movb 9(%edi),%cl # Xi byte 9
355 psllq $60,%mm2
356 pxor (%eax,%ebx,8),%mm1
357 andl $15,%ebp
358 pxor (%esi,%edx,1),%mm1
359 movl %ecx,%edx
360 movd %mm0,%ebx
361 pxor %mm2,%mm0
362 shlb $4,%cl
363 psrlq $4,%mm0
364 movq %mm1,%mm2
365 psrlq $4,%mm1
366 pxor 8(%esi,%ecx,1),%mm0
367 psllq $60,%mm2
368 andl $240,%edx
369 pxor (%eax,%ebp,8),%mm1
370 andl $15,%ebx
371 pxor (%esi,%ecx,1),%mm1
372 movd %mm0,%ebp
373 pxor %mm2,%mm0
374 psrlq $4,%mm0
375 movq %mm1,%mm2
376 psrlq $4,%mm1
377 pxor 8(%esi,%edx,1),%mm0
378 movb 8(%edi),%cl # Xi byte 8
379 psllq $60,%mm2
380 pxor (%eax,%ebx,8),%mm1
381 andl $15,%ebp
382 pxor (%esi,%edx,1),%mm1
383 movl %ecx,%edx
384 movd %mm0,%ebx
385 pxor %mm2,%mm0
386 shlb $4,%cl
387 psrlq $4,%mm0
388 movq %mm1,%mm2
389 psrlq $4,%mm1
390 pxor 8(%esi,%ecx,1),%mm0
391 psllq $60,%mm2
392 andl $240,%edx
393 pxor (%eax,%ebp,8),%mm1
394 andl $15,%ebx
395 pxor (%esi,%ecx,1),%mm1
396 movd %mm0,%ebp
397 pxor %mm2,%mm0
398 psrlq $4,%mm0
399 movq %mm1,%mm2
400 psrlq $4,%mm1
401 pxor 8(%esi,%edx,1),%mm0
402 movb 7(%edi),%cl # Xi byte 7
403 psllq $60,%mm2
404 pxor (%eax,%ebx,8),%mm1
405 andl $15,%ebp
406 pxor (%esi,%edx,1),%mm1
407 movl %ecx,%edx
408 movd %mm0,%ebx
409 pxor %mm2,%mm0
410 shlb $4,%cl
411 psrlq $4,%mm0
412 movq %mm1,%mm2
413 psrlq $4,%mm1
414 pxor 8(%esi,%ecx,1),%mm0
415 psllq $60,%mm2
416 andl $240,%edx
417 pxor (%eax,%ebp,8),%mm1
418 andl $15,%ebx
419 pxor (%esi,%ecx,1),%mm1
420 movd %mm0,%ebp
421 pxor %mm2,%mm0
422 psrlq $4,%mm0
423 movq %mm1,%mm2
424 psrlq $4,%mm1
425 pxor 8(%esi,%edx,1),%mm0
426 movb 6(%edi),%cl # Xi byte 6
427 psllq $60,%mm2
428 pxor (%eax,%ebx,8),%mm1
429 andl $15,%ebp
430 pxor (%esi,%edx,1),%mm1
431 movl %ecx,%edx
432 movd %mm0,%ebx
433 pxor %mm2,%mm0
434 shlb $4,%cl
435 psrlq $4,%mm0
436 movq %mm1,%mm2
437 psrlq $4,%mm1
438 pxor 8(%esi,%ecx,1),%mm0
439 psllq $60,%mm2
440 andl $240,%edx
441 pxor (%eax,%ebp,8),%mm1
442 andl $15,%ebx
443 pxor (%esi,%ecx,1),%mm1
444 movd %mm0,%ebp
445 pxor %mm2,%mm0
446 psrlq $4,%mm0
447 movq %mm1,%mm2
448 psrlq $4,%mm1
449 pxor 8(%esi,%edx,1),%mm0
450 movb 5(%edi),%cl # Xi byte 5
451 psllq $60,%mm2
452 pxor (%eax,%ebx,8),%mm1
453 andl $15,%ebp
454 pxor (%esi,%edx,1),%mm1
455 movl %ecx,%edx
456 movd %mm0,%ebx
457 pxor %mm2,%mm0
458 shlb $4,%cl
459 psrlq $4,%mm0
460 movq %mm1,%mm2
461 psrlq $4,%mm1
462 pxor 8(%esi,%ecx,1),%mm0
463 psllq $60,%mm2
464 andl $240,%edx
465 pxor (%eax,%ebp,8),%mm1
466 andl $15,%ebx
467 pxor (%esi,%ecx,1),%mm1
468 movd %mm0,%ebp
469 pxor %mm2,%mm0
470 psrlq $4,%mm0
471 movq %mm1,%mm2
472 psrlq $4,%mm1
473 pxor 8(%esi,%edx,1),%mm0
474 movb 4(%edi),%cl # Xi byte 4
475 psllq $60,%mm2
476 pxor (%eax,%ebx,8),%mm1
477 andl $15,%ebp
478 pxor (%esi,%edx,1),%mm1
479 movl %ecx,%edx
480 movd %mm0,%ebx
481 pxor %mm2,%mm0
482 shlb $4,%cl
483 psrlq $4,%mm0
484 movq %mm1,%mm2
485 psrlq $4,%mm1
486 pxor 8(%esi,%ecx,1),%mm0
487 psllq $60,%mm2
488 andl $240,%edx
489 pxor (%eax,%ebp,8),%mm1
490 andl $15,%ebx
491 pxor (%esi,%ecx,1),%mm1
492 movd %mm0,%ebp
493 pxor %mm2,%mm0
494 psrlq $4,%mm0
495 movq %mm1,%mm2
496 psrlq $4,%mm1
497 pxor 8(%esi,%edx,1),%mm0
498 movb 3(%edi),%cl # Xi byte 3
499 psllq $60,%mm2
500 pxor (%eax,%ebx,8),%mm1
501 andl $15,%ebp
502 pxor (%esi,%edx,1),%mm1
503 movl %ecx,%edx
504 movd %mm0,%ebx
505 pxor %mm2,%mm0
506 shlb $4,%cl
507 psrlq $4,%mm0
508 movq %mm1,%mm2
509 psrlq $4,%mm1
510 pxor 8(%esi,%ecx,1),%mm0
511 psllq $60,%mm2
512 andl $240,%edx
513 pxor (%eax,%ebp,8),%mm1
514 andl $15,%ebx
515 pxor (%esi,%ecx,1),%mm1
516 movd %mm0,%ebp
517 pxor %mm2,%mm0
518 psrlq $4,%mm0
519 movq %mm1,%mm2
520 psrlq $4,%mm1
521 pxor 8(%esi,%edx,1),%mm0
522 movb 2(%edi),%cl # Xi byte 2
523 psllq $60,%mm2
524 pxor (%eax,%ebx,8),%mm1
525 andl $15,%ebp
526 pxor (%esi,%edx,1),%mm1
527 movl %ecx,%edx
528 movd %mm0,%ebx
529 pxor %mm2,%mm0
530 shlb $4,%cl
531 psrlq $4,%mm0
532 movq %mm1,%mm2
533 psrlq $4,%mm1
534 pxor 8(%esi,%ecx,1),%mm0
535 psllq $60,%mm2
536 andl $240,%edx
537 pxor (%eax,%ebp,8),%mm1
538 andl $15,%ebx
539 pxor (%esi,%ecx,1),%mm1
540 movd %mm0,%ebp
541 pxor %mm2,%mm0
542 psrlq $4,%mm0
543 movq %mm1,%mm2
544 psrlq $4,%mm1
545 pxor 8(%esi,%edx,1),%mm0
546 movb 1(%edi),%cl # Xi byte 1
547 psllq $60,%mm2
548 pxor (%eax,%ebx,8),%mm1
549 andl $15,%ebp
550 pxor (%esi,%edx,1),%mm1
551 movl %ecx,%edx
552 movd %mm0,%ebx
553 pxor %mm2,%mm0
554 shlb $4,%cl
555 psrlq $4,%mm0
556 movq %mm1,%mm2
557 psrlq $4,%mm1
558 pxor 8(%esi,%ecx,1),%mm0
559 psllq $60,%mm2
560 andl $240,%edx
561 pxor (%eax,%ebp,8),%mm1
562 andl $15,%ebx
563 pxor (%esi,%ecx,1),%mm1
564 movd %mm0,%ebp
565 pxor %mm2,%mm0
566 psrlq $4,%mm0
567 movq %mm1,%mm2
568 psrlq $4,%mm1
569 pxor 8(%esi,%edx,1),%mm0
570 movb (%edi),%cl # Xi byte 0 (last round)
571 psllq $60,%mm2
572 pxor (%eax,%ebx,8),%mm1
573 andl $15,%ebp
574 pxor (%esi,%edx,1),%mm1
575 movl %ecx,%edx
576 movd %mm0,%ebx
577 pxor %mm2,%mm0
578 shlb $4,%cl
579 psrlq $4,%mm0
580 movq %mm1,%mm2
581 psrlq $4,%mm1
582 pxor 8(%esi,%ecx,1),%mm0
583 psllq $60,%mm2
584 andl $240,%edx
585 pxor (%eax,%ebp,8),%mm1
586 andl $15,%ebx
587 pxor (%esi,%ecx,1),%mm1
588 movd %mm0,%ebp
589 pxor %mm2,%mm0
590 psrlq $4,%mm0
591 movq %mm1,%mm2
592 psrlq $4,%mm1
593 pxor 8(%esi,%edx,1),%mm0
594 psllq $60,%mm2
595 pxor (%eax,%ebx,8),%mm1
596 andl $15,%ebp
597 pxor (%esi,%edx,1),%mm1
598 movd %mm0,%ebx # unpack Z into %ebp,%edx,%ecx,%ebx
599 pxor %mm2,%mm0
600 movl 4(%eax,%ebp,8),%edi # final reduction term: high dword of rem_4bit entry
601 psrlq $32,%mm0
602 movd %mm1,%edx
603 psrlq $32,%mm1
604 movd %mm0,%ecx
605 movd %mm1,%ebp
606 shll $4,%edi
607 bswap %ebx # convert result words to Xi's big-endian order
608 bswap %edx
609 bswap %ecx
610 xorl %edi,%ebp # fold the final reduction into the top word
611 bswap %ebp
612 ret
613.size _mmx_gmult_4bit_inner,.-_mmx_gmult_4bit_inner
/*
 * void gcm_gmult_4bit_mmx(u64 Xi[2], const u128 Htable[16])
 *
 * MMX flavour of gcm_gmult_4bit_x86: thin wrapper that sets up the
 * register arguments for _mmx_gmult_4bit_inner and stores its result.
 * Stack args (after 4 pushes): 20(%esp)=Xi, 24(%esp)=Htable.
 */
614.globl gcm_gmult_4bit_mmx
615.type gcm_gmult_4bit_mmx,@function
616.align 16
617gcm_gmult_4bit_mmx:
618.L_gcm_gmult_4bit_mmx_begin:
619 #ifdef __CET__
620
621.byte 243,15,30,251 # endbr32: CET/IBT landing pad
622 #endif
623
624 pushl %ebp # save callee-saved registers
625 pushl %ebx
626 pushl %esi
627 pushl %edi
628 movl 20(%esp),%edi # %edi = Xi
629 movl 24(%esp),%esi # %esi = Htable
630 call .L005pic_point # call/pop idiom: fetch EIP for PIC addressing
631.L005pic_point:
632 popl %eax
633 leal .Lrem_4bit-.L005pic_point(%eax),%eax # %eax = &.Lrem_4bit
634 movzbl 15(%edi),%ebx # preload index from the last byte of Xi
635 call _mmx_gmult_4bit_inner # result returned in %ebp,%edx,%ecx,%ebx
636 movl 20(%esp),%edi # reload Xi pointer
637 emms # clear MMX state before returning to C code
638 movl %ebx,12(%edi) # store the result into Xi
639 movl %edx,4(%edi)
640 movl %ecx,8(%edi)
641 movl %ebp,(%edi)
642 popl %edi # restore and return
643 popl %esi
644 popl %ebx
645 popl %ebp
646 ret
647.size gcm_gmult_4bit_mmx,.-.L_gcm_gmult_4bit_mmx_begin
/*
 * void gcm_ghash_4bit_mmx(u64 Xi[2], const u128 Htable[16],
 *                         const u8 *inp, size_t len)
 *
 * MMX flavour of gcm_ghash_4bit_x86: per 16-byte block,
 * Xi <- (Xi ^ block) * H via _mmx_gmult_4bit_inner.  The loop consumes
 * 16 bytes per iteration, so len is assumed to be a multiple of 16.
 * Stack args at entry (after 4 pushes): 20=Xi, 24=Htable, 28=inp, 32=len;
 * after the 20-byte local frame these become 40/44/48/52(%esp).
 */
648.globl gcm_ghash_4bit_mmx
649.type gcm_ghash_4bit_mmx,@function
650.align 16
651gcm_ghash_4bit_mmx:
652.L_gcm_ghash_4bit_mmx_begin:
653 #ifdef __CET__
654
655.byte 243,15,30,251 # endbr32: CET/IBT landing pad
656 #endif
657
658 pushl %ebp # save callee-saved registers
659 pushl %ebx
660 pushl %esi
661 pushl %edi
662 movl 20(%esp),%ebp # %ebp = Xi
663 movl 24(%esp),%esi # %esi = Htable
664 movl 28(%esp),%edi # %edi = inp
665 movl 32(%esp),%ecx # %ecx = len
666 call .L006pic_point # call/pop idiom: fetch EIP for PIC addressing
667.L006pic_point:
668 popl %eax
669 leal .Lrem_4bit-.L006pic_point(%eax),%eax # %eax = &.Lrem_4bit
670 addl %edi,%ecx # end pointer = inp + len
671 movl %ecx,32(%esp) # reuse the len slot for the end pointer
672 subl $20,%esp # frame: 16-byte Xi working copy + scratch slot
673 movl 12(%ebp),%ebx # load Xi dwords
674 movl 4(%ebp),%edx
675 movl 8(%ebp),%ecx
676 movl (%ebp),%ebp
677 jmp .L007mmx_outer_loop
678.align 16
679.L007mmx_outer_loop: # per block: Xi ^= inp[0..15]; Xi *= H
680 xorl 12(%edi),%ebx # XOR input block into Xi
681 xorl 4(%edi),%edx
682 xorl 8(%edi),%ecx
683 xorl (%edi),%ebp
684 movl %edi,48(%esp) # save inp (original 28(%esp), now +20)
685 movl %ebx,12(%esp) # working copy of Xi^block at the frame bottom
686 movl %edx,4(%esp)
687 movl %ecx,8(%esp)
688 movl %ebp,(%esp)
689 movl %esp,%edi # inner routine reads bytes from the stack copy
690 shrl $24,%ebx # %ebx = byte 15 of the copy = first nibble index
691 call _mmx_gmult_4bit_inner # result in %ebp,%edx,%ecx,%ebx
692 movl 48(%esp),%edi # reload and advance the input pointer
693 leal 16(%edi),%edi
694 cmpl 52(%esp),%edi # reached the end pointer (original 32(%esp))?
695 jb .L007mmx_outer_loop
696 movl 40(%esp),%edi # %edi = Xi (original 20(%esp))
697 emms # clear MMX state before returning to C code
698 movl %ebx,12(%edi) # store the final Xi
699 movl %edx,4(%edi)
700 movl %ecx,8(%edi)
701 movl %ebp,(%edi)
702 addl $20,%esp
703 popl %edi # restore and return
704 popl %esi
705 popl %ebx
706 popl %ebp
707 ret
708.size gcm_ghash_4bit_mmx,.-.L_gcm_ghash_4bit_mmx_begin
709.align 64
/* Per-nibble GF(2^128) reduction constants for the MMX path: sixteen
   64-bit entries (low dword, high dword pairs), indexed by the 4-bit
   value shifted out of Z; fetched 8 bytes at a time via (%eax,%idx,8)
   in _mmx_gmult_4bit_inner. */
710.Lrem_4bit:
711.long 0,0,0,29491200,0,58982400,0,38141952
712.long 0,117964800,0,113901568,0,76283904,0,88997888
713.long 0,235929600,0,265420800,0,227803136,0,206962688
714.long 0,152567808,0,148504576,0,177995776,0,190709760
/* NUL-terminated ASCII credit string:
   "GHASH for x86, CRYPTOGAMS by <appro@openssl.org>" */
715.byte 71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67
716.byte 82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112
717.byte 112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62
718.byte 0
719
/* GNU property note (NT_GNU_PROPERTY_TYPE_0, type 5): advertises x86
   CET feature bits to the linker/loader.  0xc0000002 is
   GNU_PROPERTY_X86_FEATURE_1_AND; value 3 presumably = IBT|SHSTK,
   matching the endbr32 (.byte 243,15,30,251) landing pads emitted at
   each function entry above -- confirm against the x86 psABI. */
720 .section ".note.gnu.property", "a"
721 .p2align 2
722 .long 1f - 0f # note name size
723 .long 4f - 1f # note descriptor size
724 .long 5 # note type: NT_GNU_PROPERTY_TYPE_0
7250:
726 .asciz "GNU"
7271:
728 .p2align 2
729 .long 0xc0000002 # property: GNU_PROPERTY_X86_FEATURE_1_AND
730 .long 3f - 2f # property data size
7312:
732 .long 3 # feature bits (IBT | SHSTK)
7333:
734 .p2align 2
7354:
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle — Support · Privacy / Do Not Sell My Info · Terms of Use · Trademark Policy · Automated Access Etiquette