VirtualBox

source: vbox/trunk/src/libs/openssl-3.0.1/crypto/genasm-elf/ghash-x86.S@94083

Last change on this file was in revision 94083, checked in by vboxsync, 3 years ago

libs/openssl-3.0.1: Recreate asm files, bugref:10128

File size: 13.2 KB
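# GHASH for 32-bit x86 (ELF, AT&T syntax): 4-bit table-driven implementation
# with plain-x86 and MMX code paths. Judging by the path and the CRYPTOGAMS
# banner at the end of the file, this is the pregenerated perlasm output of
# OpenSSL's crypto/modes/asm/ghash-x86.pl. The ".byte 243,15,30,251" sequence
# at each entry point encodes the endbr32 instruction (Intel CET
# indirect-branch tracking marker).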
.text
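# gcm_gmult_4bit_x86: multiply the 128-bit accumulator Xi in place by the
# hashed key H in GF(2^128), four bits at a time, using the caller-supplied
# 16-entry table of premultiplied key values. The corresponding prototype in
# OpenSSL's gcm128.c is roughly:
#   void gcm_gmult_4bit_x86(u64 Xi[2], const u128 Htable[16]);
# The constants written to 16(%esp)..76(%esp) below form the on-stack
# reduction table (rem_4bit << 16) for the GHASH polynomial.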
.globl gcm_gmult_4bit_x86
.type gcm_gmult_4bit_x86,@function
.align 16
gcm_gmult_4bit_x86:
.L_gcm_gmult_4bit_x86_begin:
.byte 243,15,30,251
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	subl $84,%esp
	movl 104(%esp),%edi
	movl 108(%esp),%esi
	movl (%edi),%ebp
	movl 4(%edi),%edx
	movl 8(%edi),%ecx
	movl 12(%edi),%ebx
	movl $0,16(%esp)
	movl $471859200,20(%esp)
	movl $943718400,24(%esp)
	movl $610271232,28(%esp)
	movl $1887436800,32(%esp)
	movl $1822425088,36(%esp)
	movl $1220542464,40(%esp)
	movl $1423966208,44(%esp)
	movl $3774873600,48(%esp)
	movl $4246732800,52(%esp)
	movl $3644850176,56(%esp)
	movl $3311403008,60(%esp)
	movl $2441084928,64(%esp)
	movl $2376073216,68(%esp)
	movl $2847932416,72(%esp)
	movl $3051356160,76(%esp)
	movl %ebp,(%esp)
	movl %edx,4(%esp)
	movl %ecx,8(%esp)
	movl %ebx,12(%esp)
	shrl $20,%ebx
	andl $240,%ebx
	movl 4(%esi,%ebx,1),%ebp
	movl (%esi,%ebx,1),%edx
	movl 12(%esi,%ebx,1),%ecx
	movl 8(%esi,%ebx,1),%ebx
	xorl %eax,%eax
	movl $15,%edi
	jmp .L000x86_loop
.align 16
.L000x86_loop:
	movb %bl,%al
	shrdl $4,%ecx,%ebx
	andb $15,%al
	shrdl $4,%edx,%ecx
	shrdl $4,%ebp,%edx
	shrl $4,%ebp
	xorl 16(%esp,%eax,4),%ebp
	movb (%esp,%edi,1),%al
	andb $240,%al
	xorl 8(%esi,%eax,1),%ebx
	xorl 12(%esi,%eax,1),%ecx
	xorl (%esi,%eax,1),%edx
	xorl 4(%esi,%eax,1),%ebp
	decl %edi
	js .L001x86_break
	movb %bl,%al
	shrdl $4,%ecx,%ebx
	andb $15,%al
	shrdl $4,%edx,%ecx
	shrdl $4,%ebp,%edx
	shrl $4,%ebp
	xorl 16(%esp,%eax,4),%ebp
	movb (%esp,%edi,1),%al
	shlb $4,%al
	xorl 8(%esi,%eax,1),%ebx
	xorl 12(%esi,%eax,1),%ecx
	xorl (%esi,%eax,1),%edx
	xorl 4(%esi,%eax,1),%ebp
	jmp .L000x86_loop
.align 16
.L001x86_break:
	bswap %ebx
	bswap %ecx
	bswap %edx
	bswap %ebp
	movl 104(%esp),%edi
	movl %ebx,12(%edi)
	movl %ecx,8(%edi)
	movl %edx,4(%edi)
	movl %ebp,(%edi)
	addl $84,%esp
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size gcm_gmult_4bit_x86,.-.L_gcm_gmult_4bit_x86_begin
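# gcm_ghash_4bit_x86: fold `len` bytes at `inp` into the accumulator, i.e.
# Xi = (Xi ^ block) * H for each 16-byte block; `len` is expected to be a
# multiple of 16. Roughly:
#   void gcm_ghash_4bit_x86(u64 Xi[2], const u128 Htable[16],
#                           const u8 *inp, size_t len);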
.globl gcm_ghash_4bit_x86
.type gcm_ghash_4bit_x86,@function
.align 16
gcm_ghash_4bit_x86:
.L_gcm_ghash_4bit_x86_begin:
.byte 243,15,30,251
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	subl $84,%esp
	movl 104(%esp),%ebx
	movl 108(%esp),%esi
	movl 112(%esp),%edi
	movl 116(%esp),%ecx
	addl %edi,%ecx
	movl %ecx,116(%esp)
	movl (%ebx),%ebp
	movl 4(%ebx),%edx
	movl 8(%ebx),%ecx
	movl 12(%ebx),%ebx
	movl $0,16(%esp)
	movl $471859200,20(%esp)
	movl $943718400,24(%esp)
	movl $610271232,28(%esp)
	movl $1887436800,32(%esp)
	movl $1822425088,36(%esp)
	movl $1220542464,40(%esp)
	movl $1423966208,44(%esp)
	movl $3774873600,48(%esp)
	movl $4246732800,52(%esp)
	movl $3644850176,56(%esp)
	movl $3311403008,60(%esp)
	movl $2441084928,64(%esp)
	movl $2376073216,68(%esp)
	movl $2847932416,72(%esp)
	movl $3051356160,76(%esp)
.align 16
.L002x86_outer_loop:
	xorl 12(%edi),%ebx
	xorl 8(%edi),%ecx
	xorl 4(%edi),%edx
	xorl (%edi),%ebp
	movl %ebx,12(%esp)
	movl %ecx,8(%esp)
	movl %edx,4(%esp)
	movl %ebp,(%esp)
	shrl $20,%ebx
	andl $240,%ebx
	movl 4(%esi,%ebx,1),%ebp
	movl (%esi,%ebx,1),%edx
	movl 12(%esi,%ebx,1),%ecx
	movl 8(%esi,%ebx,1),%ebx
	xorl %eax,%eax
	movl $15,%edi
	jmp .L003x86_loop
.align 16
.L003x86_loop:
	movb %bl,%al
	shrdl $4,%ecx,%ebx
	andb $15,%al
	shrdl $4,%edx,%ecx
	shrdl $4,%ebp,%edx
	shrl $4,%ebp
	xorl 16(%esp,%eax,4),%ebp
	movb (%esp,%edi,1),%al
	andb $240,%al
	xorl 8(%esi,%eax,1),%ebx
	xorl 12(%esi,%eax,1),%ecx
	xorl (%esi,%eax,1),%edx
	xorl 4(%esi,%eax,1),%ebp
	decl %edi
	js .L004x86_break
	movb %bl,%al
	shrdl $4,%ecx,%ebx
	andb $15,%al
	shrdl $4,%edx,%ecx
	shrdl $4,%ebp,%edx
	shrl $4,%ebp
	xorl 16(%esp,%eax,4),%ebp
	movb (%esp,%edi,1),%al
	shlb $4,%al
	xorl 8(%esi,%eax,1),%ebx
	xorl 12(%esi,%eax,1),%ecx
	xorl (%esi,%eax,1),%edx
	xorl 4(%esi,%eax,1),%ebp
	jmp .L003x86_loop
.align 16
.L004x86_break:
	bswap %ebx
	bswap %ecx
	bswap %edx
	bswap %ebp
	movl 112(%esp),%edi
	leal 16(%edi),%edi
	cmpl 116(%esp),%edi
	movl %edi,112(%esp)
	jb .L002x86_outer_loop
	movl 104(%esp),%edi
	movl %ebx,12(%edi)
	movl %ecx,8(%edi)
	movl %edx,4(%edi)
	movl %ebp,(%edi)
	addl $84,%esp
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size gcm_ghash_4bit_x86,.-.L_gcm_ghash_4bit_x86_begin
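# _mmx_gmult_4bit_inner: local helper shared by the two MMX entry points; one
# fully unrolled 4-bit-per-step GF(2^128) multiplication held in %mm0/%mm1.
# On entry %esi points at Htable, %edi at the 16-byte block, %eax at
# .Lrem_4bit, and %ebx holds byte 15 of the block; the byte-swapped result is
# returned in %ebp/%edx/%ecx/%ebx (words 0..3 of Xi).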
.type _mmx_gmult_4bit_inner,@function
.align 16
_mmx_gmult_4bit_inner:
.byte 243,15,30,251
	xorl %ecx,%ecx
	movl %ebx,%edx
	movb %dl,%cl
	shlb $4,%cl
	andl $240,%edx
	movq 8(%esi,%ecx,1),%mm0
	movq (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 14(%edi),%cl
	psllq $60,%mm2
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 13(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 12(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 11(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 10(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 9(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 8(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 7(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 6(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 5(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 4(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 3(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 2(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 1(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb (%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movd %mm0,%ebx
	pxor %mm2,%mm0
	movl 4(%eax,%ebp,8),%edi
	psrlq $32,%mm0
	movd %mm1,%edx
	psrlq $32,%mm1
	movd %mm0,%ecx
	movd %mm1,%ebp
	shll $4,%edi
	bswap %ebx
	bswap %edx
	bswap %ecx
	xorl %edi,%ebp
	bswap %ebp
	ret
.size _mmx_gmult_4bit_inner,.-_mmx_gmult_4bit_inner
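# gcm_gmult_4bit_mmx: public MMX entry point with the same contract as
# gcm_gmult_4bit_x86. It resolves the address of .Lrem_4bit with a
# position-independent call/pop sequence and delegates to
# _mmx_gmult_4bit_inner.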
.globl gcm_gmult_4bit_mmx
.type gcm_gmult_4bit_mmx,@function
.align 16
gcm_gmult_4bit_mmx:
.L_gcm_gmult_4bit_mmx_begin:
.byte 243,15,30,251
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	movl 20(%esp),%edi
	movl 24(%esp),%esi
	call .L005pic_point
.L005pic_point:
	popl %eax
	leal .Lrem_4bit-.L005pic_point(%eax),%eax
	movzbl 15(%edi),%ebx
	call _mmx_gmult_4bit_inner
	movl 20(%esp),%edi
	emms
	movl %ebx,12(%edi)
	movl %edx,4(%edi)
	movl %ecx,8(%edi)
	movl %ebp,(%edi)
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size gcm_gmult_4bit_mmx,.-.L_gcm_gmult_4bit_mmx_begin
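# gcm_ghash_4bit_mmx: public MMX entry point with the same contract as
# gcm_ghash_4bit_x86. Each 16-byte block of input is XORed into an on-stack
# copy of Xi and _mmx_gmult_4bit_inner is called once per block.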
.globl gcm_ghash_4bit_mmx
.type gcm_ghash_4bit_mmx,@function
.align 16
gcm_ghash_4bit_mmx:
.L_gcm_ghash_4bit_mmx_begin:
.byte 243,15,30,251
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	movl 20(%esp),%ebp
	movl 24(%esp),%esi
	movl 28(%esp),%edi
	movl 32(%esp),%ecx
	call .L006pic_point
.L006pic_point:
	popl %eax
	leal .Lrem_4bit-.L006pic_point(%eax),%eax
	addl %edi,%ecx
	movl %ecx,32(%esp)
	subl $20,%esp
	movl 12(%ebp),%ebx
	movl 4(%ebp),%edx
	movl 8(%ebp),%ecx
	movl (%ebp),%ebp
	jmp .L007mmx_outer_loop
.align 16
.L007mmx_outer_loop:
	xorl 12(%edi),%ebx
	xorl 4(%edi),%edx
	xorl 8(%edi),%ecx
	xorl (%edi),%ebp
	movl %edi,48(%esp)
	movl %ebx,12(%esp)
	movl %edx,4(%esp)
	movl %ecx,8(%esp)
	movl %ebp,(%esp)
	movl %esp,%edi
	shrl $24,%ebx
	call _mmx_gmult_4bit_inner
	movl 48(%esp),%edi
	leal 16(%edi),%edi
	cmpl 52(%esp),%edi
	jb .L007mmx_outer_loop
	movl 40(%esp),%edi
	emms
	movl %ebx,12(%edi)
	movl %edx,4(%edi)
	movl %ecx,8(%edi)
	movl %ebp,(%edi)
	addl $20,%esp
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size gcm_ghash_4bit_mmx,.-.L_gcm_ghash_4bit_mmx_begin
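# .Lrem_4bit: 16-entry (8 bytes each) reduction table used by the MMX path;
# entry n holds the GF(2^128) reduction constant for a nibble value of n
# shifted out during the 4-bit shifts. The trailing .byte strings spell the
# CRYPTOGAMS banner "GHASH for x86, CRYPTOGAMS by <appro@openssl.org>".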
.align 64
.Lrem_4bit:
.long 0,0,0,29491200,0,58982400,0,38141952
.long 0,117964800,0,113901568,0,76283904,0,88997888
.long 0,235929600,0,265420800,0,227803136,0,206962688
.long 0,152567808,0,148504576,0,177995776,0,190709760
.byte 71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67
.byte 82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112
.byte 112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62
.byte 0
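# The .note.gnu.property section below advertises CET compatibility to the
# linker/loader: property type 0xc0000002 is GNU_PROPERTY_X86_FEATURE_1_AND
# and the value 3 sets the IBT and SHSTK bits, matching the endbr32 markers
# at the function entry points.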
	.section ".note.gnu.property", "a"
	.p2align 2
	.long 1f - 0f
	.long 4f - 1f
	.long 5
0:
	.asciz "GNU"
1:
	.p2align 2
	.long 0xc0000002
	.long 3f - 2f
2:
	.long 3
3:
	.p2align 2
4: