VirtualBox

source: vbox/trunk/src/libs/openssl-3.0.1/crypto/genasm-elf/ghash-x86_64.S@94083

Last change on this file was revision 94083, checked in by vboxsync, 3 years ago:

libs/openssl-3.0.1: Recreate asm files, bugref:10128

File size: 25.9 KB
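This is the pre-generated GNU-assembler (ELF) form of OpenSSL's crypto/modes/asm/ghash-x86_64.pl: GHASH, the GF(2^128) universal hash inside GCM, where each 16-byte block updates the accumulator as Xi = (Xi xor block) * H modulo x^128 + x^7 + x^2 + x + 1. Three implementations follow: a table-driven "4-bit" path, a PCLMULQDQ (CLMUL) path, and AVX-named entry points that alias the CLMUL one. As a minimal sketch of the C interface these symbols are called through, assuming the declarations in OpenSSL's crypto/modes/gcm128.c (u64/u8 are the usual fixed-width types; u128 is assumed to be a two-u64 struct):

    typedef struct { u64 hi, lo; } u128;
    /* key schedule for the CLMUL paths: Htable holds H^1..H^4 (+ xor'ed halves) */
    void gcm_init_clmul(u128 Htable[16], const u64 H[2]);
    /* one multiplication: Xi = Xi * H */
    void gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16]);
    /* bulk update: absorb len bytes at inp into Xi */
    void gcm_ghash_4bit(u64 Xi[2], const u128 Htable[16],
                        const u8 *inp, size_t len);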
.text


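# gcm_gmult_4bit: multiply the 128-bit hash value Xi (at %rdi) by H in
# GF(2^128), using the 4-bit table at %rsi (Htable, 16 entries of 16
# bytes) and the .Lrem_4bit reduction table. The raw .byte sequences
# encode endbr64 (243,15,30,250) and rep ret (0xf3,0xc3) for assemblers
# that predate those spellings.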
.globl	gcm_gmult_4bit
.type	gcm_gmult_4bit,@function
.align	16
gcm_gmult_4bit:
.cfi_startproc
.byte	243,15,30,250
	pushq	%rbx
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbx,-16
	pushq	%rbp
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbp,-24
	pushq	%r12
.cfi_adjust_cfa_offset	8
.cfi_offset	%r12,-32
	pushq	%r13
.cfi_adjust_cfa_offset	8
.cfi_offset	%r13,-40
	pushq	%r14
.cfi_adjust_cfa_offset	8
.cfi_offset	%r14,-48
	pushq	%r15
.cfi_adjust_cfa_offset	8
.cfi_offset	%r15,-56
	subq	$280,%rsp
.cfi_adjust_cfa_offset	280
.Lgmult_prologue:

	movzbq	15(%rdi),%r8
	leaq	.Lrem_4bit(%rip),%r11
	xorq	%rax,%rax
	xorq	%rbx,%rbx
	movb	%r8b,%al
	movb	%r8b,%bl
	shlb	$4,%al
	movq	$14,%rcx
	movq	8(%rsi,%rax,1),%r8
	movq	(%rsi,%rax,1),%r9
	andb	$0xf0,%bl
	movq	%r8,%rdx
	jmp	.Loop1

.align	16
.Loop1:
	shrq	$4,%r8
	andq	$0xf,%rdx
	movq	%r9,%r10
	movb	(%rdi,%rcx,1),%al
	shrq	$4,%r9
	xorq	8(%rsi,%rbx,1),%r8
	shlq	$60,%r10
	xorq	(%rsi,%rbx,1),%r9
	movb	%al,%bl
	xorq	(%r11,%rdx,8),%r9
	movq	%r8,%rdx
	shlb	$4,%al
	xorq	%r10,%r8
	decq	%rcx
	js	.Lbreak1

	shrq	$4,%r8
	andq	$0xf,%rdx
	movq	%r9,%r10
	shrq	$4,%r9
	xorq	8(%rsi,%rax,1),%r8
	shlq	$60,%r10
	xorq	(%rsi,%rax,1),%r9
	andb	$0xf0,%bl
	xorq	(%r11,%rdx,8),%r9
	movq	%r8,%rdx
	xorq	%r10,%r8
	jmp	.Loop1

.align	16
.Lbreak1:
	shrq	$4,%r8
	andq	$0xf,%rdx
	movq	%r9,%r10
	shrq	$4,%r9
	xorq	8(%rsi,%rax,1),%r8
	shlq	$60,%r10
	xorq	(%rsi,%rax,1),%r9
	andb	$0xf0,%bl
	xorq	(%r11,%rdx,8),%r9
	movq	%r8,%rdx
	xorq	%r10,%r8

	shrq	$4,%r8
	andq	$0xf,%rdx
	movq	%r9,%r10
	shrq	$4,%r9
	xorq	8(%rsi,%rbx,1),%r8
	shlq	$60,%r10
	xorq	(%rsi,%rbx,1),%r9
	xorq	%r10,%r8
	xorq	(%r11,%rdx,8),%r9

	bswapq	%r8
	bswapq	%r9
	movq	%r8,8(%rdi)
	movq	%r9,(%rdi)

	leaq	280+48(%rsp),%rsi
.cfi_def_cfa	%rsi,8
	movq	-8(%rsi),%rbx
.cfi_restore	%rbx
	leaq	(%rsi),%rsp
.cfi_def_cfa_register	%rsp
.Lgmult_epilogue:
	.byte	0xf3,0xc3
.cfi_endproc
.size	gcm_gmult_4bit,.-gcm_gmult_4bit
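# gcm_ghash_4bit: Xi (%rdi) absorbs the buffer at %rdx, length in %rcx:
# Xi = (Xi ^ block) * H per 16-byte block. The unrolled prologue spills
# a nibble-shifted copy of Htable (%rsi) to the stack, plus a byte table
# of each entry's low nibble, and .Lrem_8bit supplies the reduction for
# the byte-at-a-time walk in .Louter_loop.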
.globl	gcm_ghash_4bit
.type	gcm_ghash_4bit,@function
.align	16
gcm_ghash_4bit:
.cfi_startproc
.byte	243,15,30,250
	pushq	%rbx
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbx,-16
	pushq	%rbp
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbp,-24
	pushq	%r12
.cfi_adjust_cfa_offset	8
.cfi_offset	%r12,-32
	pushq	%r13
.cfi_adjust_cfa_offset	8
.cfi_offset	%r13,-40
	pushq	%r14
.cfi_adjust_cfa_offset	8
.cfi_offset	%r14,-48
	pushq	%r15
.cfi_adjust_cfa_offset	8
.cfi_offset	%r15,-56
	subq	$280,%rsp
.cfi_adjust_cfa_offset	280
.Lghash_prologue:
	movq	%rdx,%r14
	movq	%rcx,%r15
	subq	$-128,%rsi
	leaq	16+128(%rsp),%rbp
	xorl	%edx,%edx
	movq	0+0-128(%rsi),%r8
	movq	0+8-128(%rsi),%rax
	movb	%al,%dl
	shrq	$4,%rax
	movq	%r8,%r10
	shrq	$4,%r8
	movq	16+0-128(%rsi),%r9
	shlb	$4,%dl
	movq	16+8-128(%rsi),%rbx
	shlq	$60,%r10
	movb	%dl,0(%rsp)
	orq	%r10,%rax
	movb	%bl,%dl
	shrq	$4,%rbx
	movq	%r9,%r10
	shrq	$4,%r9
	movq	%r8,0(%rbp)
	movq	32+0-128(%rsi),%r8
	shlb	$4,%dl
	movq	%rax,0-128(%rbp)
	movq	32+8-128(%rsi),%rax
	shlq	$60,%r10
	movb	%dl,1(%rsp)
	orq	%r10,%rbx
	movb	%al,%dl
	shrq	$4,%rax
	movq	%r8,%r10
	shrq	$4,%r8
	movq	%r9,8(%rbp)
	movq	48+0-128(%rsi),%r9
	shlb	$4,%dl
	movq	%rbx,8-128(%rbp)
	movq	48+8-128(%rsi),%rbx
	shlq	$60,%r10
	movb	%dl,2(%rsp)
	orq	%r10,%rax
	movb	%bl,%dl
	shrq	$4,%rbx
	movq	%r9,%r10
	shrq	$4,%r9
	movq	%r8,16(%rbp)
	movq	64+0-128(%rsi),%r8
	shlb	$4,%dl
	movq	%rax,16-128(%rbp)
	movq	64+8-128(%rsi),%rax
	shlq	$60,%r10
	movb	%dl,3(%rsp)
	orq	%r10,%rbx
	movb	%al,%dl
	shrq	$4,%rax
	movq	%r8,%r10
	shrq	$4,%r8
	movq	%r9,24(%rbp)
	movq	80+0-128(%rsi),%r9
	shlb	$4,%dl
	movq	%rbx,24-128(%rbp)
	movq	80+8-128(%rsi),%rbx
	shlq	$60,%r10
	movb	%dl,4(%rsp)
	orq	%r10,%rax
	movb	%bl,%dl
	shrq	$4,%rbx
	movq	%r9,%r10
	shrq	$4,%r9
	movq	%r8,32(%rbp)
	movq	96+0-128(%rsi),%r8
	shlb	$4,%dl
	movq	%rax,32-128(%rbp)
	movq	96+8-128(%rsi),%rax
	shlq	$60,%r10
	movb	%dl,5(%rsp)
	orq	%r10,%rbx
	movb	%al,%dl
	shrq	$4,%rax
	movq	%r8,%r10
	shrq	$4,%r8
	movq	%r9,40(%rbp)
	movq	112+0-128(%rsi),%r9
	shlb	$4,%dl
	movq	%rbx,40-128(%rbp)
	movq	112+8-128(%rsi),%rbx
	shlq	$60,%r10
	movb	%dl,6(%rsp)
	orq	%r10,%rax
	movb	%bl,%dl
	shrq	$4,%rbx
	movq	%r9,%r10
	shrq	$4,%r9
	movq	%r8,48(%rbp)
	movq	128+0-128(%rsi),%r8
	shlb	$4,%dl
	movq	%rax,48-128(%rbp)
	movq	128+8-128(%rsi),%rax
	shlq	$60,%r10
	movb	%dl,7(%rsp)
	orq	%r10,%rbx
	movb	%al,%dl
	shrq	$4,%rax
	movq	%r8,%r10
	shrq	$4,%r8
	movq	%r9,56(%rbp)
	movq	144+0-128(%rsi),%r9
	shlb	$4,%dl
	movq	%rbx,56-128(%rbp)
	movq	144+8-128(%rsi),%rbx
	shlq	$60,%r10
	movb	%dl,8(%rsp)
	orq	%r10,%rax
	movb	%bl,%dl
	shrq	$4,%rbx
	movq	%r9,%r10
	shrq	$4,%r9
	movq	%r8,64(%rbp)
	movq	160+0-128(%rsi),%r8
	shlb	$4,%dl
	movq	%rax,64-128(%rbp)
	movq	160+8-128(%rsi),%rax
	shlq	$60,%r10
	movb	%dl,9(%rsp)
	orq	%r10,%rbx
	movb	%al,%dl
	shrq	$4,%rax
	movq	%r8,%r10
	shrq	$4,%r8
	movq	%r9,72(%rbp)
	movq	176+0-128(%rsi),%r9
	shlb	$4,%dl
	movq	%rbx,72-128(%rbp)
	movq	176+8-128(%rsi),%rbx
	shlq	$60,%r10
	movb	%dl,10(%rsp)
	orq	%r10,%rax
	movb	%bl,%dl
	shrq	$4,%rbx
	movq	%r9,%r10
	shrq	$4,%r9
	movq	%r8,80(%rbp)
	movq	192+0-128(%rsi),%r8
	shlb	$4,%dl
	movq	%rax,80-128(%rbp)
	movq	192+8-128(%rsi),%rax
	shlq	$60,%r10
	movb	%dl,11(%rsp)
	orq	%r10,%rbx
	movb	%al,%dl
	shrq	$4,%rax
	movq	%r8,%r10
	shrq	$4,%r8
	movq	%r9,88(%rbp)
	movq	208+0-128(%rsi),%r9
	shlb	$4,%dl
	movq	%rbx,88-128(%rbp)
	movq	208+8-128(%rsi),%rbx
	shlq	$60,%r10
	movb	%dl,12(%rsp)
	orq	%r10,%rax
	movb	%bl,%dl
	shrq	$4,%rbx
	movq	%r9,%r10
	shrq	$4,%r9
	movq	%r8,96(%rbp)
	movq	224+0-128(%rsi),%r8
	shlb	$4,%dl
	movq	%rax,96-128(%rbp)
	movq	224+8-128(%rsi),%rax
	shlq	$60,%r10
	movb	%dl,13(%rsp)
	orq	%r10,%rbx
	movb	%al,%dl
	shrq	$4,%rax
	movq	%r8,%r10
	shrq	$4,%r8
	movq	%r9,104(%rbp)
	movq	240+0-128(%rsi),%r9
	shlb	$4,%dl
	movq	%rbx,104-128(%rbp)
	movq	240+8-128(%rsi),%rbx
	shlq	$60,%r10
	movb	%dl,14(%rsp)
	orq	%r10,%rax
	movb	%bl,%dl
	shrq	$4,%rbx
	movq	%r9,%r10
	shrq	$4,%r9
	movq	%r8,112(%rbp)
	shlb	$4,%dl
	movq	%rax,112-128(%rbp)
	shlq	$60,%r10
	movb	%dl,15(%rsp)
	orq	%r10,%rbx
	movq	%r9,120(%rbp)
	movq	%rbx,120-128(%rbp)
	addq	$-128,%rsi
	movq	8(%rdi),%r8
	movq	0(%rdi),%r9
	addq	%r14,%r15
	leaq	.Lrem_8bit(%rip),%r11
	jmp	.Louter_loop
.align	16
.Louter_loop:
	xorq	(%r14),%r9
	movq	8(%r14),%rdx
	leaq	16(%r14),%r14
	xorq	%r8,%rdx
	movq	%r9,(%rdi)
	movq	%rdx,8(%rdi)
	shrq	$32,%rdx
	xorq	%rax,%rax
	roll	$8,%edx
	movb	%dl,%al
	movzbl	%dl,%ebx
	shlb	$4,%al
	shrl	$4,%ebx
	roll	$8,%edx
	movq	8(%rsi,%rax,1),%r8
	movq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	movzbl	%dl,%ecx
	shlb	$4,%al
	movzbq	(%rsp,%rbx,1),%r12
	shrl	$4,%ecx
	xorq	%r8,%r12
	movq	%r9,%r10
	shrq	$8,%r8
	movzbq	%r12b,%r12
	shrq	$8,%r9
	xorq	-128(%rbp,%rbx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rbx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r12,2),%r12
	movzbl	%dl,%ebx
	shlb	$4,%al
	movzbq	(%rsp,%rcx,1),%r13
	shrl	$4,%ebx
	shlq	$48,%r12
	xorq	%r8,%r13
	movq	%r9,%r10
	xorq	%r12,%r9
	shrq	$8,%r8
	movzbq	%r13b,%r13
	shrq	$8,%r9
	xorq	-128(%rbp,%rcx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rcx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r13,2),%r13
	movzbl	%dl,%ecx
	shlb	$4,%al
	movzbq	(%rsp,%rbx,1),%r12
	shrl	$4,%ecx
	shlq	$48,%r13
	xorq	%r8,%r12
	movq	%r9,%r10
	xorq	%r13,%r9
	shrq	$8,%r8
	movzbq	%r12b,%r12
	movl	8(%rdi),%edx
	shrq	$8,%r9
	xorq	-128(%rbp,%rbx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rbx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r12,2),%r12
	movzbl	%dl,%ebx
	shlb	$4,%al
	movzbq	(%rsp,%rcx,1),%r13
	shrl	$4,%ebx
	shlq	$48,%r12
	xorq	%r8,%r13
	movq	%r9,%r10
	xorq	%r12,%r9
	shrq	$8,%r8
	movzbq	%r13b,%r13
	shrq	$8,%r9
	xorq	-128(%rbp,%rcx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rcx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r13,2),%r13
	movzbl	%dl,%ecx
	shlb	$4,%al
	movzbq	(%rsp,%rbx,1),%r12
	shrl	$4,%ecx
	shlq	$48,%r13
	xorq	%r8,%r12
	movq	%r9,%r10
	xorq	%r13,%r9
	shrq	$8,%r8
	movzbq	%r12b,%r12
	shrq	$8,%r9
	xorq	-128(%rbp,%rbx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rbx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r12,2),%r12
	movzbl	%dl,%ebx
	shlb	$4,%al
	movzbq	(%rsp,%rcx,1),%r13
	shrl	$4,%ebx
	shlq	$48,%r12
	xorq	%r8,%r13
	movq	%r9,%r10
	xorq	%r12,%r9
	shrq	$8,%r8
	movzbq	%r13b,%r13
	shrq	$8,%r9
	xorq	-128(%rbp,%rcx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rcx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r13,2),%r13
	movzbl	%dl,%ecx
	shlb	$4,%al
	movzbq	(%rsp,%rbx,1),%r12
	shrl	$4,%ecx
	shlq	$48,%r13
	xorq	%r8,%r12
	movq	%r9,%r10
	xorq	%r13,%r9
	shrq	$8,%r8
	movzbq	%r12b,%r12
	movl	4(%rdi),%edx
	shrq	$8,%r9
	xorq	-128(%rbp,%rbx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rbx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r12,2),%r12
	movzbl	%dl,%ebx
	shlb	$4,%al
	movzbq	(%rsp,%rcx,1),%r13
	shrl	$4,%ebx
	shlq	$48,%r12
	xorq	%r8,%r13
	movq	%r9,%r10
	xorq	%r12,%r9
	shrq	$8,%r8
	movzbq	%r13b,%r13
	shrq	$8,%r9
	xorq	-128(%rbp,%rcx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rcx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r13,2),%r13
	movzbl	%dl,%ecx
	shlb	$4,%al
	movzbq	(%rsp,%rbx,1),%r12
	shrl	$4,%ecx
	shlq	$48,%r13
	xorq	%r8,%r12
	movq	%r9,%r10
	xorq	%r13,%r9
	shrq	$8,%r8
	movzbq	%r12b,%r12
	shrq	$8,%r9
	xorq	-128(%rbp,%rbx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rbx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r12,2),%r12
	movzbl	%dl,%ebx
	shlb	$4,%al
	movzbq	(%rsp,%rcx,1),%r13
	shrl	$4,%ebx
	shlq	$48,%r12
	xorq	%r8,%r13
	movq	%r9,%r10
	xorq	%r12,%r9
	shrq	$8,%r8
	movzbq	%r13b,%r13
	shrq	$8,%r9
	xorq	-128(%rbp,%rcx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rcx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r13,2),%r13
	movzbl	%dl,%ecx
	shlb	$4,%al
	movzbq	(%rsp,%rbx,1),%r12
	shrl	$4,%ecx
	shlq	$48,%r13
	xorq	%r8,%r12
	movq	%r9,%r10
	xorq	%r13,%r9
	shrq	$8,%r8
	movzbq	%r12b,%r12
	movl	0(%rdi),%edx
	shrq	$8,%r9
	xorq	-128(%rbp,%rbx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rbx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r12,2),%r12
	movzbl	%dl,%ebx
	shlb	$4,%al
	movzbq	(%rsp,%rcx,1),%r13
	shrl	$4,%ebx
	shlq	$48,%r12
	xorq	%r8,%r13
	movq	%r9,%r10
	xorq	%r12,%r9
	shrq	$8,%r8
	movzbq	%r13b,%r13
	shrq	$8,%r9
	xorq	-128(%rbp,%rcx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rcx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r13,2),%r13
	movzbl	%dl,%ecx
	shlb	$4,%al
	movzbq	(%rsp,%rbx,1),%r12
	shrl	$4,%ecx
	shlq	$48,%r13
	xorq	%r8,%r12
	movq	%r9,%r10
	xorq	%r13,%r9
	shrq	$8,%r8
	movzbq	%r12b,%r12
	shrq	$8,%r9
	xorq	-128(%rbp,%rbx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rbx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r12,2),%r12
	movzbl	%dl,%ebx
	shlb	$4,%al
	movzbq	(%rsp,%rcx,1),%r13
	shrl	$4,%ebx
	shlq	$48,%r12
	xorq	%r8,%r13
	movq	%r9,%r10
	xorq	%r12,%r9
	shrq	$8,%r8
	movzbq	%r13b,%r13
	shrq	$8,%r9
	xorq	-128(%rbp,%rcx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rcx,8),%r9
	roll	$8,%edx
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	movb	%dl,%al
	xorq	%r10,%r8
	movzwq	(%r11,%r13,2),%r13
	movzbl	%dl,%ecx
	shlb	$4,%al
	movzbq	(%rsp,%rbx,1),%r12
	andl	$240,%ecx
	shlq	$48,%r13
	xorq	%r8,%r12
	movq	%r9,%r10
	xorq	%r13,%r9
	shrq	$8,%r8
	movzbq	%r12b,%r12
	movl	-4(%rdi),%edx
	shrq	$8,%r9
	xorq	-128(%rbp,%rbx,8),%r8
	shlq	$56,%r10
	xorq	(%rbp,%rbx,8),%r9
	movzwq	(%r11,%r12,2),%r12
	xorq	8(%rsi,%rax,1),%r8
	xorq	(%rsi,%rax,1),%r9
	shlq	$48,%r12
	xorq	%r10,%r8
	xorq	%r12,%r9
	movzbq	%r8b,%r13
	shrq	$4,%r8
	movq	%r9,%r10
	shlb	$4,%r13b
	shrq	$4,%r9
	xorq	8(%rsi,%rcx,1),%r8
	movzwq	(%r11,%r13,2),%r13
	shlq	$60,%r10
	xorq	(%rsi,%rcx,1),%r9
	xorq	%r10,%r8
	shlq	$48,%r13
	bswapq	%r8
	xorq	%r13,%r9
	bswapq	%r9
	cmpq	%r15,%r14
	jb	.Louter_loop
	movq	%r8,8(%rdi)
	movq	%r9,(%rdi)

	leaq	280+48(%rsp),%rsi
.cfi_def_cfa	%rsi,8
	movq	-48(%rsi),%r15
.cfi_restore	%r15
	movq	-40(%rsi),%r14
.cfi_restore	%r14
	movq	-32(%rsi),%r13
.cfi_restore	%r13
	movq	-24(%rsi),%r12
.cfi_restore	%r12
	movq	-16(%rsi),%rbp
.cfi_restore	%rbp
	movq	-8(%rsi),%rbx
.cfi_restore	%rbx
	leaq	0(%rsi),%rsp
.cfi_def_cfa_register	%rsp
.Lghash_epilogue:
	.byte	0xf3,0xc3
.cfi_endproc
.size	gcm_ghash_4bit,.-gcm_ghash_4bit
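# gcm_init_clmul: expand the hash key H (%rsi) into Htable (%rdi) for
# the PCLMULQDQ paths: H, H^2, H^3 and H^4, plus the xor-ed half values
# the Karatsuba multiplications expect. The .byte 102,15,58,68,...
# sequences are pclmulqdq opcodes spelled out for older assemblers.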
.globl	gcm_init_clmul
.type	gcm_init_clmul,@function
.align	16
gcm_init_clmul:
.cfi_startproc
.L_init_clmul:
	movdqu	(%rsi),%xmm2
	pshufd	$78,%xmm2,%xmm2


	pshufd	$255,%xmm2,%xmm4
	movdqa	%xmm2,%xmm3
	psllq	$1,%xmm2
	pxor	%xmm5,%xmm5
	psrlq	$63,%xmm3
	pcmpgtd	%xmm4,%xmm5
	pslldq	$8,%xmm3
	por	%xmm3,%xmm2


	pand	.L0x1c2_polynomial(%rip),%xmm5
	pxor	%xmm5,%xmm2


	pshufd	$78,%xmm2,%xmm6
	movdqa	%xmm2,%xmm0
	pxor	%xmm2,%xmm6
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pxor	%xmm0,%xmm3
.byte	102,15,58,68,194,0
.byte	102,15,58,68,202,17
.byte	102,15,58,68,222,0
	pxor	%xmm0,%xmm3
	pxor	%xmm1,%xmm3

	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0

	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1


	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
	pshufd	$78,%xmm2,%xmm3
	pshufd	$78,%xmm0,%xmm4
	pxor	%xmm2,%xmm3
	movdqu	%xmm2,0(%rdi)
	pxor	%xmm0,%xmm4
	movdqu	%xmm0,16(%rdi)
.byte	102,15,58,15,227,8
	movdqu	%xmm4,32(%rdi)
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pxor	%xmm0,%xmm3
.byte	102,15,58,68,194,0
.byte	102,15,58,68,202,17
.byte	102,15,58,68,222,0
	pxor	%xmm0,%xmm3
	pxor	%xmm1,%xmm3

	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0

	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1


	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
	movdqa	%xmm0,%xmm5
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pxor	%xmm0,%xmm3
.byte	102,15,58,68,194,0
.byte	102,15,58,68,202,17
.byte	102,15,58,68,222,0
	pxor	%xmm0,%xmm3
	pxor	%xmm1,%xmm3

	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0

	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1


	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
	pshufd	$78,%xmm5,%xmm3
	pshufd	$78,%xmm0,%xmm4
	pxor	%xmm5,%xmm3
	movdqu	%xmm5,48(%rdi)
	pxor	%xmm0,%xmm4
	movdqu	%xmm0,64(%rdi)
.byte	102,15,58,15,227,8
	movdqu	%xmm4,80(%rdi)
	.byte	0xf3,0xc3
.cfi_endproc
.size	gcm_init_clmul,.-gcm_init_clmul
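# gcm_gmult_clmul: a single Karatsuba pclmulqdq multiply of Xi (%rdi)
# by H, followed by the two-phase reduction modulo the GHASH polynomial.
# .byte 102,15,56,0,197 is pshufb %xmm5,%xmm0, the byte swap through
# .Lbswap_mask.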
.globl	gcm_gmult_clmul
.type	gcm_gmult_clmul,@function
.align	16
gcm_gmult_clmul:
.cfi_startproc
.byte	243,15,30,250
.L_gmult_clmul:
	movdqu	(%rdi),%xmm0
	movdqa	.Lbswap_mask(%rip),%xmm5
	movdqu	(%rsi),%xmm2
	movdqu	32(%rsi),%xmm4
.byte	102,15,56,0,197
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pxor	%xmm0,%xmm3
.byte	102,15,58,68,194,0
.byte	102,15,58,68,202,17
.byte	102,15,58,68,220,0
	pxor	%xmm0,%xmm3
	pxor	%xmm1,%xmm3

	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0

	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1


	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
.byte	102,15,56,0,197
	movdqu	%xmm0,(%rdi)
	.byte	0xf3,0xc3
.cfi_endproc
.size	gcm_gmult_clmul,.-gcm_gmult_clmul
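# gcm_ghash_clmul: bulk GHASH of %rcx bytes at %rdx. Given at least four
# blocks (and an OPENSSL_ia32cap_P check that excludes parts where 4x
# aggregation does not pay off), it hashes four blocks per iteration
# against H^1..H^4 in .Lmod4_loop; otherwise it drops to two blocks
# (.Lmod_loop) or one (.Lodd_tail).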
.globl	gcm_ghash_clmul
.type	gcm_ghash_clmul,@function
.align	32
gcm_ghash_clmul:
.cfi_startproc
.byte	243,15,30,250
.L_ghash_clmul:
	movdqa	.Lbswap_mask(%rip),%xmm10

	movdqu	(%rdi),%xmm0
	movdqu	(%rsi),%xmm2
	movdqu	32(%rsi),%xmm7
.byte	102,65,15,56,0,194

	subq	$0x10,%rcx
	jz	.Lodd_tail

	movdqu	16(%rsi),%xmm6
	movl	OPENSSL_ia32cap_P+4(%rip),%eax
	cmpq	$0x30,%rcx
	jb	.Lskip4x

	andl	$71303168,%eax
	cmpl	$4194304,%eax
	je	.Lskip4x

	subq	$0x30,%rcx
	movq	$0xA040608020C0E000,%rax
	movdqu	48(%rsi),%xmm14
	movdqu	64(%rsi),%xmm15




	movdqu	48(%rdx),%xmm3
	movdqu	32(%rdx),%xmm11
.byte	102,65,15,56,0,218
.byte	102,69,15,56,0,218
	movdqa	%xmm3,%xmm5
	pshufd	$78,%xmm3,%xmm4
	pxor	%xmm3,%xmm4
.byte	102,15,58,68,218,0
.byte	102,15,58,68,234,17
.byte	102,15,58,68,231,0

	movdqa	%xmm11,%xmm13
	pshufd	$78,%xmm11,%xmm12
	pxor	%xmm11,%xmm12
.byte	102,68,15,58,68,222,0
.byte	102,68,15,58,68,238,17
.byte	102,68,15,58,68,231,16
	xorps	%xmm11,%xmm3
	xorps	%xmm13,%xmm5
	movups	80(%rsi),%xmm7
	xorps	%xmm12,%xmm4

	movdqu	16(%rdx),%xmm11
	movdqu	0(%rdx),%xmm8
.byte	102,69,15,56,0,218
.byte	102,69,15,56,0,194
	movdqa	%xmm11,%xmm13
	pshufd	$78,%xmm11,%xmm12
	pxor	%xmm8,%xmm0
	pxor	%xmm11,%xmm12
.byte	102,69,15,58,68,222,0
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm8
	pxor	%xmm0,%xmm8
.byte	102,69,15,58,68,238,17
.byte	102,68,15,58,68,231,0
	xorps	%xmm11,%xmm3
	xorps	%xmm13,%xmm5

	leaq	64(%rdx),%rdx
	subq	$0x40,%rcx
	jc	.Ltail4x

	jmp	.Lmod4_loop
.align	32
.Lmod4_loop:
.byte	102,65,15,58,68,199,0
	xorps	%xmm12,%xmm4
	movdqu	48(%rdx),%xmm11
.byte	102,69,15,56,0,218
.byte	102,65,15,58,68,207,17
	xorps	%xmm3,%xmm0
	movdqu	32(%rdx),%xmm3
	movdqa	%xmm11,%xmm13
.byte	102,68,15,58,68,199,16
	pshufd	$78,%xmm11,%xmm12
	xorps	%xmm5,%xmm1
	pxor	%xmm11,%xmm12
.byte	102,65,15,56,0,218
	movups	32(%rsi),%xmm7
	xorps	%xmm4,%xmm8
.byte	102,68,15,58,68,218,0
	pshufd	$78,%xmm3,%xmm4

	pxor	%xmm0,%xmm8
	movdqa	%xmm3,%xmm5
	pxor	%xmm1,%xmm8
	pxor	%xmm3,%xmm4
	movdqa	%xmm8,%xmm9
.byte	102,68,15,58,68,234,17
	pslldq	$8,%xmm8
	psrldq	$8,%xmm9
	pxor	%xmm8,%xmm0
	movdqa	.L7_mask(%rip),%xmm8
	pxor	%xmm9,%xmm1
.byte	102,76,15,110,200

	pand	%xmm0,%xmm8
.byte	102,69,15,56,0,200
	pxor	%xmm0,%xmm9
.byte	102,68,15,58,68,231,0
	psllq	$57,%xmm9
	movdqa	%xmm9,%xmm8
	pslldq	$8,%xmm9
.byte	102,15,58,68,222,0
	psrldq	$8,%xmm8
	pxor	%xmm9,%xmm0
	pxor	%xmm8,%xmm1
	movdqu	0(%rdx),%xmm8

	movdqa	%xmm0,%xmm9
	psrlq	$1,%xmm0
.byte	102,15,58,68,238,17
	xorps	%xmm11,%xmm3
	movdqu	16(%rdx),%xmm11
.byte	102,69,15,56,0,218
.byte	102,15,58,68,231,16
	xorps	%xmm13,%xmm5
	movups	80(%rsi),%xmm7
.byte	102,69,15,56,0,194
	pxor	%xmm9,%xmm1
	pxor	%xmm0,%xmm9
	psrlq	$5,%xmm0

	movdqa	%xmm11,%xmm13
	pxor	%xmm12,%xmm4
	pshufd	$78,%xmm11,%xmm12
	pxor	%xmm9,%xmm0
	pxor	%xmm8,%xmm1
	pxor	%xmm11,%xmm12
.byte	102,69,15,58,68,222,0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
	movdqa	%xmm0,%xmm1
.byte	102,69,15,58,68,238,17
	xorps	%xmm11,%xmm3
	pshufd	$78,%xmm0,%xmm8
	pxor	%xmm0,%xmm8

.byte	102,68,15,58,68,231,0
	xorps	%xmm13,%xmm5

	leaq	64(%rdx),%rdx
	subq	$0x40,%rcx
	jnc	.Lmod4_loop

.Ltail4x:
.byte	102,65,15,58,68,199,0
.byte	102,65,15,58,68,207,17
.byte	102,68,15,58,68,199,16
	xorps	%xmm12,%xmm4
	xorps	%xmm3,%xmm0
	xorps	%xmm5,%xmm1
	pxor	%xmm0,%xmm1
	pxor	%xmm4,%xmm8

	pxor	%xmm1,%xmm8
	pxor	%xmm0,%xmm1

	movdqa	%xmm8,%xmm9
	psrldq	$8,%xmm8
	pslldq	$8,%xmm9
	pxor	%xmm8,%xmm1
	pxor	%xmm9,%xmm0

	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1


	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
	addq	$0x40,%rcx
	jz	.Ldone
	movdqu	32(%rsi),%xmm7
	subq	$0x10,%rcx
	jz	.Lodd_tail
.Lskip4x:




	movdqu	(%rdx),%xmm8
	movdqu	16(%rdx),%xmm3
.byte	102,69,15,56,0,194
.byte	102,65,15,56,0,218
	pxor	%xmm8,%xmm0

	movdqa	%xmm3,%xmm5
	pshufd	$78,%xmm3,%xmm4
	pxor	%xmm3,%xmm4
.byte	102,15,58,68,218,0
.byte	102,15,58,68,234,17
.byte	102,15,58,68,231,0

	leaq	32(%rdx),%rdx
	nop
	subq	$0x20,%rcx
	jbe	.Leven_tail
	nop
	jmp	.Lmod_loop

.align	32
.Lmod_loop:
	movdqa	%xmm0,%xmm1
	movdqa	%xmm4,%xmm8
	pshufd	$78,%xmm0,%xmm4
	pxor	%xmm0,%xmm4

.byte	102,15,58,68,198,0
.byte	102,15,58,68,206,17
.byte	102,15,58,68,231,16

	pxor	%xmm3,%xmm0
	pxor	%xmm5,%xmm1
	movdqu	(%rdx),%xmm9
	pxor	%xmm0,%xmm8
.byte	102,69,15,56,0,202
	movdqu	16(%rdx),%xmm3

	pxor	%xmm1,%xmm8
	pxor	%xmm9,%xmm1
	pxor	%xmm8,%xmm4
.byte	102,65,15,56,0,218
	movdqa	%xmm4,%xmm8
	psrldq	$8,%xmm8
	pslldq	$8,%xmm4
	pxor	%xmm8,%xmm1
	pxor	%xmm4,%xmm0

	movdqa	%xmm3,%xmm5

	movdqa	%xmm0,%xmm9
	movdqa	%xmm0,%xmm8
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm8
.byte	102,15,58,68,218,0
	psllq	$1,%xmm0
	pxor	%xmm8,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm8
	pslldq	$8,%xmm0
	psrldq	$8,%xmm8
	pxor	%xmm9,%xmm0
	pshufd	$78,%xmm5,%xmm4
	pxor	%xmm8,%xmm1
	pxor	%xmm5,%xmm4

	movdqa	%xmm0,%xmm9
	psrlq	$1,%xmm0
.byte	102,15,58,68,234,17
	pxor	%xmm9,%xmm1
	pxor	%xmm0,%xmm9
	psrlq	$5,%xmm0
	pxor	%xmm9,%xmm0
	leaq	32(%rdx),%rdx
	psrlq	$1,%xmm0
.byte	102,15,58,68,231,0
	pxor	%xmm1,%xmm0

	subq	$0x20,%rcx
	ja	.Lmod_loop

.Leven_tail:
	movdqa	%xmm0,%xmm1
	movdqa	%xmm4,%xmm8
	pshufd	$78,%xmm0,%xmm4
	pxor	%xmm0,%xmm4

.byte	102,15,58,68,198,0
.byte	102,15,58,68,206,17
.byte	102,15,58,68,231,16

	pxor	%xmm3,%xmm0
	pxor	%xmm5,%xmm1
	pxor	%xmm0,%xmm8
	pxor	%xmm1,%xmm8
	pxor	%xmm8,%xmm4
	movdqa	%xmm4,%xmm8
	psrldq	$8,%xmm8
	pslldq	$8,%xmm4
	pxor	%xmm8,%xmm1
	pxor	%xmm4,%xmm0

	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1


	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
	testq	%rcx,%rcx
	jnz	.Ldone

.Lodd_tail:
	movdqu	(%rdx),%xmm8
.byte	102,69,15,56,0,194
	pxor	%xmm8,%xmm0
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pxor	%xmm0,%xmm3
.byte	102,15,58,68,194,0
.byte	102,15,58,68,202,17
.byte	102,15,58,68,223,0
	pxor	%xmm0,%xmm3
	pxor	%xmm1,%xmm3

	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0

	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1


	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
.Ldone:
.byte	102,65,15,56,0,194
	movdqu	%xmm0,(%rdi)
	.byte	0xf3,0xc3
.cfi_endproc
.size	gcm_ghash_clmul,.-gcm_ghash_clmul
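# In this build the gcm_*_avx entry points carry no separate AVX code;
# they simply tail-jump into the CLMUL implementations above.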
.globl	gcm_init_avx
.type	gcm_init_avx,@function
.align	32
gcm_init_avx:
.cfi_startproc
	jmp	.L_init_clmul
.cfi_endproc
.size	gcm_init_avx,.-gcm_init_avx
.globl	gcm_gmult_avx
.type	gcm_gmult_avx,@function
.align	32
gcm_gmult_avx:
.cfi_startproc
.byte	243,15,30,250
	jmp	.L_gmult_clmul
.cfi_endproc
.size	gcm_gmult_avx,.-gcm_gmult_avx
.globl	gcm_ghash_avx
.type	gcm_ghash_avx,@function
.align	32
gcm_ghash_avx:
.cfi_startproc
.byte	243,15,30,250
	jmp	.L_ghash_clmul
.cfi_endproc
.size	gcm_ghash_avx,.-gcm_ghash_avx
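# Constant pool: .Lbswap_mask (pshufb byte reversal), the 0xc2...01
# reduction constant for the CLMUL paths, and the .Lrem_4bit/.Lrem_8bit
# tables that fold bits shifted out during the table-driven multiplies.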
.align	64
.Lbswap_mask:
.byte	15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
.L0x1c2_polynomial:
.byte	1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2
.L7_mask:
.long	7,0,7,0
.L7_mask_poly:
.long	7,0,450,0
.align	64
.type	.Lrem_4bit,@object
.Lrem_4bit:
.long	0,0,0,471859200,0,943718400,0,610271232
.long	0,1887436800,0,1822425088,0,1220542464,0,1423966208
.long	0,3774873600,0,4246732800,0,3644850176,0,3311403008
.long	0,2441084928,0,2376073216,0,2847932416,0,3051356160
.type	.Lrem_8bit,@object
.Lrem_8bit:
.value	0x0000,0x01C2,0x0384,0x0246,0x0708,0x06CA,0x048C,0x054E
.value	0x0E10,0x0FD2,0x0D94,0x0C56,0x0918,0x08DA,0x0A9C,0x0B5E
.value	0x1C20,0x1DE2,0x1FA4,0x1E66,0x1B28,0x1AEA,0x18AC,0x196E
.value	0x1230,0x13F2,0x11B4,0x1076,0x1538,0x14FA,0x16BC,0x177E
.value	0x3840,0x3982,0x3BC4,0x3A06,0x3F48,0x3E8A,0x3CCC,0x3D0E
.value	0x3650,0x3792,0x35D4,0x3416,0x3158,0x309A,0x32DC,0x331E
.value	0x2460,0x25A2,0x27E4,0x2626,0x2368,0x22AA,0x20EC,0x212E
.value	0x2A70,0x2BB2,0x29F4,0x2836,0x2D78,0x2CBA,0x2EFC,0x2F3E
.value	0x7080,0x7142,0x7304,0x72C6,0x7788,0x764A,0x740C,0x75CE
.value	0x7E90,0x7F52,0x7D14,0x7CD6,0x7998,0x785A,0x7A1C,0x7BDE
.value	0x6CA0,0x6D62,0x6F24,0x6EE6,0x6BA8,0x6A6A,0x682C,0x69EE
.value	0x62B0,0x6372,0x6134,0x60F6,0x65B8,0x647A,0x663C,0x67FE
.value	0x48C0,0x4902,0x4B44,0x4A86,0x4FC8,0x4E0A,0x4C4C,0x4D8E
.value	0x46D0,0x4712,0x4554,0x4496,0x41D8,0x401A,0x425C,0x439E
.value	0x54E0,0x5522,0x5764,0x56A6,0x53E8,0x522A,0x506C,0x51AE
.value	0x5AF0,0x5B32,0x5974,0x58B6,0x5DF8,0x5C3A,0x5E7C,0x5FBE
.value	0xE100,0xE0C2,0xE284,0xE346,0xE608,0xE7CA,0xE58C,0xE44E
.value	0xEF10,0xEED2,0xEC94,0xED56,0xE818,0xE9DA,0xEB9C,0xEA5E
.value	0xFD20,0xFCE2,0xFEA4,0xFF66,0xFA28,0xFBEA,0xF9AC,0xF86E
.value	0xF330,0xF2F2,0xF0B4,0xF176,0xF438,0xF5FA,0xF7BC,0xF67E
.value	0xD940,0xD882,0xDAC4,0xDB06,0xDE48,0xDF8A,0xDDCC,0xDC0E
.value	0xD750,0xD692,0xD4D4,0xD516,0xD058,0xD19A,0xD3DC,0xD21E
.value	0xC560,0xC4A2,0xC6E4,0xC726,0xC268,0xC3AA,0xC1EC,0xC02E
.value	0xCB70,0xCAB2,0xC8F4,0xC936,0xCC78,0xCDBA,0xCFFC,0xCE3E
.value	0x9180,0x9042,0x9204,0x93C6,0x9688,0x974A,0x950C,0x94CE
.value	0x9F90,0x9E52,0x9C14,0x9DD6,0x9898,0x995A,0x9B1C,0x9ADE
.value	0x8DA0,0x8C62,0x8E24,0x8FE6,0x8AA8,0x8B6A,0x892C,0x88EE
.value	0x83B0,0x8272,0x8034,0x81F6,0x84B8,0x857A,0x873C,0x86FE
.value	0xA9C0,0xA802,0xAA44,0xAB86,0xAEC8,0xAF0A,0xAD4C,0xAC8E
.value	0xA7D0,0xA612,0xA454,0xA596,0xA0D8,0xA11A,0xA35C,0xA29E
.value	0xB5E0,0xB422,0xB664,0xB7A6,0xB2E8,0xB32A,0xB16C,0xB0AE
.value	0xBBF0,0xBA32,0xB874,0xB9B6,0xBCF8,0xBD3A,0xBF7C,0xBEBE

.byte	71,72,65,83,72,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align	64
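# GNU property note: advertises X86_FEATURE_1 IBT and SHSTK (CET),
# matching the endbr64 (.byte 243,15,30,250) at each public entry point.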
	.section .note.gnu.property, #alloc
	.p2align 3
	.long 1f - 0f
	.long 4f - 1f
	.long 5
0:
	# "GNU" encoded with .byte, since .asciz isn't supported
	# on Solaris.
	.byte 0x47
	.byte 0x4e
	.byte 0x55
	.byte 0
1:
	.p2align 3
	.long 0xc0000002
	.long 3f - 2f
2:
	.long 3
3:
	.p2align 3
4: