VirtualBox

source: vbox/trunk/src/VBox/VMM/testcase/tstX86-1A.asm@40001

Last change on this file since 40001 was 40001, checked in by vboxsync, 13 years ago

IEM: Filled in some NOPs and PREFETCHes.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 37.3 KB
1; $Id: tstX86-1A.asm 40001 2012-02-05 21:30:40Z vboxsync $
2;; @file
3; X86 instruction set exploration/testcase #1.
4;
5
6;
7; Copyright (C) 2011-2012 Oracle Corporation
8;
9; This file is part of VirtualBox Open Source Edition (OSE), as
10; available from http://www.virtualbox.org. This file is free software;
11; you can redistribute it and/or modify it under the terms of the GNU
12; General Public License (GPL) as published by the Free Software
13; Foundation, in version 2 as it comes in the "COPYING" file of the
14; VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15; hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16;
17
18
19;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
20; Header Files ;
21;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
22%include "iprt/asmdefs.mac"
23%include "iprt/x86.mac"
24
25;; @todo Move this to a header?
26struc TRAPINFO
27 .uTrapPC RTCCPTR_RES 1
28 .uResumePC RTCCPTR_RES 1
29 .u8TrapNo resb 1
30 .cbInstr resb 1
31 .au8Padding resb (RTCCPTR_CB*2 - 2)
32endstruc
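 ; Each ShouldTrap invocation below emits one of these records into the data
 ; segment between g_aTrapInfo and g_aTrapInfoEnd: the address of the
 ; instruction expected to trap, the address to resume at, the expected
 ; exception vector and the instruction length.  The exception handler
 ; (presumably in the C part of this testcase) is expected to match the
 ; faulting PC against uTrapPC and continue execution at uResumePC.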
33
34
35;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
36; Global Variables ;
37;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
38BEGINDATA
39extern NAME(g_pbEfPage)
40extern NAME(g_pbEfExecPage)
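 ; Both of these appear to point at a fully accessible page that is followed
 ; by an unmapped guard page (presumably set up by the C part of the testcase);
 ; several checks below rely on an access at [pb + PAGE_SIZE] raising #PF.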
41
42GLOBALNAME g_szAlpha
43 db "abcdefghijklmnopqrstuvwxyz", 0
44g_szAlpha_end:
45%define g_cchAlpha (g_szAlpha_end - NAME(g_szAlpha))
46 db 0, 0, 0,
47
48;;
49; The last global data item. We build this as we write the code.
50GLOBALNAME g_aTrapInfo
51
52
53;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
54; Defined Constants And Macros ;
55;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
56%define X86_XCPT_UD 6
57%define X86_XCPT_GP 13
58%define X86_XCPT_PF 14
59
60%define PAGE_SIZE 0x1000
61
62;; Reference a global variable
63%ifdef RT_ARCH_AMD64
64 %define REF_GLOBAL(a_Name) [NAME(a_Name) wrt rip]
65%else
66 %define REF_GLOBAL(a_Name) [NAME(a_Name)]
67%endif
68
69;;
70; Macro for recording a trapping instruction (simple).
71;
72; @param 1 The trap number.
73; @param 2+ The instruction which should trap.
74%macro ShouldTrap 2+
75%%trap:
76 %2
77%%trap_end:
78 mov eax, __LINE__
79 jmp .return
80BEGINDATA
81%%trapinfo: istruc TRAPINFO
82 at TRAPINFO.uTrapPC, RTCCPTR_DEF %%trap
83 at TRAPINFO.uResumePC, RTCCPTR_DEF %%resume
84 at TRAPINFO.u8TrapNo, db %1
85 at TRAPINFO.cbInstr, db (%%trap_end - %%trap)
86iend
87BEGINCODE
88%%resume:
89%endmacro
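 ; How the macro works: if %2 raises the expected trap, the handler resumes at
 ; %%resume and the test continues; if it falls through without trapping, the
 ; code after the instruction loads the current line number into eax and bails
 ; out through the enclosing procedure's .return label as a failure.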
90
91
92;;
93; Function prologue saving all registers except EAX.
94;
95%macro SAVE_ALL_PROLOGUE 0
96 push xBP
97 mov xBP, xSP
98 pushf
99 push xBX
100 push xCX
101 push xDX
102 push xSI
103 push xDI
104%ifdef RT_ARCH_AMD64
105 push r8
106 push r9
107 push r10
108 push r11
109 push r12
110 push r13
111 push r14
112 push r15
113%endif
114%endmacro
115
116
117;;
118 ; Function epilogue restoring all registers except EAX.
119;
120%macro SAVE_ALL_EPILOGUE 0
121%ifdef RT_ARCH_AMD64
122 pop r15
123 pop r14
124 pop r13
125 pop r12
126 pop r11
127 pop r10
128 pop r9
129 pop r8
130%endif
131 pop xDI
132 pop xSI
133 pop xDX
134 pop xCX
135 pop xBX
136 popf
137 leave
138%endmacro
139
140
141
142
143BEGINCODE
144
145;;
146; Loads all general registers except xBP and xSP with unique values.
147;
148x861_LoadUniqueRegValues:
149%ifdef RT_ARCH_AMD64
150 mov rax, 00000000000000000h
151 mov rcx, 01111111111111111h
152 mov rdx, 02222222222222222h
153 mov rbx, 03333333333333333h
154 mov rsi, 06666666666666666h
155 mov rdi, 07777777777777777h
156 mov r8, 08888888888888888h
157 mov r9, 09999999999999999h
158 mov r10, 0aaaaaaaaaaaaaaaah
159 mov r11, 0bbbbbbbbbbbbbbbbh
160 mov r12, 0cccccccccccccccch
161 mov r13, 0ddddddddddddddddh
162 mov r14, 0eeeeeeeeeeeeeeeeh
163 mov r15, 0ffffffffffffffffh
164%else
165 mov eax, 000000000h
166 mov ecx, 011111111h
167 mov edx, 022222222h
168 mov ebx, 033333333h
169 mov esi, 066666666h
170 mov edi, 077777777h
171%endif
172 ret
173; end x861_LoadUniqueRegValues
174
175
176;;
177; Clears all general registers except xBP and xSP.
178;
179x861_ClearRegisters:
180 xor eax, eax
181 xor ebx, ebx
182 xor ecx, ecx
183 xor edx, edx
184 xor esi, esi
185 xor edi, edi
186%ifdef RT_ARCH_AMD64
187 xor r8, r8
188 xor r9, r9
189 xor r10, r10
190 xor r11, r11
191 xor r12, r12
192 xor r13, r13
193 xor r14, r14
194 xor r15, r15
195%endif
196 ret
197; x861_ClearRegisters
198
199
200;;
201; Loads all general, MMX and SSE registers except xBP and xSP with unique values.
202;
203x861_LoadUniqueRegValuesSSE:
204 movq mm0, [._mm0]
205 movq mm1, [._mm1]
206 movq mm2, [._mm2]
207 movq mm3, [._mm3]
208 movq mm4, [._mm4]
209 movq mm5, [._mm5]
210 movq mm6, [._mm6]
211 movq mm7, [._mm7]
212 movdqu xmm0, [._xmm0]
213 movdqu xmm1, [._xmm1]
214 movdqu xmm2, [._xmm2]
215 movdqu xmm3, [._xmm3]
216 movdqu xmm4, [._xmm4]
217 movdqu xmm5, [._xmm5]
218 movdqu xmm6, [._xmm6]
219 movdqu xmm7, [._xmm7]
220%ifdef RT_ARCH_AMD64
221 movdqu xmm8, [._xmm8]
222 movdqu xmm9, [._xmm9]
223 movdqu xmm10, [._xmm10]
224 movdqu xmm11, [._xmm11]
225 movdqu xmm12, [._xmm12]
226 movdqu xmm13, [._xmm13]
227 movdqu xmm14, [._xmm14]
228 movdqu xmm15, [._xmm15]
229%endif
230 call x861_LoadUniqueRegValues
231 ret
232._mm0: times 8 db 040h
233._mm1: times 8 db 041h
234._mm2: times 8 db 042h
235._mm3: times 8 db 043h
236._mm4: times 8 db 044h
237._mm5: times 8 db 045h
238._mm6: times 8 db 046h
239._mm7: times 8 db 047h
240._xmm0: times 16 db 080h
241._xmm1: times 16 db 081h
242._xmm2: times 16 db 082h
243._xmm3: times 16 db 083h
244._xmm4: times 16 db 084h
245._xmm5: times 16 db 085h
246._xmm6: times 16 db 086h
247._xmm7: times 16 db 087h
248%ifdef RT_ARCH_AMD64
249._xmm8: times 16 db 088h
250._xmm9: times 16 db 089h
251._xmm10: times 16 db 08ah
252._xmm11: times 16 db 08bh
253._xmm12: times 16 db 08ch
254._xmm13: times 16 db 08dh
255._xmm14: times 16 db 08eh
256._xmm15: times 16 db 08fh
257%endif
258; end x861_LoadUniqueRegValuesSSE
259
260
261;;
262; Clears all general, MMX and SSE registers except xBP and xSP.
263;
264x861_ClearRegistersSSE:
265 call x861_ClearRegisters
266 movq mm0, [.zero]
267 movq mm1, [.zero]
268 movq mm2, [.zero]
269 movq mm3, [.zero]
270 movq mm4, [.zero]
271 movq mm5, [.zero]
272 movq mm6, [.zero]
273 movq mm7, [.zero]
274 movdqu xmm0, [.zero]
275 movdqu xmm1, [.zero]
276 movdqu xmm2, [.zero]
277 movdqu xmm3, [.zero]
278 movdqu xmm4, [.zero]
279 movdqu xmm5, [.zero]
280 movdqu xmm6, [.zero]
281 movdqu xmm7, [.zero]
282%ifdef RT_ARCH_AMD64
283 movdqu xmm8, [.zero]
284 movdqu xmm9, [.zero]
285 movdqu xmm10, [.zero]
286 movdqu xmm11, [.zero]
287 movdqu xmm12, [.zero]
288 movdqu xmm13, [.zero]
289 movdqu xmm14, [.zero]
290 movdqu xmm15, [.zero]
291%endif
292 call x861_LoadUniqueRegValues
293 ret
294
295 ret
296.zero times 16 db 000h
297; x861_ClearRegistersSSE
298
299
300BEGINPROC x861_Test1
301 push xBP
302 mov xBP, xSP
303 pushf
304 push xBX
305 push xCX
306 push xDX
307 push xSI
308 push xDI
309%ifdef RT_ARCH_AMD64
310 push r8
311 push r9
312 push r10
313 push r11
314 push r12
315 push r13
316 push r14
317 push r15
318%endif
319
320 ;
321 ; Odd push behavior
322 ;
323%if 0 ; Seems to be so on AMD only
324%ifdef RT_ARCH_X86
325 ; upper word of a 'push cs' is cleared.
326 mov eax, __LINE__
327 mov dword [esp - 4], 0f0f0f0fh
328 push cs
329 pop ecx
330 mov bx, cs
331 and ebx, 0000ffffh
332 cmp ecx, ebx
333 jne .failed
334
335 ; upper word of a 'push ds' is cleared.
336 mov eax, __LINE__
337 mov dword [esp - 4], 0f0f0f0fh
338 push ds
339 pop ecx
340 mov bx, ds
341 and ebx, 0000ffffh
342 cmp ecx, ebx
343 jne .failed
344
345 ; upper word of a 'push es' is cleared.
346 mov eax, __LINE__
347 mov dword [esp - 4], 0f0f0f0fh
348 push es
349 pop ecx
350 mov bx, es
351 and ebx, 0000ffffh
352 cmp ecx, ebx
353 jne .failed
354%endif ; RT_ARCH_X86
355
356 ; The upper part of a 'push fs' is cleared.
357 mov eax, __LINE__
358 xor ecx, ecx
359 not xCX
360 push xCX
361 pop xCX
362 push fs
363 pop xCX
364 mov bx, fs
365 and ebx, 0000ffffh
366 cmp xCX, xBX
367 jne .failed
368
369 ; The upper part of a 'push gs' is cleared.
370 mov eax, __LINE__
371 xor ecx, ecx
372 not xCX
373 push xCX
374 pop xCX
375 push gs
376 pop xCX
377 mov bx, gs
378 and ebx, 0000ffffh
379 cmp xCX, xBX
380 jne .failed
381%endif
382
383%ifdef RT_ARCH_AMD64
384 ; REX.B works with 'push r64'.
385 call x861_LoadUniqueRegValues
386 mov eax, __LINE__
387 push rcx
388 pop rdx
389 cmp rdx, rcx
390 jne .failed
391
392 call x861_LoadUniqueRegValues
393 mov eax, __LINE__
394 db 041h ; REX.B
395 push rcx
396 pop rdx
397 cmp rdx, r9
398 jne .failed
399
400 call x861_LoadUniqueRegValues
401 mov eax, __LINE__
402 db 042h ; REX.X
403 push rcx
404 pop rdx
405 cmp rdx, rcx
406 jne .failed
407
408 call x861_LoadUniqueRegValues
409 mov eax, __LINE__
410 db 044h ; REX.R
411 push rcx
412 pop rdx
413 cmp rdx, rcx
414 jne .failed
415
416 call x861_LoadUniqueRegValues
417 mov eax, __LINE__
418 db 048h ; REX.W
419 push rcx
420 pop rdx
421 cmp rdx, rcx
422 jne .failed
423
424 call x861_LoadUniqueRegValues
425 mov eax, __LINE__
426 db 04fh ; REX.*
427 push rcx
428 pop rdx
429 cmp rdx, r9
430 jne .failed
431%endif
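 ; Encoding note: a REX byte is 0100WRXB.  With opcode 51h (push rCX) only
 ; REX.B participates - it extends the register selector from rcx to r9 -
 ; which is why the 041h and 04fh cases compare against r9, while REX.X,
 ; REX.R and REX.W (042h/044h/048h) leave the instruction a plain push rcx.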
432
433 ;
434 ; Zero extending when moving from a segment register, as well as memory access sizes.
435 ;
436 call x861_LoadUniqueRegValues
437 mov eax, __LINE__
438 mov ecx, ds
439 shr xCX, 16
440 cmp xCX, 0
441 jnz .failed
442
443%ifdef RT_ARCH_AMD64
444 call x861_LoadUniqueRegValues
445 mov eax, __LINE__
446 mov rcx, ds
447 shr rcx, 16
448 cmp rcx, 0
449 jnz .failed
450%endif
451
452 call x861_LoadUniqueRegValues
453 mov eax, __LINE__
454 mov xDX, xCX
455 mov cx, ds
456 shr xCX, 16
457 shr xDX, 16
458 cmp xCX, xDX
459 jnz .failed
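 ; Summary of the checks above: 'mov r32, Sreg' zero extends into the full
 ; register, while the 16-bit form ('mov cx, ds') leaves bits 16 and up of
 ; xCX untouched.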
460
461 ; Loading is always a word access.
462 mov eax, __LINE__
463 mov xDI, REF_GLOBAL(g_pbEfPage)
464 lea xDI, [xDI + 0x1000 - 2]
465 mov xDX, es
466 mov [xDI], dx
467 mov es, [xDI] ; should not crash
468
469 ; Saving is always a word access.
470 mov eax, __LINE__
471 mov xDI, REF_GLOBAL(g_pbEfPage)
472 mov dword [xDI + 0x1000 - 4], -1
473 mov [xDI + 0x1000 - 2], ss ; Should not crash.
474 mov bx, ss
475 mov cx, [xDI + 0x1000 - 2]
476 cmp cx, bx
477 jne .failed
478
479%ifdef RT_ARCH_AMD64
480 ; Check that the REX.W and REX.X bits (04ah) don't have any influence over a memory write.
481 call x861_ClearRegisters
482 mov eax, __LINE__
483 mov xDI, REF_GLOBAL(g_pbEfPage)
484 mov dword [xDI + 0x1000 - 4], -1
485 db 04ah
486 mov [xDI + 0x1000 - 2], ss ; Should not crash.
487 mov bx, ss
488 mov cx, [xDI + 0x1000 - 2]
489 cmp cx, bx
490 jne .failed
491%endif
492
493
494 ;
495 ; Check what happens when both string prefixes are used.
496 ;
497 cld
498 mov dx, ds
499 mov es, dx
500
501 ; check that repne scasb (al=0) behaves as expected.
502 lea xDI, REF_GLOBAL(g_szAlpha)
503 xor eax, eax ; find the end
504 mov ecx, g_cchAlpha + 1
505 repne scasb
506 cmp ecx, 1
507 mov eax, __LINE__
508 jne .failed
509
510 ; check that repe scasb (al=0) behaves as expected.
511 lea xDI, REF_GLOBAL(g_szAlpha)
512 xor eax, eax ; find the end
513 mov ecx, g_cchAlpha + 1
514 repe scasb
515 cmp ecx, g_cchAlpha
516 mov eax, __LINE__
517 jne .failed
518
519 ; repne is last, it wins.
520 lea xDI, REF_GLOBAL(g_szAlpha)
521 xor eax, eax ; find the end
522 mov ecx, g_cchAlpha + 1
523 db 0f3h ; repe - ignored
524 db 0f2h ; repne
525 scasb
526 cmp ecx, 1
527 mov eax, __LINE__
528 jne .failed
529
530 ; repe is last, it wins.
531 lea xDI, REF_GLOBAL(g_szAlpha)
532 xor eax, eax ; find the end
533 mov ecx, g_cchAlpha + 1
534 db 0f2h ; repne - ignored
535 db 0f3h ; repe
536 scasb
537 cmp ecx, g_cchAlpha
538 mov eax, __LINE__
539 jne .failed
540
541 ;
542 ; Check if stosb works with both prefixes.
543 ;
544 cld
545 mov dx, ds
546 mov es, dx
547 mov xDI, REF_GLOBAL(g_pbEfPage)
548 xor eax, eax
549 mov ecx, 01000h
550 rep stosb
551
552 mov xDI, REF_GLOBAL(g_pbEfPage)
553 mov ecx, 4
554 mov eax, 0ffh
555 db 0f2h ; repne
556 stosb
557 mov eax, __LINE__
558 cmp ecx, 0
559 jne .failed
560 mov eax, __LINE__
561 mov xDI, REF_GLOBAL(g_pbEfPage)
562 cmp dword [xDI], 0ffffffffh
563 jne .failed
564 cmp dword [xDI+4], 0
565 jne .failed
566
567 mov xDI, REF_GLOBAL(g_pbEfPage)
568 mov ecx, 4
569 mov eax, 0feh
570 db 0f3h ; repe
571 stosb
572 mov eax, __LINE__
573 cmp ecx, 0
574 jne .failed
575 mov eax, __LINE__
576 mov xDI, REF_GLOBAL(g_pbEfPage)
577 cmp dword [xDI], 0fefefefeh
578 jne .failed
579 cmp dword [xDI+4], 0
580 jne .failed
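 ; stosb never tests ZF, so the checks above document that an f2h (repne) or
 ; f3h (repe) prefix is simply treated like a plain rep here: ecx counts down
 ; to zero and exactly four bytes get stored in both cases.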
581
582 ;
583 ; String operations shouldn't crash because of an invalid address if rCX is 0.
584 ;
585 mov eax, __LINE__
586 cld
587 mov dx, ds
588 mov es, dx
589 mov xDI, REF_GLOBAL(g_pbEfPage)
590 xor xCX, xCX
591 rep stosb ; no trap
592
593 ;
594 ; INS/OUTS will trap in ring-3 even when rCX is 0. (ASSUMES IOPL < 3)
595 ;
596 mov eax, __LINE__
597 cld
598 mov dx, ss
599 mov ss, dx
600 mov xDI, xSP
601 xor xCX, xCX
602 ShouldTrap X86_XCPT_GP, rep insb
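 ; The testcase expects the I/O permission check (CPL > IOPL in ring-3, no
 ; IOPB coverage) to raise #GP even though the rep count is zero, i.e. before
 ; the count is consulted.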
603
604 ;
605 ; SMSW can get to the whole of CR0.
606 ;
607 mov eax, __LINE__
608 xor xBX, xBX
609 smsw xBX
610 test ebx, X86_CR0_PG
611 jz .failed
612 test ebx, X86_CR0_PE
613 jz .failed
614
615 ;
616 ; Will the CPU decode the whole r/m+sib stuff before signalling a lock
617 ; prefix error? Use the EF exec page and a LOCK ADD CL,[rDI + disp32]
618 ; instruction at the very end of it.
619 ;
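 ; The bytes stored below encode 'lock add cl, [xDI+disp32]': f0h = LOCK,
 ; 02h = ADD r8, r/m8, 8fh = ModRM (mod=10b, reg=001b/cl, rm=111b/xDI with a
 ; 32-bit displacement), then the displacement bytes; the trailing 0cch in the
 ; first case is just an int3 filler.  LOCK with a register destination is
 ; invalid, so if the CPU can fetch enough bytes to decode the instruction it
 ; should raise #UD; if the fetch itself runs into the unmapped page, #PF wins.
 ; The placements below probe where that boundary lies.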
620 mov eax, __LINE__
621 mov xDI, REF_GLOBAL(g_pbEfExecPage)
622 add xDI, 1000h - 8h
623 mov byte [xDI+0], 0f0h
624 mov byte [xDI+1], 002h
625 mov byte [xDI+2], 08fh
626 mov dword [xDI+3], 000000000h
627 mov byte [xDI+7], 0cch
628 ShouldTrap X86_XCPT_UD, call xDI
629
630 mov eax, __LINE__
631 mov xDI, REF_GLOBAL(g_pbEfExecPage)
632 add xDI, 1000h - 7h
633 mov byte [xDI+0], 0f0h
634 mov byte [xDI+1], 002h
635 mov byte [xDI+2], 08Fh
636 mov dword [xDI+3], 000000000h
637 ShouldTrap X86_XCPT_UD, call xDI
638
639 mov eax, __LINE__
640 mov xDI, REF_GLOBAL(g_pbEfExecPage)
641 add xDI, 1000h - 4h
642 mov byte [xDI+0], 0f0h
643 mov byte [xDI+1], 002h
644 mov byte [xDI+2], 08Fh
645 mov byte [xDI+3], 000h
646 ShouldTrap X86_XCPT_PF, call xDI
647
648 mov eax, __LINE__
649 mov xDI, REF_GLOBAL(g_pbEfExecPage)
650 add xDI, 1000h - 6h
651 mov byte [xDI+0], 0f0h
652 mov byte [xDI+1], 002h
653 mov byte [xDI+2], 08Fh
654 mov byte [xDI+3], 00h
655 mov byte [xDI+4], 00h
656 mov byte [xDI+5], 00h
657 ShouldTrap X86_XCPT_PF, call xDI
658
659 mov eax, __LINE__
660 mov xDI, REF_GLOBAL(g_pbEfExecPage)
661 add xDI, 1000h - 5h
662 mov byte [xDI+0], 0f0h
663 mov byte [xDI+1], 002h
664 mov byte [xDI+2], 08Fh
665 mov byte [xDI+3], 00h
666 mov byte [xDI+4], 00h
667 ShouldTrap X86_XCPT_PF, call xDI
668
669 mov eax, __LINE__
670 mov xDI, REF_GLOBAL(g_pbEfExecPage)
671 add xDI, 1000h - 4h
672 mov byte [xDI+0], 0f0h
673 mov byte [xDI+1], 002h
674 mov byte [xDI+2], 08Fh
675 mov byte [xDI+3], 00h
676 ShouldTrap X86_XCPT_PF, call xDI
677
678 mov eax, __LINE__
679 mov xDI, REF_GLOBAL(g_pbEfExecPage)
680 add xDI, 1000h - 3h
681 mov byte [xDI+0], 0f0h
682 mov byte [xDI+1], 002h
683 mov byte [xDI+2], 08Fh
684 ShouldTrap X86_XCPT_PF, call xDI
685
686 mov eax, __LINE__
687 mov xDI, REF_GLOBAL(g_pbEfExecPage)
688 add xDI, 1000h - 2h
689 mov byte [xDI+0], 0f0h
690 mov byte [xDI+1], 002h
691 ShouldTrap X86_XCPT_PF, call xDI
692
693 mov eax, __LINE__
694 mov xDI, REF_GLOBAL(g_pbEfExecPage)
695 add xDI, 1000h - 1h
696 mov byte [xDI+0], 0f0h
697 ShouldTrap X86_XCPT_PF, call xDI
698
699
700
701.success:
702 xor eax, eax
703.return:
704%ifdef RT_ARCH_AMD64
705 pop r15
706 pop r14
707 pop r13
708 pop r12
709 pop r11
710 pop r10
711 pop r9
712 pop r8
713%endif
714 pop xDI
715 pop xSI
716 pop xDX
717 pop xCX
718 pop xBX
719 popf
720 leave
721 ret
722
723.failed2:
724 mov eax, -1
725.failed:
726 jmp .return
727ENDPROC x861_Test1
728
729
730
731;;
732; Tests the effect of prefix order in group 14.
733;
734BEGINPROC x861_Test2
735 SAVE_ALL_PROLOGUE
736
737 ; Check testcase preconditions.
738 call x861_LoadUniqueRegValuesSSE
739 mov eax, __LINE__
740 db 00Fh, 073h, 0D0h, 080h ; psrlq mm0, 128
741 call .check_mm0_zero_and_xmm0_nz
742
743 call x861_LoadUniqueRegValuesSSE
744 mov eax, __LINE__
745 db 066h, 00Fh, 073h, 0D0h, 080h ; psrlq xmm0, 128
746 call .check_xmm0_zero_and_mm0_nz
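 ; Decoding note: 0fh 73h /2 ib is psrlq-by-immediate; the d0h ModRM selects
 ; register 0, i.e. mm0, or xmm0 when a 66h prefix picks the SSE form.  The
 ; 080h immediate (128) exceeds the element width, so the destination is
 ; cleared.  The two helper routines verify which of mm0/xmm0 got zeroed,
 ; which is what the rest of this test keys off.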
747
748
749 ;
750 ; Real test - Inject other prefixes before the 066h and see what
751 ; happens.
752 ;
753
754 ; General checks that order does not matter, etc.
755 call x861_LoadUniqueRegValuesSSE
756 mov eax, __LINE__
757 db 026h, 066h, 00Fh, 073h, 0D0h, 080h
758 call .check_xmm0_zero_and_mm0_nz
759
760 call x861_LoadUniqueRegValuesSSE
761 mov eax, __LINE__
762 db 066h, 026h, 00Fh, 073h, 0D0h, 080h
763 call .check_xmm0_zero_and_mm0_nz
764
765 call x861_LoadUniqueRegValuesSSE
766 mov eax, __LINE__
767 db 066h, 067h, 00Fh, 073h, 0D0h, 080h
768 call .check_xmm0_zero_and_mm0_nz
769
770 call x861_LoadUniqueRegValuesSSE
771 mov eax, __LINE__
772 db 067h, 066h, 00Fh, 073h, 0D0h, 080h
773 call .check_xmm0_zero_and_mm0_nz
774
775 call x861_LoadUniqueRegValuesSSE
776 mov eax, __LINE__
777 db 067h, 066h, 065h, 00Fh, 073h, 0D0h, 080h
778 call .check_xmm0_zero_and_mm0_nz
779
780%ifdef RT_ARCH_AMD64
781 call x861_LoadUniqueRegValuesSSE
782 mov eax, __LINE__
783 db 048h, 066h, 00Fh, 073h, 0D0h, 080h ; REX.W
784 call .check_xmm0_zero_and_mm0_nz
785
786 call x861_LoadUniqueRegValuesSSE
787 mov eax, __LINE__
788 db 044h, 066h, 00Fh, 073h, 0D0h, 080h ; REX.R
789 call .check_xmm0_zero_and_mm0_nz
790
791 call x861_LoadUniqueRegValuesSSE
792 mov eax, __LINE__
793 db 042h, 066h, 00Fh, 073h, 0D0h, 080h ; REX.X
794 call .check_xmm0_zero_and_mm0_nz
795
796 ; Actually for REX, order does matter if the prefix is used.
797 call x861_LoadUniqueRegValuesSSE
798 mov eax, __LINE__
799 db 041h, 066h, 00Fh, 073h, 0D0h, 080h ; REX.B
800 call .check_xmm0_zero_and_mm0_nz
801
802 call x861_LoadUniqueRegValuesSSE
803 mov eax, __LINE__
804 db 066h, 041h, 00Fh, 073h, 0D0h, 080h ; REX.B
805 call .check_xmm8_zero_and_xmm0_nz
806%endif
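 ; A REX prefix is only honoured when it is the byte immediately preceding the
 ; opcode bytes; in the 041h,066h ordering it is dropped and xmm0 is the
 ; target, while in the 066h,041h ordering REX.B extends the ModRM rm field
 ; and xmm8 gets cleared instead.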
807
808 ; Check all ignored prefixes (repeats some of the above).
809 call x861_LoadUniqueRegValuesSSE
810 mov eax, __LINE__
811 db 066h, 026h, 00Fh, 073h, 0D0h, 080h ; es
812 call .check_xmm0_zero_and_mm0_nz
813
814 call x861_LoadUniqueRegValuesSSE
815 mov eax, __LINE__
816 db 066h, 065h, 00Fh, 073h, 0D0h, 080h ; gs
817 call .check_xmm0_zero_and_mm0_nz
818
819 call x861_LoadUniqueRegValuesSSE
820 mov eax, __LINE__
821 db 066h, 064h, 00Fh, 073h, 0D0h, 080h ; fs
822 call .check_xmm0_zero_and_mm0_nz
823
824 call x861_LoadUniqueRegValuesSSE
825 mov eax, __LINE__
826 db 066h, 02eh, 00Fh, 073h, 0D0h, 080h ; cs
827 call .check_xmm0_zero_and_mm0_nz
828
829 call x861_LoadUniqueRegValuesSSE
830 mov eax, __LINE__
831 db 066h, 036h, 00Fh, 073h, 0D0h, 080h ; ss
832 call .check_xmm0_zero_and_mm0_nz
833
834 call x861_LoadUniqueRegValuesSSE
835 mov eax, __LINE__
836 db 066h, 03eh, 00Fh, 073h, 0D0h, 080h ; ds
837 call .check_xmm0_zero_and_mm0_nz
838
839 call x861_LoadUniqueRegValuesSSE
840 mov eax, __LINE__
841 db 066h, 067h, 00Fh, 073h, 0D0h, 080h ; addr size
842 call .check_xmm0_zero_and_mm0_nz
843
844%ifdef RT_ARCH_AMD64
845 call x861_LoadUniqueRegValuesSSE
846 mov eax, __LINE__
847 db 066h, 048h, 00Fh, 073h, 0D0h, 080h ; REX.W
848 call .check_xmm0_zero_and_mm0_nz
849
850 call x861_LoadUniqueRegValuesSSE
851 mov eax, __LINE__
852 db 066h, 044h, 00Fh, 073h, 0D0h, 080h ; REX.R
853 call .check_xmm0_zero_and_mm0_nz
854
855 call x861_LoadUniqueRegValuesSSE
856 mov eax, __LINE__
857 db 066h, 042h, 00Fh, 073h, 0D0h, 080h ; REX.X
858 call .check_xmm0_zero_and_mm0_nz
859
860 call x861_LoadUniqueRegValuesSSE
861 mov eax, __LINE__
862 db 066h, 041h, 00Fh, 073h, 0D0h, 080h ; REX.B - has actual effect on the instruction.
863 call .check_xmm8_zero_and_xmm0_nz
864%endif
865
866 ; Repeated 066h prefixes until we hit the 15 byte maximum instruction length.
867 call x861_LoadUniqueRegValuesSSE
868 mov eax, __LINE__
869 db 066h, 066h, 00Fh, 073h, 0D0h, 080h
870 call .check_xmm0_zero_and_mm0_nz
871
872 call x861_LoadUniqueRegValuesSSE
873 mov eax, __LINE__
874 db 066h, 066h, 066h, 00Fh, 073h, 0D0h, 080h
875 call .check_xmm0_zero_and_mm0_nz
876
877 call x861_LoadUniqueRegValuesSSE
878 mov eax, __LINE__
879 db 066h, 066h, 066h, 066h, 066h, 066h, 066h, 066h, 00Fh, 073h, 0D0h, 080h
880 call .check_xmm0_zero_and_mm0_nz
881
882 call x861_LoadUniqueRegValuesSSE
883 mov eax, __LINE__
884 db 066h, 066h, 066h, 066h, 066h, 066h, 066h, 066h, 066h, 066h, 066h, 00Fh, 073h, 0D0h, 080h
885 call .check_xmm0_zero_and_mm0_nz
886
887 ShouldTrap X86_XCPT_GP, db 066h, 066h, 066h, 066h, 066h, 066h, 066h, 066h, 066h, 066h, 066h, 066h, 00Fh, 073h, 0D0h, 080h
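 ; With the four opcode/ModRM/immediate bytes, eleven 066h prefixes bring the
 ; instruction to the 15 byte maximum length; the twelve-prefix version above
 ; is 16 bytes, which the CPU refuses with #GP(0).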
888
889%ifdef RT_ARCH_AMD64
890 ; Repeated REX is parsed, but only the last byte matters.
891 call x861_LoadUniqueRegValuesSSE
892 mov eax, __LINE__
893 db 066h, 041h, 048h, 00Fh, 073h, 0D0h, 080h ; REX.B, REX.W
894 call .check_xmm0_zero_and_mm0_nz
895
896 call x861_LoadUniqueRegValuesSSE
897 mov eax, __LINE__
898 db 066h, 048h, 041h, 00Fh, 073h, 0D0h, 080h ; REX.W, REX.B
899 call .check_xmm8_zero_and_xmm0_nz
900
901 call x861_LoadUniqueRegValuesSSE
902 mov eax, __LINE__
903 db 066h, 048h, 044h, 042h, 048h, 044h, 042h, 048h, 044h, 042h, 041h, 00Fh, 073h, 0D0h, 080h
904 call .check_xmm8_zero_and_xmm0_nz
905
906 call x861_LoadUniqueRegValuesSSE
907 mov eax, __LINE__
908 db 066h, 041h, 041h, 041h, 041h, 041h, 041h, 041h, 041h, 041h, 04eh, 00Fh, 073h, 0D0h, 080h
909 call .check_xmm0_zero_and_mm0_nz
910%endif
911
912 ; Undefined sequences with prefixes that count.
913 ShouldTrap X86_XCPT_UD, db 0f0h, 066h, 00Fh, 073h, 0D0h, 080h ; LOCK
914 ShouldTrap X86_XCPT_UD, db 0f2h, 066h, 00Fh, 073h, 0D0h, 080h ; REPNZ
915 ShouldTrap X86_XCPT_UD, db 0f3h, 066h, 00Fh, 073h, 0D0h, 080h ; REPZ
916 ShouldTrap X86_XCPT_UD, db 066h, 0f2h, 00Fh, 073h, 0D0h, 080h
917 ShouldTrap X86_XCPT_UD, db 066h, 0f3h, 00Fh, 073h, 0D0h, 080h
918 ShouldTrap X86_XCPT_UD, db 066h, 0f3h, 0f2h, 00Fh, 073h, 0D0h, 080h
919 ShouldTrap X86_XCPT_UD, db 066h, 0f2h, 0f3h, 00Fh, 073h, 0D0h, 080h
920 ShouldTrap X86_XCPT_UD, db 0f2h, 066h, 0f3h, 00Fh, 073h, 0D0h, 080h
921 ShouldTrap X86_XCPT_UD, db 0f3h, 066h, 0f2h, 00Fh, 073h, 0D0h, 080h
922 ShouldTrap X86_XCPT_UD, db 0f3h, 0f2h, 066h, 00Fh, 073h, 0D0h, 080h
923 ShouldTrap X86_XCPT_UD, db 0f2h, 0f3h, 066h, 00Fh, 073h, 0D0h, 080h
924 ShouldTrap X86_XCPT_UD, db 0f0h, 0f2h, 066h, 0f3h, 00Fh, 073h, 0D0h, 080h
925 ShouldTrap X86_XCPT_UD, db 0f0h, 0f3h, 066h, 0f2h, 00Fh, 073h, 0D0h, 080h
926 ShouldTrap X86_XCPT_UD, db 0f0h, 0f3h, 0f2h, 066h, 00Fh, 073h, 0D0h, 080h
927 ShouldTrap X86_XCPT_UD, db 0f0h, 0f2h, 0f3h, 066h, 00Fh, 073h, 0D0h, 080h
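 ; These combinations presumably fault because f2h/f3h act as mandatory
 ; prefixes in the 0fh 73h opcode space, where no such instruction form is
 ; defined; the ShouldTrap records above simply document that #UD is raised
 ; regardless of where the 066h sits (and that LOCK never rescues it).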
928
929.success:
930 xor eax, eax
931.return:
932 SAVE_ALL_EPILOGUE
933 ret
934
935.check_xmm0_zero_and_mm0_nz:
936 sub xSP, 20h
937 movdqu [xSP], xmm0
938 cmp dword [xSP], 0
939 jne .failed3
940 cmp dword [xSP + 4], 0
941 jne .failed3
942 cmp dword [xSP + 8], 0
943 jne .failed3
944 cmp dword [xSP + 12], 0
945 jne .failed3
946 movq [xSP], mm0
947 cmp dword [xSP], 0
948 je .failed3
949 cmp dword [xSP + 4], 0
950 je .failed3
951 add xSP, 20h
952 ret
953
954.check_mm0_zero_and_xmm0_nz:
955 sub xSP, 20h
956 movq [xSP], mm0
957 cmp dword [xSP], 0
958 jne .failed3
959 cmp dword [xSP + 4], 0
960 jne .failed3
961 movdqu [xSP], xmm0
962 cmp dword [xSP], 0
963 je .failed3
964 cmp dword [xSP + 4], 0
965 je .failed3
966 cmp dword [xSP + 8], 0
967 je .failed3
968 cmp dword [xSP + 12], 0
969 je .failed3
970 add xSP, 20h
971 ret
972
973%ifdef RT_ARCH_AMD64
974.check_xmm8_zero_and_xmm0_nz:
975 sub xSP, 20h
976 movdqu [xSP], xmm8
977 cmp dword [xSP], 0
978 jne .failed3
979 cmp dword [xSP + 4], 0
980 jne .failed3
981 cmp dword [xSP + 8], 0
982 jne .failed3
983 cmp dword [xSP + 12], 0
984 jne .failed3
985 movdqu [xSP], xmm0
986 cmp dword [xSP], 0
987 je .failed3
988 cmp dword [xSP + 4], 0
989 je .failed3
990 cmp dword [xSP + 8], 0
991 je .failed3
992 cmp dword [xSP + 12], 0
993 je .failed3
994 add xSP, 20h
995 ret
996%endif
997
998.failed3:
999 add xSP, 20h + xS
1000 jmp .return
1001
1002
1003ENDPROC x861_Test2
1004
1005
1006;;
1007 ; Tests how much of their 512 byte memory operand fxsave and fxrstor
1008 ; actually access.
1009;
1010BEGINPROC x861_Test3
1011 SAVE_ALL_PROLOGUE
1012 call x861_LoadUniqueRegValuesSSE
1013 mov xDI, REF_GLOBAL(g_pbEfExecPage)
1014
1015 ; Check testcase preconditions.
1016 fxsave [xDI]
1017 fxrstor [xDI]
1018
1019 add xDI, PAGE_SIZE - 512
1020 mov xSI, xDI
1021 fxsave [xDI]
1022 fxrstor [xDI]
1023
1024 ; 464:511 are available for software use. Check that they are left
1025 ; untouched by fxsave.
1026 mov eax, 0aabbccddh
1027 mov ecx, 512 / 4
1028 cld
1029 rep stosd
1030 mov xDI, xSI
1031 fxsave [xDI]
1032
1033 mov ebx, 512
1034.check_software_area_loop:
1035 cmp [xDI + xBX - 4], eax
1036 jne .check_software_area_done
1037 sub ebx, 4
1038 jmp .check_software_area_loop
1039.check_software_area_done:
1040 cmp ebx, 464
1041 mov eax, __LINE__
1042 ja .return
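 ; The loop above scans downwards from offset 512 for the highest dword that
 ; fxsave overwrote; ebx ends up just past the last modified byte and must be
 ; at or below 464, otherwise fxsave touched the software-reserved area and
 ; the test fails.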
1043
1044 ; Check that a save + restore + save cycle yields the same results.
1045 mov xBX, REF_GLOBAL(g_pbEfExecPage)
1046 mov xDI, xBX
1047 mov eax, 066778899h
1048 mov ecx, 512 * 2 / 4
1049 cld
1050 rep stosd
1051 fxsave [xBX]
1052
1053 call x861_ClearRegistersSSE
1054 mov xBX, REF_GLOBAL(g_pbEfExecPage)
1055 fxrstor [xBX]
1056
1057 fxsave [xBX + 512]
1058 mov xSI, xBX
1059 lea xDI, [xBX + 512]
1060 mov ecx, 512
1061 cld
1062 repe cmpsb
1063 mov eax, __LINE__
1064 jnz .return
1065
1066
1067 ; 464:511 are available for software use. Let's see how carefully accesses
1068 ; to the full 512 bytes are checked...
1069 call x861_LoadUniqueRegValuesSSE
1070 mov xDI, REF_GLOBAL(g_pbEfExecPage)
1071 add xDI, PAGE_SIZE - 512
1072 ShouldTrap X86_XCPT_PF, fxsave [xDI + 16]
1073 ShouldTrap X86_XCPT_PF, fxsave [xDI + 32]
1074 ShouldTrap X86_XCPT_PF, fxsave [xDI + 48]
1075 ShouldTrap X86_XCPT_PF, fxsave [xDI + 64]
1076 ShouldTrap X86_XCPT_PF, fxsave [xDI + 80]
1077 ShouldTrap X86_XCPT_PF, fxsave [xDI + 96]
1078 ShouldTrap X86_XCPT_PF, fxsave [xDI + 128]
1079 ShouldTrap X86_XCPT_PF, fxsave [xDI + 144]
1080 ShouldTrap X86_XCPT_PF, fxsave [xDI + 160]
1081 ShouldTrap X86_XCPT_PF, fxsave [xDI + 176]
1082 ShouldTrap X86_XCPT_PF, fxsave [xDI + 192]
1083 ShouldTrap X86_XCPT_PF, fxsave [xDI + 208]
1084 ShouldTrap X86_XCPT_PF, fxsave [xDI + 224]
1085 ShouldTrap X86_XCPT_PF, fxsave [xDI + 240]
1086 ShouldTrap X86_XCPT_PF, fxsave [xDI + 256]
1087 ShouldTrap X86_XCPT_PF, fxsave [xDI + 384]
1088 ShouldTrap X86_XCPT_PF, fxsave [xDI + 432]
1089 ShouldTrap X86_XCPT_PF, fxsave [xDI + 496]
1090
1091 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 16]
1092 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 32]
1093 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 48]
1094 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 64]
1095 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 80]
1096 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 96]
1097 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 128]
1098 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 144]
1099 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 160]
1100 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 176]
1101 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 192]
1102 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 208]
1103 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 224]
1104 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 240]
1105 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 256]
1106 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 384]
1107 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 432]
1108 ShouldTrap X86_XCPT_PF, fxrstor [xDI + 496]
1109
1110 ; Unaligned accesses will cause #GP(0). This takes precedence over #PF.
1111 ShouldTrap X86_XCPT_GP, fxsave [xDI + 1]
1112 ShouldTrap X86_XCPT_GP, fxsave [xDI + 2]
1113 ShouldTrap X86_XCPT_GP, fxsave [xDI + 3]
1114 ShouldTrap X86_XCPT_GP, fxsave [xDI + 4]
1115 ShouldTrap X86_XCPT_GP, fxsave [xDI + 5]
1116 ShouldTrap X86_XCPT_GP, fxsave [xDI + 6]
1117 ShouldTrap X86_XCPT_GP, fxsave [xDI + 7]
1118 ShouldTrap X86_XCPT_GP, fxsave [xDI + 8]
1119 ShouldTrap X86_XCPT_GP, fxsave [xDI + 9]
1120 ShouldTrap X86_XCPT_GP, fxsave [xDI + 10]
1121 ShouldTrap X86_XCPT_GP, fxsave [xDI + 11]
1122 ShouldTrap X86_XCPT_GP, fxsave [xDI + 12]
1123 ShouldTrap X86_XCPT_GP, fxsave [xDI + 13]
1124 ShouldTrap X86_XCPT_GP, fxsave [xDI + 14]
1125 ShouldTrap X86_XCPT_GP, fxsave [xDI + 15]
1126
1127 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 1]
1128 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 2]
1129 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 3]
1130 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 4]
1131 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 5]
1132 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 6]
1133 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 7]
1134 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 8]
1135 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 9]
1136 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 10]
1137 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 11]
1138 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 12]
1139 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 13]
1140 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 14]
1141 ShouldTrap X86_XCPT_GP, fxrstor [xDI + 15]
1142
1143 ; Let's check what a #PF in fxsave changes ... nothing on Intel.
1144 mov ebx, 16
1145.fxsave_pf_effect_loop:
1146 mov xDI, REF_GLOBAL(g_pbEfExecPage)
1147 add xDI, PAGE_SIZE - 512 * 2
1148 mov xSI, xDI
1149 mov eax, 066778899h
1150 mov ecx, 512 * 2 / 4
1151 cld
1152 rep stosd
1153
1154 ShouldTrap X86_XCPT_PF, fxsave [xSI + PAGE_SIZE - 512 + xBX]
1155
1156 mov ecx, 512 / 4
1157 lea xDI, [xSI + 512]
1158 cld
1159 repz cmpsd
1160 lea xAX, [xBX + 20000]
1161 jnz .return
1162
1163 add ebx, 16
1164 cmp ebx, 512
1165 jbe .fxsave_pf_effect_loop
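 ; Each pass above re-fills a 1 KB buffer ending at the guard page, provokes a
 ; #PF from fxsave at an offset steered by xBX, and then compares the two 512
 ; byte halves of the buffer; any stray write by the faulting fxsave would make
 ; them differ, so on the CPUs this was written against (see the note above) a
 ; faulting fxsave stores nothing at all.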
1166
1167 ; Let's check that a #PF in fxrstor does not have any effect on the FPU or SSE state.
1168 mov xDI, REF_GLOBAL(g_pbEfExecPage)
1169 mov ecx, PAGE_SIZE / 4
1170 mov eax, 0ffaa33cch
1171 cld
1172 rep stosd
1173
1174 call x861_LoadUniqueRegValuesSSE
1175 mov xDI, REF_GLOBAL(g_pbEfExecPage)
1176 fxsave [xDI]
1177
1178 call x861_ClearRegistersSSE
1179 mov xDI, REF_GLOBAL(g_pbEfExecPage)
1180 fxsave [xDI + 512]
1181
1182 mov ebx, 16
1183.fxrstor_pf_effect_loop:
1184 mov xDI, REF_GLOBAL(g_pbEfExecPage)
1185 mov xSI, xDI
1186 lea xDI, [xDI + PAGE_SIZE - 512 + xBX]
1187 mov ecx, 512
1188 sub ecx, ebx
1189 cld
1190 rep movsb ; copy unique state to end of page.
1191
1192 push xBX
1193 call x861_ClearRegistersSSE
1194 pop xBX
1195 mov xDI, REF_GLOBAL(g_pbEfExecPage)
1196 ShouldTrap X86_XCPT_PF, fxrstor [xDI + PAGE_SIZE - 512 + xBX] ; try load unique state
1197
1198 mov xDI, REF_GLOBAL(g_pbEfExecPage)
1199 lea xSI, [xDI + 512] ; point it to the clean state, which is what we expect.
1200 lea xDI, [xDI + 1024]
1201 fxsave [xDI] ; save whatever the fpu state currently is.
1202 mov ecx, 512 / 4
1203 cld
1204 repe cmpsd
1205 lea xAX, [xBX + 40000]
1206 jnz .return ; it shouldn't be modified by faulting fxrstor, i.e. a clean state.
1207
1208 add ebx, 16
1209 cmp ebx, 512
1210 jbe .fxrstor_pf_effect_loop
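 ; Same idea for fxrstor: a partial copy of the unique state image is placed so
 ; that it runs right up to the guard page, the restore is expected to #PF, and
 ; the resulting FPU/SSE state is then dumped with fxsave and compared against
 ; the clean state saved earlier - a faulting fxrstor must leave the register
 ; state untouched.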
1211
1212.success:
1213 xor eax, eax
1214.return:
1215 SAVE_ALL_EPILOGUE
1216 ret
1217ENDPROC x861_Test3
1218
1219
1220;;
1221; Tests various multibyte NOP sequences.
1222;
1223BEGINPROC x861_Test4
1224 SAVE_ALL_PROLOGUE
1225 call x861_ClearRegisters
1226
1227 ; Intel recommended sequences.
1228 nop
1229 db 066h, 090h
1230 db 00fh, 01fh, 000h
1231 db 00fh, 01fh, 040h, 000h
1232 db 00fh, 01fh, 044h, 000h, 000h
1233 db 066h, 00fh, 01fh, 044h, 000h, 000h
1234 db 00fh, 01fh, 080h, 000h, 000h, 000h, 000h
1235 db 00fh, 01fh, 084h, 000h, 000h, 000h, 000h, 000h
1236 db 066h, 00fh, 01fh, 084h, 000h, 000h, 000h, 000h, 000h
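 ; These are the recommended multi-byte NOP encodings from the Intel manuals,
 ; lengths 1 through 9: 0fh 1fh /0 with progressively larger ModRM/SIB/
 ; displacement forms, plus an operand size prefix for the 6 and 9 byte
 ; variants.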
1237
1238 ; Check that the NOPs are allergic to lock prefixing.
1239 ShouldTrap X86_XCPT_UD, db 0f0h, 090h ; lock prefixed NOP.
1240 ShouldTrap X86_XCPT_UD, db 0f0h, 066h, 090h ; lock prefixed two byte NOP.
1241 ShouldTrap X86_XCPT_UD, db 0f0h, 00fh, 01fh, 000h ; lock prefixed three byte NOP.
1242
1243 ; Check the range of instructions that AMD marks as NOPs.
1244%macro TST_NOP 1
1245 db 00fh, %1, 000h
1246 db 00fh, %1, 040h, 000h
1247 db 00fh, %1, 044h, 000h, 000h
1248 db 066h, 00fh, %1, 044h, 000h, 000h
1249 db 00fh, %1, 080h, 000h, 000h, 000h, 000h
1250 db 00fh, %1, 084h, 000h, 000h, 000h, 000h, 000h
1251 db 066h, 00fh, %1, 084h, 000h, 000h, 000h, 000h, 000h
1252 ShouldTrap X86_XCPT_UD, db 0f0h, 00fh, %1, 000h
1253%endmacro
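 ; The macro mirrors the ModRM/displacement forms used for the Intel sequences
 ; above, but with opcode byte %1, and also checks that a LOCK prefix turns the
 ; sequence into #UD.  0fh 19h through 0fh 1fh is the reserved-NOP ('nop r/m')
 ; range that AMD documents as NOPs; the 0fh 0dh exercised last is the AMD
 ; prefetch group.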
1254 TST_NOP 019h
1255 TST_NOP 01ah
1256 TST_NOP 01bh
1257 TST_NOP 01ch
1258 TST_NOP 01dh
1259 TST_NOP 01eh
1260 TST_NOP 01fh
1261
1262 ; The AMD P group; Intel marks this as a NOP.
1263 TST_NOP 00dh
1264
1265.success:
1266 xor eax, eax
1267.return:
1268 SAVE_ALL_EPILOGUE
1269 ret
1270ENDPROC x861_Test4
1271
1272;;
1273; Terminate the trap info array with a NIL entry.
1274BEGINDATA
1275GLOBALNAME g_aTrapInfoEnd
1276istruc TRAPINFO
1277 at TRAPINFO.uTrapPC, RTCCPTR_DEF 0
1278 at TRAPINFO.uResumePC, RTCCPTR_DEF 0
1279 at TRAPINFO.u8TrapNo, db 0
1280 at TRAPINFO.cbInstr, db 0
1281iend
1282