Changeset 100831 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp: Aug 9, 2023 2:17:40 PM
- svn:sync-xref-src-repo-rev: 158731
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 2 edited
Legend:
- Unmodified (no prefix)
- Added ('+' prefix)
- Removed ('-' prefix)
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp
r100826 → r100831

  */
 
-IEM_DECL_IMPL_DEF(void, iemAImpl_cmp_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+IEM_DECL_IMPL_DEF(void, iemAImpl_cmp_u64,(uint64_t const *puDst, uint64_t uSrc, uint32_t *pfEFlags))
 {
     uint64_t uDstTmp = *puDst;
…
 # if !defined(RT_ARCH_X86) || defined(IEM_WITHOUT_ASSEMBLY)
 
-IEM_DECL_IMPL_DEF(void, iemAImpl_cmp_u32,(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags))
+IEM_DECL_IMPL_DEF(void, iemAImpl_cmp_u32,(uint32_t const *puDst, uint32_t uSrc, uint32_t *pfEFlags))
 {
     uint32_t uDstTmp = *puDst;
…
 
 
-IEM_DECL_IMPL_DEF(void, iemAImpl_cmp_u16,(uint16_t *puDst, uint16_t uSrc, uint32_t *pfEFlags))
+IEM_DECL_IMPL_DEF(void, iemAImpl_cmp_u16,(uint16_t const *puDst, uint16_t uSrc, uint32_t *pfEFlags))
 {
     uint16_t uDstTmp = *puDst;
…
  */
 
-IEM_DECL_IMPL_DEF(void, iemAImpl_test_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+IEM_DECL_IMPL_DEF(void, iemAImpl_test_u64,(uint64_t const *puDst, uint64_t uSrc, uint32_t *pfEFlags))
 {
     uint64_t uResult = *puDst & uSrc;
…
 # if !defined(RT_ARCH_X86) || defined(IEM_WITHOUT_ASSEMBLY)
 
-IEM_DECL_IMPL_DEF(void, iemAImpl_test_u32,(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags))
+IEM_DECL_IMPL_DEF(void, iemAImpl_test_u32,(uint32_t const *puDst, uint32_t uSrc, uint32_t *pfEFlags))
 {
     uint32_t uResult = *puDst & uSrc;
…
 
 
-IEM_DECL_IMPL_DEF(void, iemAImpl_test_u16,(uint16_t *puDst, uint16_t uSrc, uint32_t *pfEFlags))
+IEM_DECL_IMPL_DEF(void, iemAImpl_test_u16,(uint16_t const *puDst, uint16_t uSrc, uint32_t *pfEFlags))
 {
     uint16_t uResult = *puDst & uSrc;
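The only change in this file is the const qualifier on puDst: CMP and TEST compute EFLAGS from the destination operand but never write it back, so the destination can be mapped read-only. A minimal standalone sketch of that idea in plain C follows; the names (MyCmpU64, MY_EFL_ZF, MY_EFL_SF) are illustrative assumptions, not VirtualBox code.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative flag bits; only ZF and SF are modelled here. */
    #define MY_EFL_ZF  (1u << 6)
    #define MY_EFL_SF  (1u << 7)

    /* CMP-style helper: the destination is only read, so it can be const. */
    static void MyCmpU64(uint64_t const *puDst, uint64_t uSrc, uint32_t *pfEFlags)
    {
        uint64_t const uResult = *puDst - uSrc;   /* subtract, discard the result   */
        uint32_t fEfl = 0;
        if (uResult == 0)                  fEfl |= MY_EFL_ZF;
        if (uResult & (UINT64_C(1) << 63)) fEfl |= MY_EFL_SF;
        *pfEFlags = fEfl;                         /* only the flags are written back */
    }

    int main(void)
    {
        uint64_t const uDst = 42;
        uint32_t       fEfl = 0;
        MyCmpU64(&uDst, 42, &fEfl);
        printf("ZF=%u\n", !!(fEfl & MY_EFL_ZF));  /* prints ZF=1 */
        return 0;
    }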
trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h
r100826 → r100831

  * memory/register as the destination.
  */
-#define IEMOP_BODY_BINARY_rm_rv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
+#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
     \
…
                 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                 \
-                if ((a_fRW) == IEM_ACCESS_DATA_RW) /* not TEST and CMP */ \
-                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
+                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                 IEM_MC_END(); \
…
         { \
             case IEMMODE_16BIT: \
-                IEM_MC_BEGIN(3, 2); \
+                IEM_MC_BEGIN(3, 3); \
                 IEM_MC_ARG(uint16_t *,  pu16Dst,          0); \
                 IEM_MC_ARG(uint16_t,    u16Src,           1); \
                 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2); \
                 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
+                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                 \
                 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                 IEMOP_HLP_DONE_DECODING(); \
-                IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
+                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                 IEM_MC_FETCH_EFLAGS(EFlags); \
                 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                 \
-                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
+                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                 IEM_MC_COMMIT_EFLAGS(EFlags); \
                 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
                 \
             case IEMMODE_32BIT: \
-                IEM_MC_BEGIN(3, 2); \
+                IEM_MC_BEGIN(3, 3); \
                 IEM_MC_ARG(uint32_t *,  pu32Dst,          0); \
                 IEM_MC_ARG(uint32_t,    u32Src,           1); \
                 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2); \
                 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
+                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                 \
                 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                 IEMOP_HLP_DONE_DECODING(); \
-                IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
+                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                 IEM_MC_FETCH_EFLAGS(EFlags); \
                 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                 \
-                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
+                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                 IEM_MC_COMMIT_EFLAGS(EFlags); \
                 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
                 \
             case IEMMODE_64BIT: \
-                IEM_MC_BEGIN(3, 2); \
+                IEM_MC_BEGIN(3, 3); \
                 IEM_MC_ARG(uint64_t *,  pu64Dst,          0); \
                 IEM_MC_ARG(uint64_t,    u64Src,           1); \
                 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2); \
                 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
+                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                 \
                 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                 IEMOP_HLP_DONE_DECODING(); \
-                IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
+                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                 IEM_MC_FETCH_EFLAGS(EFlags); \
                 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                 \
-                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
+                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                 IEM_MC_COMMIT_EFLAGS(EFlags); \
                 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
         { \
             (void)0
-
-#define IEMOP_BODY_BINARY_rm_rv_NO_LOCK() \
-            IEMOP_HLP_DONE_DECODING(); \
-            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
-        } \
-    } \
-    (void)0
 /* Separate macro to work around parsing issue in IEMAllInstPython.py */
 #define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
     switch (pVCpu->iem.s.enmEffOpSize) \
     { \
         case IEMMODE_16BIT: \
-            IEM_MC_BEGIN(3, 2); \
+            IEM_MC_BEGIN(3, 3); \
             IEM_MC_ARG(uint16_t *,  pu16Dst,          0); \
             IEM_MC_ARG(uint16_t,    u16Src,           1); \
             IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2); \
             IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
+            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
             \
             IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
             IEMOP_HLP_DONE_DECODING(); \
-            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
+            IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
             IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
             IEM_MC_FETCH_EFLAGS(EFlags); \
             IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
             \
-            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
+            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
             IEM_MC_COMMIT_EFLAGS(EFlags); \
             IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
             \
         case IEMMODE_32BIT: \
-            IEM_MC_BEGIN(3, 2); \
+            IEM_MC_BEGIN(3, 3); \
             IEM_MC_ARG(uint32_t *,  pu32Dst,          0); \
             IEM_MC_ARG(uint32_t,    u32Src,           1); \
             IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2); \
             IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
+            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
             \
             IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
             IEMOP_HLP_DONE_DECODING(); \
-            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
+            IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
             IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
             IEM_MC_FETCH_EFLAGS(EFlags); \
             IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
             \
-            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW/* CMP,TEST */); \
+            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo /* CMP,TEST */); \
             IEM_MC_COMMIT_EFLAGS(EFlags); \
             IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
             \
         case IEMMODE_64BIT: \
-            IEM_MC_BEGIN(3, 2); \
+            IEM_MC_BEGIN(3, 3); \
             IEM_MC_ARG(uint64_t *,  pu64Dst,          0); \
             IEM_MC_ARG(uint64_t,    u64Src,           1); \
             IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2); \
             IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
+            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
             \
             IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
             IEMOP_HLP_DONE_DECODING(); \
-            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
+            IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
             IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
             IEM_MC_FETCH_EFLAGS(EFlags); \
             IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
             \
-            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
+            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
             IEM_MC_COMMIT_EFLAGS(EFlags); \
             IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
         IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
     } \
+    } \
+    } \
+    (void)0
+
+/**
+ * Body for read-only word/dword/qword instructions like TEST and CMP with
+ * memory/register as the destination.
+ */
+#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
+    \
+    /* \
+     * If rm is denoting a register, no more instruction bytes. \
+     */ \
+    if (IEM_IS_MODRM_REG_MODE(bRm)) \
+    { \
+        switch (pVCpu->iem.s.enmEffOpSize) \
+        { \
+            case IEMMODE_16BIT: \
+                IEM_MC_BEGIN(3, 0); \
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
+                IEM_MC_ARG(uint16_t *,  pu16Dst,          0); \
+                IEM_MC_ARG(uint16_t,    u16Src,           1); \
+                IEM_MC_ARG(uint32_t *,  pEFlags,          2); \
+                \
+                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                IEM_MC_REF_EFLAGS(pEFlags); \
+                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
+                \
+                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+                IEM_MC_END(); \
+                break; \
+                \
+            case IEMMODE_32BIT: \
+                IEM_MC_BEGIN(3, 0); \
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
+                IEM_MC_ARG(uint32_t *,  pu32Dst,          0); \
+                IEM_MC_ARG(uint32_t,    u32Src,           1); \
+                IEM_MC_ARG(uint32_t *,  pEFlags,          2); \
+                \
+                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                IEM_MC_REF_EFLAGS(pEFlags); \
+                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
+                \
+                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+                IEM_MC_END(); \
+                break; \
+                \
+            case IEMMODE_64BIT: \
+                IEM_MC_BEGIN(3, 0); \
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
+                IEM_MC_ARG(uint64_t *,  pu64Dst,          0); \
+                IEM_MC_ARG(uint64_t,    u64Src,           1); \
+                IEM_MC_ARG(uint32_t *,  pEFlags,          2); \
+                \
+                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                IEM_MC_REF_EFLAGS(pEFlags); \
+                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
+                \
+                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+                IEM_MC_END(); \
+                break; \
+                \
+            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
+        } \
+    } \
+    else \
+    { \
+        /* \
+         * We're accessing memory. \
+         * Note! We're putting the eflags on the stack here so we can commit them \
+         *       after the memory. \
+         */ \
+        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
+        { \
+            switch (pVCpu->iem.s.enmEffOpSize) \
+            { \
+                case IEMMODE_16BIT: \
+                    IEM_MC_BEGIN(3, 3); \
+                    IEM_MC_ARG(uint16_t const *, pu16Dst,         0); \
+                    IEM_MC_ARG(uint16_t,         u16Src,          1); \
+                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
+                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
+                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
+                    \
+                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+                    IEMOP_HLP_DONE_DECODING(); \
+                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
+                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                    IEM_MC_FETCH_EFLAGS(EFlags); \
+                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
+                    \
+                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
+                    IEM_MC_COMMIT_EFLAGS(EFlags); \
+                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+                    IEM_MC_END(); \
+                    break; \
+                    \
+                case IEMMODE_32BIT: \
+                    IEM_MC_BEGIN(3, 3); \
+                    IEM_MC_ARG(uint32_t const *, pu32Dst,         0); \
+                    IEM_MC_ARG(uint32_t,         u32Src,          1); \
+                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
+                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
+                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
+                    \
+                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+                    IEMOP_HLP_DONE_DECODING(); \
+                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
+                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                    IEM_MC_FETCH_EFLAGS(EFlags); \
+                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
+                    \
+                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
+                    IEM_MC_COMMIT_EFLAGS(EFlags); \
+                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+                    IEM_MC_END(); \
+                    break; \
+                    \
+                case IEMMODE_64BIT: \
+                    IEM_MC_BEGIN(3, 3); \
+                    IEM_MC_ARG(uint64_t const *, pu64Dst,         0); \
+                    IEM_MC_ARG(uint64_t,         u64Src,          1); \
+                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
+                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
+                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
+                    \
+                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+                    IEMOP_HLP_DONE_DECODING(); \
+                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
+                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                    IEM_MC_FETCH_EFLAGS(EFlags); \
+                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
+                    \
+                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
+                    IEM_MC_COMMIT_EFLAGS(EFlags); \
+                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+                    IEM_MC_END(); \
+                    break; \
+                    \
+                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
+            } \
+        } \
+        else \
+        { \
+            IEMOP_HLP_DONE_DECODING(); \
+            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
         } \
     } \
…
 {
     IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
+    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
     IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
 }
…
     IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
+    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
     IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
 }
…
 {
     IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
+    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
     IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
 }
…
 {
     IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
+    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
     IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
 }
…
     IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
+    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
     IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
 }
…
 {
     IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
+    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
     IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
 }
…
     IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
+    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
     IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
 }
…
 {
     IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
-    IEMOP_BODY_BINARY_rm_rv(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
-    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
+    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
 }
…
     IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_rv(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, IEM_ACCESS_DATA_R);
-    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
+    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
 }
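The other recurring change above is the move from the generic IEM_MC_MEM_MAP / IEM_MC_MEM_COMMIT_AND_UNMAP pair, which repeated an access-mode flag, to width- and access-specific macros (IEM_MC_MEM_MAP_U16_RW, IEM_MC_MEM_COMMIT_AND_UNMAP_RW, and the _RO variants) that thread an opaque unmap token through the new bUnmapInfo local. A rough standalone sketch of that token pattern in plain C follows; the FAKEGUESTMEM type and fake* helpers are invented for illustration and are not VirtualBox APIs.

    #include <stdint.h>
    #include <stdio.h>

    /* Toy stand-in for guest memory; not a VirtualBox type. */
    typedef struct FAKEGUESTMEM { uint16_t au16[8]; } FAKEGUESTMEM;

    /* Map a 16-bit word read/write and return an opaque unmap token via
       pbUnmapInfo, mirroring the shape of the map/unmap calls in the diff. */
    static uint16_t *fakeMemMapU16Rw(FAKEGUESTMEM *pMem, unsigned iWord, uint8_t *pbUnmapInfo)
    {
        *pbUnmapInfo = (uint8_t)iWord;   /* token the unmap call will consume */
        return &pMem->au16[iWord];
    }

    /* Commit and unmap using the token instead of restating an access flag. */
    static void fakeMemCommitAndUnmapRw(FAKEGUESTMEM *pMem, uint8_t bUnmapInfo)
    {
        (void)pMem; (void)bUnmapInfo;    /* a real implementation would flush here */
    }

    int main(void)
    {
        FAKEGUESTMEM Mem = {{ 1, 2, 3, 4, 5, 6, 7, 8 }};
        uint8_t      bUnmapInfo;
        uint16_t    *pu16Dst = fakeMemMapU16Rw(&Mem, 2, &bUnmapInfo);  /* map            */
        *pu16Dst += 40;                                                /* read-modify-write */
        fakeMemCommitAndUnmapRw(&Mem, bUnmapInfo);                     /* commit + unmap */
        printf("%u\n", (unsigned)Mem.au16[2]);                         /* prints 43      */
        return 0;
    }

The token lets the commit/unmap side identify the mapping without the caller repeating its width or access mode at the unmap site.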